blob_id
stringlengths 40
40
| directory_id
stringlengths 40
40
| path
stringlengths 3
616
| content_id
stringlengths 40
40
| detected_licenses
sequencelengths 0
112
| license_type
stringclasses 2
values | repo_name
stringlengths 5
115
| snapshot_id
stringlengths 40
40
| revision_id
stringlengths 40
40
| branch_name
stringclasses 777
values | visit_date
timestamp[us]date 2015-08-06 10:31:46
2023-09-06 10:44:38
| revision_date
timestamp[us]date 1970-01-01 02:38:32
2037-05-03 13:00:00
| committer_date
timestamp[us]date 1970-01-01 02:38:32
2023-09-06 01:08:06
| github_id
int64 4.92k
681M
⌀ | star_events_count
int64 0
209k
| fork_events_count
int64 0
110k
| gha_license_id
stringclasses 22
values | gha_event_created_at
timestamp[us]date 2012-06-04 01:52:49
2023-09-14 21:59:50
⌀ | gha_created_at
timestamp[us]date 2008-05-22 07:58:19
2023-08-21 12:35:19
⌀ | gha_language
stringclasses 149
values | src_encoding
stringclasses 26
values | language
stringclasses 1
value | is_vendor
bool 2
classes | is_generated
bool 2
classes | length_bytes
int64 3
10.2M
| extension
stringclasses 188
values | content
stringlengths 3
10.2M
| authors
sequencelengths 1
1
| author_id
stringlengths 1
132
|
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
d4e5bc7a6151595d47ed28485d1f1f78d491b708 | 56a4c42176d4ada861518e7ec59d56b8e4d658d0 | /mysite/news/utils.py | 8c2c3c489b443918e357ed566ef7f7c45fa6c616 | [] | no_license | ArsenAjiev/django_course | 69661430bc9c13e4ed481f7035226fb8dc835d9c | b43b35643fd67985ce4594cf573bdaba55f5896d | refs/heads/master | 2023-07-27T20:22:24.566257 | 2021-09-10T11:08:07 | 2021-09-10T11:08:07 | 401,633,420 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 109 | py | class MyMixin(object):
mixin_prop = ' '
def get_prop(self):
return self.mixin_prop.upper()
| [
"[email protected]"
] | |
254aeee6b83580bec0d8ee4fc5f8fd2ea24fd3b8 | f6cc50c14759c0e865528125b896a47b464c834d | /tests/test_radials.py | 9b44072b9ef542765d017a72ddccc1a0bb6c44b7 | [
"MIT"
] | permissive | kwilcox/codar_processing | 243ddea6eea909c3c44d3f02478e5f024f04e2d8 | 3a327f5378a6a9d78d263c8e7b317088823245c1 | refs/heads/master | 2020-08-27T11:55:12.440346 | 2020-01-09T20:05:13 | 2020-01-09T20:05:13 | 217,358,645 | 0 | 0 | MIT | 2019-10-24T17:44:10 | 2019-10-24T17:44:10 | null | UTF-8 | Python | false | false | 6,077 | py | from pathlib import Path
import numpy as np
import xarray as xr
from codar_processing.src.radials import Radial, concatenate_radials
data_path = (Path(__file__).parent.with_name('codar_processing') / 'data').resolve()
output_path = (Path(__file__).parent.with_name('output')).resolve()
def test_codar_radial_to_netcdf():
radial_file = data_path / 'radials' / 'SEAB' / 'RDLi_SEAB_2019_01_01_0000.ruv'
nc_file = output_path / 'radials_nc' / 'SEAB' / 'RDLi_SEAB_2019_01_01_0000.nc'
# Converts the underlying .data (natively a pandas DataFrame)
# to an xarray object when `create_netcdf` is called.
# This automatically 'enhances' the netCDF file
# with better variable names and attributes.
rad1 = Radial(radial_file)
rad1.export(str(nc_file), file_type='netcdf')
# Convert it to an xarray Dataset with no variable
# or attribte enhancements
xds2 = rad1.to_xarray(enhance=False)
# Convert it to xarray Dataset with increased usability
# by changing variables names, adding attributes,
# and decoding the CF standards like scale_factor
xds3 = rad1.to_xarray(enhance=True)
with xr.open_dataset(nc_file) as xds1:
# The two enhanced files should be identical
assert xds1.identical(xds3)
# Enhanced and non-enhanced files should not
# be equal
assert not xds1.identical(xds2)
def test_wera_radial_to_netcdf():
radial_file = data_path / 'radials' / 'WERA' / 'RDL_csw_2019_10_24_162300.ruv'
nc_file = output_path / 'radials_nc' / 'WERA' / 'RDL_csw_2019_10_24_162300.nc'
# Converts the underlying .data (natively a pandas DataFrame)
# to an xarray object when `create_netcdf` is called.
# This automatically 'enhances' the netCDF file
# with better variable names and attributes.
rad1 = Radial(radial_file)
rad1.export(str(nc_file), file_type='netcdf')
# Convert it to an xarray Dataset with no variable
# or attribte enhancements
xds2 = rad1.to_xarray(enhance=False)
# Convert it to xarray Dataset with increased usability
# by changing variables names, adding attributes,
# and decoding the CF standards like scale_factor
xds3 = rad1.to_xarray(enhance=True)
with xr.open_dataset(nc_file) as xds1:
# The two enhanced files should be identical
assert xds1.identical(xds3)
# Enhanced and non-enhanced files should not
# be equal
assert not xds1.identical(xds2)
def test_wera_mask():
radial_file = data_path / 'radials' / 'WERA' / 'RDL_csw_2019_10_24_162300.ruv'
rad1 = Radial(radial_file, mask_over_land=False, replace_invalid=False)
# Total points before masking
assert len(rad1.data) == 6327
rad1.mask_over_land()
# Make sure we subset the land points
assert len(rad1.data) == 5745
def test_wera_qc():
radial_file = data_path / 'radials' / 'WERA' / 'RDL_csw_2019_10_24_162300.ruv'
rad1 = Radial(radial_file, mask_over_land=False, replace_invalid=False)
assert len(rad1.data) == 6327
rad1.mask_over_land()
rad1.qc_qartod_radial_count()
rad1.qc_qartod_valid_location()
rad1.qc_qartod_maximum_velocity()
rad1.qc_qartod_spatial_median()
assert len(rad1.data) == 5745
assert 'QC07' in rad1.data
assert 'QC08' not in rad1.data # no VFLG column so we can't run it
assert 'QC09' in rad1.data
assert 'QC10' in rad1.data
def test_wera_raw_to_quality_nc():
radial_file = data_path / 'radials' / 'WERA' / 'RDL_csw_2019_10_24_162300.ruv'
nc_file = output_path / 'radials_qc_nc' / 'WERA' / 'RDL_csw_2019_10_24_162300.nc'
rad1 = Radial(radial_file, mask_over_land=False, replace_invalid=False)
rad1.mask_over_land()
rad1.qc_qartod_radial_count()
rad1.qc_qartod_valid_location()
rad1.qc_qartod_maximum_velocity()
rad1.qc_qartod_spatial_median()
rad1.export(str(nc_file), file_type='netcdf')
xds2 = rad1.to_xarray(enhance=True)
with xr.open_dataset(nc_file) as xds1:
assert len(xds1.QCTest) == 3 # no VFLG column so one test not run
# The two enhanced files should be identical
assert xds1.identical(xds2)
class TestCombineRadials:
file_paths = list(
(data_path / 'radials' / 'SEAB').glob('*.ruv')
)
radial_files = [
str(r) for r in file_paths
]
radial_objects = [
Radial(str(r)) for r in radial_files
]
# Select even indexed file_paths and odd indexed radial objects
# into one array of mixed content types for concating
radial_mixed = radial_files[::2] + radial_objects[1:][::2]
def test_concat_radial_objects(self):
combined = concatenate_radials(self.radial_objects)
assert combined.time.size == len(self.file_paths)
# Make sure the dataset was sorted by time
assert np.array_equal(
combined.time.values,
np.sort(combined.time.values)
)
def test_concat_radial_files(self):
combined = concatenate_radials(self.radial_files)
assert combined.time.size == len(self.file_paths)
# Make sure the dataset was sorted by time
assert np.array_equal(
combined.time.values,
np.sort(combined.time.values)
)
def test_concat_mixed_radials(self):
combined = concatenate_radials(self.radial_mixed)
assert combined.time.size == len(self.file_paths)
# Make sure the dataset was sorted by time
assert np.array_equal(
combined.time.values,
np.sort(combined.time.values)
)
def test_concat_mixed_radials_enhance(self):
# Select even indexed file_paths and odd indexed radial objects
# into one array of mixed content types for concating
combined = concatenate_radials(self.radial_mixed, enhance=True)
assert combined.time.size == len(self.file_paths)
# Make sure the dataset was sorted by time
assert np.array_equal(
combined.time.values,
np.sort(combined.time.values)
)
| [
"[email protected]"
] | |
1660e21b6c4bd243001029dc05cf5eea57eddffc | f24edb38dd4f7de8a7683afbbc9ab2a4237a361e | /venv/lib/python3.6/site-packages/pip/_internal/commands/uninstall.py | 5e5b3553b2a53d1bbe5ecf496384cd9ec36ecdc9 | [] | no_license | ngecu/automate_django_data_filling | 882220f84a6b4af5484d4b136c740a803ccccfd2 | d6b7095904878f06e4aae6beb2156113a6145c21 | refs/heads/main | 2023-02-26T02:36:26.582387 | 2021-01-31T15:50:22 | 2021-01-31T15:50:22 | 317,846,258 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,271 | py | from __future__ import absolute_import
from pip._vendor.packaging.utils import canonicalize_name
from pip._internal.cli.base_command import Command
from pip._internal.cli.req_command import SessionCommandMixin
from pip._internal.cli.status_codes import SUCCESS
from pip._internal.exceptions import InstallationError
from pip._internal.req import parse_requirements
from pip._internal.req.constructors import (
install_req_from_line,
install_req_from_parsed_requirement,
)
from pip._internal.utils.misc import protect_pip_from_modification_on_windows
from pip._internal.utils.typing import MYPY_CHECK_RUNNING
if MYPY_CHECK_RUNNING:
from optparse import Values
from typing import List
class UninstallCommand(Command, SessionCommandMixin):
"""
Uninstall packages.
pip is able to uninstall most installed packages. Known exceptions are:
- Pure distutils packages installed with ``python setup.py install``, which
leave behind no metadata to determine what files were installed.
- Script wrappers installed by ``python setup.py develop``.
"""
usage = """
%prog [options] <package> ...
%prog [options] -r <requirements file> ..."""
def add_options(self):
# type: () -> None
cmd_opts.add_option(
'-r', '--requirement',
dest='requirements',
action='append',
default=[],
metavar='file',
help='Uninstall all the packages listed in the given requirements '
'file. This option can be used multiple times.',
)
cmd_opts.add_option(
'-y', '--yes',
dest='yes',
action='store_true',
help="Don't ask for confirmation of uninstall deletions.")
parser.insert_option_group(0, cmd_opts)
def run(self, options, args):
# type: (Values, List[str]) -> int
session = get_default_session(options)
reqs_to_uninstall = {}
for name in args:
req = install_req_from_line(
name, isolated=options.isolated_mode,
)
if req.name:
reqs_to_uninstall[canonicalize_name(req.name)] = req
for filename in options.requirements:
for parsed_req in parse_requirements(
filename,
options=options,
session=session):
req = install_req_from_parsed_requirement(
parsed_req,
isolated=options.isolated_mode
)
if req.name:
reqs_to_uninstall[canonicalize_name(req.name)] = req
if not reqs_to_uninstall:
raise InstallationError(
'You must give at least one requirement to {name} (see '
'"pip help {name}")'.format(**locals())
)
protect_pip_from_modification_on_windows(
modifying_pip="pip" in reqs_to_uninstall
)
for req in reqs_to_uninstall.values():
uninstall_pathset = req.uninstall(
auto_confirm=options.yes, verbose=verbosity > 0,
)
if uninstall_pathset:
uninstall_pathset.commit()
return SUCCESS
| [
"[email protected]"
] | |
a9dc210095439b4c997744c603e4dc3dd0542810 | 048f2002ed13503d50428c8949c95a2e4f9bd532 | /contest/weekcontest179/timeInformAllEmployees.py | a91f058cbbb49cce0012cd60c5c105810753fc70 | [] | no_license | ZhengLiangliang1996/Leetcode_ML_Daily | 9c9330bd2c7bab5964fbd3827a27eeff5bd2c502 | 8cdb97bc7588b96b91b1c550afd84e976c1926e0 | refs/heads/master | 2023-04-06T19:52:23.524186 | 2023-03-30T21:08:57 | 2023-03-30T21:08:57 | 213,055,072 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 932 | py | #! /usr/bin/env python3
# -*- coding: utf-8 -*-
# vim:fenc=utf-8
#
# Copyright © 2022 liangliang <[email protected]>
#
# Distributed under terms of the MIT license.
class Solution(object):
def numOfMinutes(self, n, headID, manager, informTime):
"""
:type n: int
:type headID: int
:type manager: List[int]
:type informTime: List[int]
:rtype: int
"""
if n == 0: return 0
d = collections.defaultdict(list)
for i in range(len(manager)):
d[manager[i]].append(i)
res = informTime[headID]
# BFS traversal all
q, res = [(headID, 0)], 0
while q:
newQ = []
for (idx, time) in q:
res = max(res, time)
for k in d[idx]:
newQ += [(k, time + informTime[idx])]
q = newQ
return res
| [
"[email protected]"
] | |
0cae35ebd5a07c6f2ddafda22aafcab6985493dd | 786027545626c24486753351d6e19093b261cd7d | /ghidra9.2.1_pyi/ghidra/app/util/bin/format/coff/CoffSymbolAuxFunction.pyi | b98a12b2316fa9a794264f75a09d1c63d0c82264 | [
"MIT"
] | permissive | kohnakagawa/ghidra_scripts | 51cede1874ef2b1fed901b802316449b4bf25661 | 5afed1234a7266c0624ec445133280993077c376 | refs/heads/main | 2023-03-25T08:25:16.842142 | 2021-03-18T13:31:40 | 2021-03-18T13:31:40 | 338,577,905 | 14 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,795 | pyi | from typing import List
import ghidra.app.util.bin.format.coff
import ghidra.program.model.data
import java.lang
class CoffSymbolAuxFunction(object, ghidra.app.util.bin.format.coff.CoffSymbolAux):
ASCII: ghidra.program.model.data.DataType = char
BYTE: ghidra.program.model.data.DataType = byte
DWORD: ghidra.program.model.data.DataType = dword
IBO32: ghidra.program.model.data.DataType = ImageBaseOffset32
POINTER: ghidra.program.model.data.DataType = pointer
QWORD: ghidra.program.model.data.DataType = qword
STRING: ghidra.program.model.data.DataType = string
UTF16: ghidra.program.model.data.DataType = unicode
UTF8: ghidra.program.model.data.DataType = string-utf8
VOID: ghidra.program.model.data.DataType = void
WORD: ghidra.program.model.data.DataType = word
def equals(self, __a0: object) -> bool: ...
def getClass(self) -> java.lang.Class: ...
def getFilePointerToLineNumber(self) -> int: ...
def getNextEntryIndex(self) -> int: ...
def getSize(self) -> int: ...
def getTagIndex(self) -> int: ...
def getUnused(self) -> List[int]: ...
def hashCode(self) -> int: ...
def notify(self) -> None: ...
def notifyAll(self) -> None: ...
def toDataType(self) -> ghidra.program.model.data.DataType: ...
def toString(self) -> unicode: ...
@overload
def wait(self) -> None: ...
@overload
def wait(self, __a0: long) -> None: ...
@overload
def wait(self, __a0: long, __a1: int) -> None: ...
@property
def filePointerToLineNumber(self) -> int: ...
@property
def nextEntryIndex(self) -> int: ...
@property
def size(self) -> int: ...
@property
def tagIndex(self) -> int: ...
@property
def unused(self) -> List[int]: ...
| [
"[email protected]"
] | |
dccefe6f3232c64d27f4537dcf77df25ac6eabc3 | 62343cc4b4c44baef354f4552b449a9f53ca799e | /Model/engine/val_engine.py | 6d99465c5ef81d8810d06639485fbe399fba81eb | [] | no_license | xwjBupt/simpleval | 7c71d178657ae12ac1a5ac6f1275940023573884 | 87234e630d7801479575015b8c5bdd3588a3ceed | refs/heads/master | 2023-02-03T13:42:07.013196 | 2020-12-25T09:08:01 | 2020-12-25T09:08:01 | 324,154,886 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 582 | py | from util import registry
from .infer_engine import InferEngine
@registry.register_module('engine')
class ValEngine(InferEngine):
def __init__(self, model, meshgrid, converter, num_classes, use_sigmoid,
test_cfg, eval_metric):
super().__init__(model, meshgrid, converter, num_classes, use_sigmoid,
test_cfg)
self.eval_metric = eval_metric
def forward(self, data):
return self.forward_impl(**data)
def forward_impl(self, img, img_metas):
dets = self.infer(img, img_metas)
return dets
| [
"[email protected]"
] | |
30f3e4c9086c9f2e7f65a9ee018ca3c7b7c1fa32 | 73e80e6b6c4b00fddf100e2d6da852eeb7c75245 | /index.py | 4a7f0f90ca10423cdf6ec5b783be1f4650e66979 | [
"MIT"
] | permissive | zhoujie0053/test01 | cdf6da575be45fcf126dc3aacc8824412a1e4da4 | 8dfb1a21d7d95560e6f8f7543415fbedcb6218e4 | refs/heads/master | 2020-04-21T23:14:48.138880 | 2019-02-10T04:22:18 | 2019-02-10T04:22:18 | 169,940,533 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 20 | py | a = 1
b = 5
c = 0
| [
"[email protected]"
] | |
1a87dc748de7261e1f06a57f98bf07ce9f709d73 | b6fc54cff7037f5e4ef26cb4a645d5ea5a6fecdf | /001146StepikPyBegin/Stepik001146PyBeginсh07p05st06С03_my__20200422.py | 30ca61a2153e84595a3e1fc8625d4159136926a7 | [
"Apache-2.0"
] | permissive | SafonovMikhail/python_000577 | 5483eaf2f7c73bc619ce1f5de67d8d689d2e7dd4 | f2dccac82a37df430c4eb7425b5d084d83520409 | refs/heads/master | 2022-12-08T10:53:57.202746 | 2022-12-07T09:09:51 | 2022-12-07T09:09:51 | 204,713,341 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,110 | py | '''
Дано натуральное число n. Напишите программу, которая определяет его максимальную и минимальную цифры.
Формат входных данных
На вход программе подается одно натуральное число.
Формат выходных данных
Программа должна вывести максимальную и минимальную цифры введенного числа (с поясняющей надписью).
Sample Input 1:
26670
Sample Output 1:
Максимальная цифра равна 7
Минимальная цифра равна 0
'''
# num1 = int(input())
num1 = 123456
max1 = -1
min1 = 10
# print(num1)
while num1 != 0:
num2 = num1 % 10
# print(num2)
if min1 > num2:
min1 = num2
# print("min1: ", min1)
if max1 < num2:
max1 = num2
# print("max1: ", max1)
num1 = num1 // 10
print("Максимальная цифра равна", max1)
print("Минимальная цифра равна", min1)
| [
"[email protected]"
] | |
e70a8301be7918577b324e3ba6cd05445470022b | b3742c32903fa8fd6489033a3be3b4a597b734e2 | /venv/Scripts/pip3.7-script.py | 398ffa63ec58c5c25822257ce1bb9aef7d50de75 | [] | no_license | lokeshrapala/onlne6amproj1 | fceef41e6482c4f627c53207ba60efe1db24c16f | 9836c85b4a984ad6275080ab0c32a106c095bbfe | refs/heads/master | 2020-04-30T13:01:13.298204 | 2019-03-21T01:12:42 | 2019-03-21T01:12:42 | 176,843,064 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 421 | py | #!C:\Users\LOKESH\PycharmProjects\onlne6amproj1\venv\Scripts\python.exe
# EASY-INSTALL-ENTRY-SCRIPT: 'pip==9.0.1','console_scripts','pip3.7'
__requires__ = 'pip==9.0.1'
import re
import sys
from pkg_resources import load_entry_point
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(
load_entry_point('pip==9.0.1', 'console_scripts', 'pip3.7')()
)
| [
"lokesh.rapala"
] | lokesh.rapala |
aa7408e34b14224a77543b589599733eb3a7af96 | f0b8014a49ffe582526cce876f62c5d9aeb95009 | /database.py | e6a6d25103c3bf97818e4c63c3cd90a0de8587b9 | [
"MIT"
] | permissive | DrLapis/omnitool | 737a8b3a55ca12653b34624ac368d1da08ede3e2 | 248a80f8ff30a3d1e3514a55dea102a203c952a8 | refs/heads/master | 2020-12-24T10:32:32.928775 | 2015-08-04T16:24:01 | 2015-08-04T16:24:01 | 40,193,455 | 0 | 0 | null | 2015-08-04T15:39:13 | 2015-08-04T15:39:11 | Python | UTF-8 | Python | false | false | 100,405 | py | version = 39
versions = {22: "1.0.6.1",
12: "1.0.5",
36: "1.1",
39: "1.1.2",
100 : "1.2.4",
102 : "1.2.4.1",
}
cmod = False
npclist = ["Merchant", "Nurse", "Arms Dealer", "Dryad", "Guide", "Clothier",
"Demolitionist", "Goblin Tinkerer", "Wizard",
"Mechanic", "Old Man"]
names = ['Barney', 'Madeline', 'Manolinho', 'Caelia',
'Berserker66', 'Lloyd', 'Ijwu', 'Jenda', '7UR7L3', 'Sinna']
otiles = ['Dirt Block', 'Stone Block', 'Grass Block', 'Grass Flowers', 'Torch', 'Tree', 'Iron Ore', 'Copper Ore',
'Gold Ore', 'Silver Ore', 'Door Closed', 'Door Open', 'Crystal Heart', 'Bottle', 'Table', 'Chair',
'Iron Anvil', 'Furnace', 'Work Bench', 'Platform', 'Acorn', 'Chest', 'Demonite Ore', 'Corrupt Grass Block',
'Corruption Plants', 'Ebonstone Block', 'Altar', 'Sunflower', 'Pot', 'Piggy Bank', 'Wood', 'Orb Heart',
'Corruption Thorns', 'Candle', 'Chandelier', "Jack 'O Lantern", 'Present', 'Meteorite', 'Gray Brick',
'Red Brick', 'Clay Block', 'Blue Brick', 'Chain Lantern', 'Green Brick', 'Pink Brick', 'Gold Brick',
'Silver Brick', 'Copper Brick', 'Spike', 'Water Candle', 'Book', 'Cobweb', 'Vines', 'Sand Block', 'Glass',
'Sign', 'Obsidian', 'Ash Block', 'Hellstone', 'Mud Block', 'Jungle Grass Block', 'Jungle Plants',
'Jungle Vines', 'Gem Sapphire', 'Gem Ruby', 'Gem Emerald', 'Gem Topaz', 'Gem Amethyst', 'Gem Diamond',
'Jungle Thorns', 'Mushroom Grass Block', 'Mushroom Plant', 'Mushroom Tree', 'Tall Grass', 'Tall Jungle Grass',
'Obsidian Brick', 'Hellstone Brick', 'Hellforge', 'Clay Pot', 'Bed', 'Cactus', 'Coral', 'Daybloom Seeds',
'Herb Mature', 'Herb Bloom', 'Grave Marker', 'Loom', 'Piano', 'Dresser', 'Bench', 'Bathtub', 'Banner',
'Lamp Post', 'Lamp', 'Keg', 'Chinese Lantern', 'Cooking Pot', 'Safe', 'Skull Lantern', 'Trash Can',
'Candelabra', 'Bookcase', 'Throne', 'Bowl', 'Grandfather Clock', 'Statue', 'Sawmill', 'Cobalt Ore',
'Mythril Ore', 'Hallowed Grass Block', 'Hallowed Plants', 'Adamantite Ore', 'Ebonsand Block',
'Hallowed Tall Plants', "Tinkerer's Workshop", 'Hallowed Vines', 'Pearlsand Block', 'Pearlstone Block',
'Pearlstone Brick', 'Iridescent Brick', 'Mudstone Block', 'Cobalt Brick', 'Mythril Brick', 'Silt Block',
'Wooden Beam', 'Crystal Ball', 'Disco Ball', 'Ice (Ice Rod)', 'Mannequin', 'Crystal Shard',
'Active Stone Block', 'Inactive Stone Block', 'Lever', 'Adamantite Forge', 'Mythril Anvil', 'Pressure Plate',
'Switch', 'Dart Trap', 'Boulder', 'Music Box (Overworld Day)', 'Demonite Brick', 'Explosives', 'Inlet Pump',
'Outlet Pump', '1 Second Timer', 'Candy Cane Block', 'Green Candy Cane Block', 'Snow Block', 'Snow Brick',
'Blue Light', 'Adamantite Beam', 'Sandstone Brick', 'Ebonstone Brick', 'Red Stucco', 'Yellow Stucco',
'Green Stucco', 'Gray Stucco', 'Ebonwood', 'Rich Mahogany', 'Pearlwood', 'Rainbow Brick', 'Ice Block',
'Thin Ice', 'Purple Ice Block', 'Pink Ice Block', 'Large Deco', 'Tin Ore', 'Lead Ore', 'Tungsten Ore',
'Platinum Ore', 'Pine Tree Block', 'Christmas Tree', 'Sink', 'Platinum Candelabra', 'Platinum Candle',
'Tin Brick', 'Tungsten Brick', 'Platinum Brick', 'Sapphire', 'Green Moss', 'Yellow Moss', 'Red Moss',
'Blue Moss', 'Purple Moss', 'Moss Plants', 'Small Decos', '3x2 Decos', '3x2 Decos', 'Cactus', 'Cloud',
'Glowing Mushroom', 'Living Wood Wand', 'Leaf Wand', 'Slime Block', 'Bone Block', 'Flesh Block', 'Rain Cloud',
'Frozen Slime Block', 'Asphalt Block', 'Crimson Grass Block', 'Red Ice Block', 'Crimson Grass',
'Sunplate Block', 'Crimstone Block', 'Crimtane Ore', 'Crimson Vine', 'Ice Brick', 'Water Fountain',
'Shadewood', 'Cannon', 'Land Mine', 'Chlorophyte Ore', 'Snowball Launcher', 'Rope', 'Chain', 'Campfire',
'Rocket', 'Blend-O-Matic', 'Meat Grinder', 'Extractinator', 'Solidifier', 'Palladium Ore', 'Orichalcum Ore',
'Titanium Ore', 'Slush Block', 'Hive Wand', 'Lihzahrd Brick', 'Dye Plant', 'Dye Vat', 'Honey Block',
'Crispy Honey Block', 'Larva', 'Wooden Spike', 'Jungle Vegetation', 'Crimsand Block', 'Teleporter',
'Life Fruit Plant', 'Lihzahrd Altar', "Plantera's Bulb", 'Gold Bar', '3x3 Wall Hangings', 'Catacomb',
'6x4 Painting', 'Imbuing Station', 'Bubble Machine', '2x3 Painting', '3x2 Painting', 'Autohammer',
'Palladium Column', 'Bubblegum Block', 'Titanstone Block', 'Pumpkin', 'Hay', 'Spooky Wood', 'Pumpkin Plant',
'Amethyst Gemspark Block (Offline)', 'Topaz Gemspark Block (Offline)', 'Sapphire Gemspark Block (Offline)',
'Emerald Gemspark Block (Offline)', 'Ruby Gemspark Block (Offline)', 'Diamond Gemspark Block (Offline)',
'Amber Gemspark Block (Offline)', 'Amethyst Gemspark Block', 'Topaz Gemspark Block',
'Sapphire Gemspark Block', 'Emerald Gemspark Block', 'Ruby Gemspark Block', 'Diamond Gemspark Block',
'Amber Gemspark Block', 'Womannequin', 'Firefly in a Bottle', 'Lightning Bug in a Bottle', 'Cog',
'Stone Slab', 'Sandstone Slab', 'Bunny Cage', 'Squirrel Cage', 'Mallard Duck Cage', 'Duck Cage', 'Bird Cage',
'Blue Jay Cage', 'Cardinal Cage', 'Fish Bowl', 'Heavy Work Bench', 'Copper Plating', 'Snail Cage',
'Glowing Snail Cage', 'Ammo Box', 'Monarch Butterfly Jar', 'Purple Emperor Butterfly Jar',
'Red Admiral Butterfly Jar', 'Ulysses Butterfly Jar', 'Sulphur Butterfly Jar', 'Tree Nymph Butterfly Jar',
'Zebra Swallowtail Butterfly Jar', 'Julia Butterfly Jar', 'Scorpion Cage', 'Black Scorpion Cage', 'Frog Cage',
'Mouse Cage', 'Bone Welder', 'Flesh Cloning Vat', 'Glass Kiln', 'Lihzahrd Furnace', 'Living Loom', 'Sky Mill',
'Ice Machine', 'Steampunk Boiler', 'Honey Dispenser', 'Penguin Cage', 'Worm Cage', 'Dynasty Wood',
'Red Dynasty Shingles', 'Blue Dynasty Shingles', 'Minecart Track', 'Coralstone Block', 'Blue Jellyfish Jar',
'Green Jellyfish Jar', 'Pink Jellyfish Jar', 'Ship in a Bottle', 'Seaweed Planter', 'Boreal Wood',
'Palm Wood', 'Palm Tree', 'Seashell', 'Tin Plating', 'Waterfall Block', 'Lavafall Block', 'Confetti Block',
'Midnight Confetti Block', 'Copper Coin Pile', 'Silver Coin Pile', 'Gold Coin Pile', 'Platinum Coin Pile',
'Weapon Rack', 'Fireworks Box', 'Living Fire Block', 'Text Statue', 'Firework Fountain', 'Grasshopper Cage',
'Living Cursed Fire', 'Living Demon Fire', 'Living Frost Fire', 'Living Ichor', 'Living Ultrabright Fire',
'Honeyfall Block', 'Chlorophyte Brick', 'Crimtane Brick', 'Shroomite Plating', 'Mushroom Statue',
'Martian Conduit Plating', 'Chimney Smoke', 'Crimtane Thorns', 'Vine Rope', 'Bewitching Table',
'Alchemy Table', 'Enchanted Sundial', 'Marble Block', 'Gold Bird Cage', 'Gold Bunny Cage',
'Gold Butterfly Cage', 'Gold Frog Cage', 'Gold Grasshopper Cage', 'Gold Mouse Cage', 'Gold Worm Cage',
'Silk Rope', 'Web Rope', 'Marble', 'Granite', 'Granite Block', 'Meteorite Brick', 'Pink Slime Block',
'Peace Candle', 'Water Drip', 'Lava Drip', 'Honey Drip', 'Fishing Crate', 'Sharpening Station',
'Target Dummy', 'Bubble', 'Planter Box', 'Lava Moss', 'Vine Flowers', 'Living Mahogany',
'Living MahoganyLeaves', 'Crystal Block', 'Trapdoor Open', 'Trapdoor Closed', 'Tall Gate Closed',
'Tall Gate Open', 'Lava Lamp', 'Cage Enchanted Nightcrawler', 'Buggy Cage', 'Grubby Cage', 'Sluggy Cage',
'Item Frame', 'Sandstone', 'Hardened Sand', 'Corrupt Hardened Sand', 'Crimson Hardened Sand',
'Corrupt Sandstone', 'Crimson Sandstone', 'Hallow Hardened Sand', 'Hallow Sandstone', 'Desert Fossil',
'Fireplace', 'Chimney', 'Fossil Ore', 'Lunar Ore', 'Lunar Brick', 'Lunar Monolith', 'Detonator',
'Lunar Crafting Station', 'Red Squirrel Cage', 'Gold Squirrel Cage', 'Lunar Block Solar',
'Lunar Block Vortex', 'Lunar Block Nebula', 'Lunar Block Stardust']
owalls = ['WallNone', 'WallStone', 'WallDirt',
'WallEbonstone', 'WallWood', 'WallBrick', 'WallRed', 'WallBlue',
'WallGreen', 'WallPink', 'WallGold', 'WallSilver', 'WallCopper',
'WallHellstone', "WallDarkBlue", "WallAsh", "WallDirt2", "WallBlue2",
"WallGreen2", "WallPink2", "WallDarkBlue2"]
while len(owalls) < 111:
owalls.append("Wall%d" % (1 + len(owalls)))
multitiles = {3, 4, 5, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20,
21, 24, 26, 27, 28, 29, 31, 33, 34, 35, 36, 42, 50, 55, 61, 71, 72,
73, 74, 77, 78, 79, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92,
93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 110,
113, 114, 125, 126, 128, 129, 132, 133, 134, 135, 136, 137, 138, 139,
141, 142, 143, 144, 149, 165, 171, 172, 173, 174, 178, 184, 185,
186, 187, 201, 207, 209, 210, 212, 215, 216, 217, 218, 219, 220, 227,
228, 231, 233, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245,
246, 247, 269, 270, 271, 275, 276, 277, 278, 279, 280, 281, 282, 283,
285, 286, 287, 288, 289, 290, 291, 292, 293, 294, 295, 296, 297, 298,
299, 300, 301, 302, 303, 304, 305, 306, 307, 308, 309, 310, 314, 316,
317, 318, 319, 320, 323, 324, 334, 335, 337, 338, 339, 254, 349, 354,
355, 356, 358, 359, 360, 361, 362, 363, 364, 372, 373, 374, 375, 376,
377, 378, 380, 386, 387, 388, 389, 390, 391, 392, 393, 394, 395, 405,
406, 410, 411, 412, 413, 414}
tiles = []
walls = []
nmultitiles = []
ntiles = {}
nwalls = {}
def get_custom_db(txt):
import colorlib
colors = colorlib.data.copy()
wallcolors = colorlib.walldata.copy()
ntiles = {}
x = 0
for t in tiles:
ntiles[x] = t
x += 1
nwalls = {}
x = 0
for t in walls:
nwalls[x] = t
x += 1
nmultitiles = multitiles[:]
for t in txt["tiles"]:
ntiles[t[0]] = t[1]
if t[2]: nmultitiles.append(t[0])
colors[t[0]] = t[4]
for w in txt["walls"]:
nwalls[w[0]] = w[1]
wallcolors[w[0]] = w[2]
return ntiles, nwalls, nmultitiles, colors, wallcolors
def parse():
for t in otiles:
try:
tiles.append(unicode(t))
except NameError:
tiles.append(t)
for t in owalls:
try:
walls.append(unicode(t))
except NameError:
walls.append(t)
i = 0
for name in tiles:
ntiles[name] = i
i += 1
i = 0
for name in walls:
nwalls[name] = i
i += 1
for m in multitiles:
nmultitiles.append(tiles[m])
parse()
if __name__ == "__main__":
print("Tiledata:")
for t, i in zip(tiles, range(len(tiles))):
print("%2d %s" % (i, t))
print("")
print("Walldata:")
for t, i in zip(walls, range(len(walls))):
print("%2d %s" % (i, t))
print("")
print("Multitiles:")
for t, i in zip(multitiles, nmultitiles):
print("%2d %s" % (t, i))
multitilestrides = {
3: 18,
4: 22,
5: 22,
10: 18,
11: 18,
12: 18,
13: 54,
14: 18,
15: 18,
16: 18,
18: 18,
19: 18,
20: 18,
21: 18,
24: 18,
26: 18,
28: 18,
29: 18,
31: 18,
33: 22,
34: 18,
35: 18,
36: 18,
42: 18,
50: 18,
55: 18,
61: 18,
71: 18,
72: 18,
73: 18,
74: 18,
79: 18,
81: 26,
82: 18,
83: 18,
84: 18,
85: 18,
87: 18,
88: 18,
89: 18,
90: 18,
91: 144,
93: 18,
96: 18,
100: 18,
101: 18,
103: 54,
104: 18,
105: 18,
106: 18,
110: 18,
113: 18,
114: 18,
125: 0,
126: 0,
136: 18,
139: 18,
141: 18,
142: 18,
143: 18,
144: 18,
165: 18,
174: 18,
184: 18,
185: 18,
186: 18,
187: 18,
207: 18,
209: 18,
215: 18,
216: 18,
233: 18,
236: 18,
238: 18,
239: 18,
240: 18,
242: 18,
245: 18,
246: 18,
247: 54,
270: 18,
271: 18,
275: 18,
276: 18,
277: 18,
278: 18,
279: 18,
280: 18,
281: 18,
282: 18,
285: 18,
286: 18,
288: 18,
289: 18,
290: 18,
291: 18,
292: 18,
293: 18,
294: 18,
295: 18,
296: 18,
297: 18,
298: 18,
299: 18,
300: 18,
302: 18,
306: 18,
309: 18,
310: 18,
316: 18,
317: 18,
318: 18,
320: 18,
323: 22,
324: 22,
334: 18,
339: 18}
manual_strides = {77 : 18}
multitilestrides.update(manual_strides)
itemlist = {
"Swiftness Potion": 30,
"Green Banner": 99,
"Lesser Mana Potion": 30,
"Poisoned Knife": 250,
"Diving Helmet": 1,
"Glass": 250,
"Summer Hat": 1,
"Goldfish": 99,
"Plumber's Hat": 1,
"Staff of Regrowth": 1,
"Flower of Fire": 1,
"Green Dye": 99,
"Topaz": 99,
"Jungle Spores": 99,
"Obsidian": 250,
"Barrel": 99,
"Anklet of the Wind": 1,
"Ninja Shirt": 1,
"Silver Axe": 1,
"Demon Bow": 1,
"Familiar Pants": 1,
"Cobweb": 250,
"Silver Brick Wall": 250,
"Meteor Hamaxe": 1,
"Glowing Mushroom": 99,
"Piggy Bank": 99,
"Fireblossom Seeds": 99,
"Wooden Boomerang": 1,
"Deathweed Seeds": 99,
"Gold Bar": 99,
"Iron Greaves": 1,
"Robe": 1,
"Hero's Hat": 1,
"Molten Fury": 1,
"Gold Broadsword": 1,
"Gold Brick": 250,
"Water Walking Potion": 30,
"Shadow Key": 1,
"Wood Platform": 99,
"Skull Lantern": 99,
"Whoopie Cushion": 1,
"Aqua Scepter": 1,
"Molten Pickaxe": 1,
"Gold Coin": 100,
"Restoration Potion": 20,
"Gold Bow": 1,
"Seed": 250,
"Iron Pickaxe": 1,
"Copper Ore": 99,
"Pink Vase": 99,
"Book": 99,
"Piano": 99,
"Necro Helmet": 1,
"Copper Helmet": 1,
"Sticky Bomb": 20,
"Sand Block": 250,
"Gold Shortsword": 1,
"Bookcase": 99,
"Statue": 99,
"Enchanted Boomerang": 1,
"Copper Watch": 1,
"Leather": 99,
"Pink Brick": 250,
"Bed": 99,
"Mime Mask": 1,
"Iron Ore": 99,
"Bottle": 99,
"Hellstone": 250,
"Thorn Chakram": 1,
"Iron Hammer": 1,
"Shadow Greaves": 1,
"Silver Ore": 99,
"Goblin Battle Standard": 1,
"Iron Anvil": 99,
"Necro Breastplate": 1,
"Gold Chainmail": 1,
"Tiki Torch": 99,
"Sawmill": 99,
"Chain Lantern": 250,
"Clay Pot": 99,
"Yellow Phaseblade": 1,
"Molten Greaves": 1,
"Jungle Grass Seeds": 99,
"Acorn": 99,
"Lamp Post": 99,
"Jungle Hat": 1,
"Ninja Hood": 1,
"Rotten Chunk": 99,
"Tombstone": 99,
"Aglet": 1,
"Invisibility Potion": 30,
"Silver Greaves": 1,
"Tuxedo Pants": 1,
"Silver Watch": 1,
"Ale": 30,
"Suspicious Looking Eye": 1,
"Stone Wall": 250,
"Nature's Gift": 1,
"Night Owl Potion": 30,
"Wood Wall": 250,
"Jungle Shirt": 1,
"Shadow Scalemail": 1,
"Red Phaseblade": 1,
"Silver Bow": 1,
"Daybloom": 99,
"Meteorite": 250,
"Copper Hammer": 1,
"Clay Block": 250,
"Wooden Door": 99,
"Flaming Arrow": 250,
"Illegal Gun Parts": 99,
"Wooden Sword": 1,
"Daybloom Seeds": 99,
"Plumber's Pants": 1,
"Sticky Glowstick": 99,
"Familiar Wig": 1,
"Emerald": 99,
"Water Bucket": 1,
"Wooden Table": 99,
"Throne": 99,
"Fiery Greatsword": 1,
"Depth Meter": 1,
"Jungle Pants": 1,
"Stinger": 99,
"Hermes Boots": 1,
"Rocket Boots": 1,
"Pink Brick Wall": 250,
"Cobalt Shield": 1,
"Jungle Rose": 1,
"The Doctor's Shirt": 1,
"Work Bench": 99,
"Iron Chain": 99,
"Demonite Ore": 99,
"Mana Crystal": 99,
"Shiny Red Balloon": 1,
"Mug": 99,
"Dynamite": 3,
"Silver Helmet": 1,
"Dresser": 99,
"Thorns Potion": 30,
"Keg": 99,
"Gold Watch": 1,
"Iron Bow": 1,
"Chinese Lantern": 99,
"Iron Shortsword": 1,
"Archaeologist's Jacket": 1,
"Diamond": 99,
"Flipper": 1,
"Dark Lance": 1,
"Phoenix Blaster": 1,
"Unholy Arrow": 250,
"Spear": 1,
"Vine": 99,
"Gold Axe": 1,
"Sign": 250,
"Worm Food": 1,
"Red Hat": 1,
"Ash Block": 250,
"Meteorite Bar": 99,
"Gold Pickaxe": 1,
"Shadow Scale": 99,
"Red Brick Wall": 250,
"Tattered Cloth": 99,
"Grappling Hook": 1,
"Lesser Restoration Potion": 20,
"Antlion Mandible": 99,
"Silver Broadsword": 1,
"Robot Hat": 1,
"Minishark": 1,
"Gills Potion": 30,
"Shadow Chest": 99,
"Silver Brick": 250,
"Bunny Hood": 1,
"Iron Bar": 99,
"Black Lens": 99,
"Ninja Pants": 1,
"Star Cannon": 1,
"Silver Chandelier": 99,
"Green Phaseblade": 1,
"Featherfall Potion": 30,
"Fireblossom": 99,
"Moonglow": 99,
"Silver Pickaxe": 1,
"Iron Helmet": 1,
"Yellow Banner": 99,
"Starfury": 1,
"Mushroom": 99,
"Silver Bar": 99,
"Purification Powder": 99,
"Glowstick": 99,
"Gold Hammer": 1,
"Wooden Chair": 99,
"Platinum Coin": 100,
"Copper Brick": 250,
"Purple Phaseblade": 1,
"Mana Potion": 30,
"Bone": 99,
"Gold Brick Wall": 250,
"Meteor Helmet": 1,
"Night's Edge": 1,
"Red Banner": 99,
"Sunflower": 99,
"Worm Tooth": 99,
"Nightmare Pickaxe": 1,
"Ball O' Hurt": 1,
"Familiar Shirt": 1,
"Amethyst": 99,
"Mining Helmet": 1,
"Dirt Block": 250,
"Green Brick": 250,
"Gold Crown": 1,
"Lesser Healing Potion": 30,
"Ironskin Potion": 30,
"Iron Chainmail": 1,
"Green Brick Wall": 250,
"Black Dye": 99,
"Obsidian Brick": 250,
"Grass Seeds": 99,
"Moonglow Seeds": 99,
"Copper Bow": 1,
"Corrupt Seeds": 99,
"Wooden Arrow": 250,
"Hellstone Bar": 99,
"Vile Mushroom": 99,
"Angel Statue": 1,
"Meteor Leggings": 1,
"White Phaseblade": 1,
"Gold Greaves": 1,
"Gold Chandelier": 99,
"Demonite Bar": 99,
"Band of Regeneration": 1,
"Copper Greaves": 1,
"Necro Greaves": 1,
"Silver Shortsword": 1,
"Waterleaf": 99,
"Waterleaf Seeds": 99,
"Candle": 99,
"Spelunker Potion": 30,
"Archaeologist's Hat": 1,
"Mud Block": 250,
"Cloud in a Bottle": 1,
"The Breaker": 1,
"Molten Breastplate": 1,
"Bench": 99,
"Musket Ball": 250,
"War Axe of the Night": 1,
"Feather": 99,
"Shark Fin": 99,
"Guide Voodoo Doll": 1,
"Silver Bullet": 250,
"Gel": 99,
"The Doctor's Pants": 1,
"Blue Brick": 250,
"Stone Block": 250,
"Archery Potion": 30,
"Wizard Hat": 1,
"Copper Chainmail": 1,
"Hook": 99,
"Jester's Arrow": 250,
"Iron Axe": 1,
"Lava Bucket": 1,
"Copper Chandelier": 99,
"Molten Hamaxe": 1,
"Dirt Wall": 250,
"Sunfury": 1,
"Chest": 99,
"Wood": 250,
"Regeneration Potion": 30,
"Copper Shortsword": 1,
"Empty Bucket": 1,
"Sunglasses": 1,
"Musket": 1,
"Hero's Pants": 1,
"Gravitation Potion": 30,
"Silk": 99,
"Band of Starpower": 1,
"Copper Pickaxe": 1,
"Coral": 250,
"Cooking Pot": 99,
"Silver Chainmail": 1,
"Blinkroot": 99,
"Magic Mirror": 1,
"Battle Potion": 30,
"Red Brick": 250,
"Flamelash": 1,
"Harpoon": 1,
"Cactus": 250,
"Hellstone Brick": 250,
"Magic Power Potion": 30,
"Vile Powder": 99,
"Spike": 250,
"Tuxedo Shirt": 1,
"Vilethorn": 1,
"Molten Helmet": 1,
"Life Crystal": 99,
"Hellfire Arrow": 250,
"Orb of Light": 1,
"Trident": 1,
"Wooden Hammer": 1,
"Handgun": 1,
"Mana Regeneration Potion": 30,
"Gray Brick Wall": 250,
"Flintlock Pistol": 1,
"Fish Bowl": 1,
"Bathtub": 99,
"Heart": 1,
"Bomb": 20,
"Blue Brick Wall": 250,
"Shuriken": 250,
"Light's Bane": 1,
"Copper Coin": 100,
"Throwing Knife": 250,
"Furnace": 99,
"Silver Hammer": 1,
"Sandgun": 1,
"Hero's Shirt": 1,
"Safe": 99,
"Top Hat": 1,
"Water Bolt": 1,
"Candelabra": 99,
"Ivy Whip": 1,
"Ruby": 99,
"Gold Ore": 99,
"Loom": 99,
"Obsidian Brick Wall": 250,
"Ebonstone Block": 250,
"Silver Coin": 100,
"Flamarang": 1,
"Blade of Grass": 1,
"Breathing Reed": 1,
"Toilet": 99,
"Meteor Shot": 250,
"Dirt Rod": 1,
"Goggles": 1,
"Iron Broadsword": 1,
"Grandfather Clock": 99,
"Spiky Ball": 250,
"Lucky Horseshoe": 1,
"Obsidian Skull": 1,
"Meteor Suit": 1,
"Bowl": 99,
"Mushroom Grass Seeds": 99,
"Golden Key": 99,
"Gray Brick": 250,
"Lens": 99,
"Archaeologist's Pants": 1,
"Bowl of Soup": 30,
"Gold Helmet": 1,
"Shadow Helmet": 1,
"Copper Brick Wall": 250,
"Star": 1,
"Hellforge": 99,
"Deathweed": 99,
"Grenade": 20,
"Magic Missile": 1,
"Muramasa": 1,
"Torch": 99,
"Gold Chest": 99,
"Blue Banner": 99,
"Bottled Water": 30,
"Blue Phaseblade": 1,
"Wooden Bow": 1,
"Shackle": 1,
"Trash Can": 99,
"Blowpipe": 1,
"Space Gun": 1,
"Copper Broadsword": 1,
"Fallen Star": 100,
"Plumber's Shirt": 1,
"Demon Scythe": 1,
"Healing Potion": 30,
"Blue Moon": 1,
"Feral Claws": 1,
"Sapphire": 99,
"Water Candle": 99,
"Copper Axe": 1,
"Blinkroot Seeds": 99,
"Hunter Potion": 30,
"Copper Bar": 99,
"Obsidian Skin Potion": 30,
"Shine Potion": 30,
}
items = {1: 'Iron Pickaxe', 2: 'Dirt Block', 3: 'Stone Block', 4: 'Iron Broadsword', 5: 'Mushroom',
6: 'Iron Shortsword', 7: 'Iron Hammer', 8: 'Torch', 9: 'Wood', 10: 'Iron Axe', 11: 'Iron Ore',
12: 'Copper Ore', 13: 'Gold Ore', 14: 'Silver Ore', 15: 'Copper Watch', 16: 'Silver Watch', 17: 'Gold Watch',
18: 'Depth Meter', 19: 'Gold Bar', 20: 'Copper Bar', 21: 'Silver Bar', 22: 'Iron Bar', 23: 'Gel',
24: 'Wooden Sword', 25: 'Wooden Door', 26: 'Stone Wall', 27: 'Acorn', 28: 'Lesser Healing Potion',
29: 'Life Crystal', 30: 'Dirt Wall', 31: 'Bottle', 32: 'Wooden Table', 33: 'Furnace', 34: 'Wooden Chair',
35: 'Iron Anvil', 36: 'Work Bench', 37: 'Goggles', 38: 'Lens', 39: 'Wooden Bow', 40: 'Wooden Arrow',
41: 'Flaming Arrow', 42: 'Shuriken', 43: 'Suspicious Looking Eye', 44: 'Demon Bow', 45: 'War Axe of the Night',
46: "Light's Bane", 47: 'Unholy Arrow', 48: 'Chest', 49: 'Band of Regeneration', 50: 'Magic Mirror',
51: "Jester's Arrow", 52: 'Angel Statue', 53: 'Cloud in a Bottle', 54: 'Hermes Boots',
55: 'Enchanted Boomerang', 56: 'Demonite Ore', 57: 'Demonite Bar', 58: 'Heart', 59: 'Corrupt Seeds',
60: 'Vile Mushroom', 61: 'Ebonstone Block', 62: 'Grass Seeds', 63: 'Sunflower', 64: 'Vilethorn',
65: 'Starfury', 66: 'Purification Powder', 67: 'Vile Powder', 68: 'Rotten Chunk', 69: 'Worm Tooth',
70: 'Worm Food', 71: 'Copper Coin', 72: 'Silver Coin', 73: 'Gold Coin', 74: 'Platinum Coin', 75: 'Fallen Star',
76: 'Copper Greaves', 77: 'Iron Greaves', 78: 'Silver Greaves', 79: 'Gold Greaves', 80: 'Copper Chainmail',
81: 'Iron Chainmail', 82: 'Silver Chainmail', 83: 'Gold Chainmail', 84: 'Grappling Hook', 85: 'Chain',
86: 'Shadow Scale', 87: 'Piggy Bank', 88: 'Mining Helmet', 89: 'Copper Helmet', 90: 'Iron Helmet',
91: 'Silver Helmet', 92: 'Gold Helmet', 93: 'Wood Wall', 94: 'Wood Platform', 95: 'Flintlock Pistol',
96: 'Musket', 97: 'Musket Ball', 98: 'Minishark', 99: 'Iron Bow', 100: 'Shadow Greaves',
101: 'Shadow Scalemail', 102: 'Shadow Helmet', 103: 'Nightmare Pickaxe', 104: 'The Breaker', 105: 'Candle',
106: 'Copper Chandelier', 107: 'Silver Chandelier', 108: 'Gold Chandelier', 109: 'Mana Crystal',
110: 'Lesser Mana Potion', 111: 'Band of Starpower', 112: 'Flower of Fire', 113: 'Magic Missile',
114: 'Dirt Rod', 115: 'Shadow Orb', 116: 'Meteorite', 117: 'Meteorite Bar', 118: 'Hook', 119: 'Flamarang',
120: 'Molten Fury', 121: 'Fiery Greatsword', 122: 'Molten Pickaxe', 123: 'Meteor Helmet', 124: 'Meteor Suit',
125: 'Meteor Leggings', 126: 'Bottled Water', 127: 'Space Gun', 128: 'Rocket Boots', 129: 'Gray Brick',
130: 'Gray Brick Wall', 131: 'Red Brick', 132: 'Red Brick Wall', 133: 'Clay Block', 134: 'Blue Brick',
135: 'Blue Brick Wall', 136: 'Chain Lantern', 137: 'Green Brick', 138: 'Green Brick Wall', 139: 'Pink Brick',
140: 'Pink Brick Wall', 141: 'Gold Brick', 142: 'Gold Brick Wall', 143: 'Silver Brick',
144: 'Silver Brick Wall', 145: 'Copper Brick', 146: 'Copper Brick Wall', 147: 'Spike', 148: 'Water Candle',
149: 'Book', 150: 'Cobweb', 151: 'Necro Helmet', 152: 'Necro Breastplate', 153: 'Necro Greaves', 154: 'Bone',
155: 'Muramasa', 156: 'Cobalt Shield', 157: 'Aqua Scepter', 158: 'Lucky Horseshoe', 159: 'Shiny Red Balloon',
160: 'Harpoon', 161: 'Spiky Ball', 162: "Ball O' Hurt", 163: 'Blue Moon', 164: 'Handgun', 165: 'Water Bolt',
166: 'Bomb', 167: 'Dynamite', 168: 'Grenade', 169: 'Sand Block', 170: 'Glass', 171: 'Sign', 172: 'Ash Block',
173: 'Obsidian', 174: 'Hellstone', 175: 'Hellstone Bar', 176: 'Mud Block', 177: 'Sapphire', 178: 'Ruby',
179: 'Emerald', 180: 'Topaz', 181: 'Amethyst', 182: 'Diamond', 183: 'Glowing Mushroom', 184: 'Star',
185: 'Ivy Whip', 186: 'Breathing Reed', 187: 'Flipper', 188: 'Healing Potion', 189: 'Mana Potion',
190: 'Blade of Grass', 191: 'Thorn Chakram', 192: 'Obsidian Brick', 193: 'Obsidian Skull',
194: 'Mushroom Grass Seeds', 195: 'Jungle Grass Seeds', 196: 'Wooden Hammer', 197: 'Star Cannon',
198: 'Blue Phaseblade', 199: 'Red Phaseblade', 200: 'Green Phaseblade', 201: 'Purple Phaseblade',
202: 'White Phaseblade', 203: 'Yellow Phaseblade', 204: 'Meteor Hamaxe', 205: 'Empty Bucket',
206: 'Water Bucket', 207: 'Lava Bucket', 208: 'Jungle Rose', 209: 'Stinger', 210: 'Vine', 211: 'Feral Claws',
212: 'Anklet of the Wind', 213: 'Staff of Regrowth', 214: 'Hellstone Brick', 215: 'Whoopie Cushion',
216: 'Shackle', 217: 'Molten Hamaxe', 218: 'Flamelash', 219: 'Phoenix Blaster', 220: 'Sunfury',
221: 'Hellforge', 222: 'Clay Pot', 223: "Nature's Gift", 224: 'Bed', 225: 'Silk',
226: 'Lesser Restoration Potion', 227: 'Restoration Potion', 228: 'Jungle Hat', 229: 'Jungle Shirt',
230: 'Jungle Pants', 231: 'Molten Helmet', 232: 'Molten Breastplate', 233: 'Molten Greaves',
234: 'Meteor Shot', 235: 'Sticky Bomb', 236: 'Black Lens', 237: 'Sunglasses', 238: 'Wizard Hat',
239: 'Top Hat', 240: 'Tuxedo Shirt', 241: 'Tuxedo Pants', 242: 'Summer Hat', 243: 'Bunny Hood',
244: "Plumber's Hat", 245: "Plumber's Shirt", 246: "Plumber's Pants", 247: "Hero's Hat", 248: "Hero's Shirt",
249: "Hero's Pants", 250: 'Fish Bowl', 251: "Archaeologist's Hat", 252: "Archaeologist's Jacket",
253: "Archaeologist's Pants", 254: 'Black Thread', 255: 'Green Thread', 256: 'Ninja Hood', 257: 'Ninja Shirt',
258: 'Ninja Pants', 259: 'Leather', 260: 'Red Hat', 261: 'Goldfish', 262: 'Robe', 263: 'Robot Hat',
264: 'Gold Crown', 265: 'Hellfire Arrow', 266: 'Sandgun', 267: 'Guide Voodoo Doll', 268: 'Diving Helmet',
269: 'Familiar Shirt', 270: 'Familiar Pants', 271: 'Familiar Wig', 272: 'Demon Scythe', 273: "Night's Edge",
274: 'Dark Lance', 275: 'Coral', 276: 'Cactus', 277: 'Trident', 278: 'Silver Bullet', 279: 'Throwing Knife',
280: 'Spear', 281: 'Blowpipe', 282: 'Glowstick', 283: 'Seed', 284: 'Wooden Boomerang', 285: 'Aglet',
286: 'Sticky Glowstick', 287: 'Poisoned Knife', 288: 'Obsidian Skin Potion', 289: 'Regeneration Potion',
290: 'Swiftness Potion', 291: 'Gills Potion', 292: 'Ironskin Potion', 293: 'Mana Regeneration Potion',
294: 'Magic Power Potion', 295: 'Featherfall Potion', 296: 'Spelunker Potion', 297: 'Invisibility Potion',
298: 'Shine Potion', 299: 'Night Owl Potion', 300: 'Battle Potion', 301: 'Thorns Potion',
302: 'Water Walking Potion', 303: 'Archery Potion', 304: 'Hunter Potion', 305: 'Gravitation Potion',
306: 'Gold Chest', 307: 'Daybloom Seeds', 308: 'Moonglow Seeds', 309: 'Blinkroot Seeds',
310: 'Deathweed Seeds', 311: 'Waterleaf Seeds', 312: 'Fireblossom Seeds', 313: 'Daybloom', 314: 'Moonglow',
315: 'Blinkroot', 316: 'Deathweed', 317: 'Waterleaf', 318: 'Fireblossom', 319: 'Shark Fin', 320: 'Feather',
321: 'Tombstone', 322: 'Mime Mask', 323: 'Antlion Mandible', 324: 'Illegal Gun Parts',
325: "The Doctor's Shirt", 326: "The Doctor's Pants", 327: 'Golden Key', 328: 'Shadow Chest',
329: 'Shadow Key', 330: 'Obsidian Brick Wall', 331: 'Jungle Spores', 332: 'Loom', 333: 'Piano', 334: 'Dresser',
335: 'Bench', 336: 'Bathtub', 337: 'Red Banner', 338: 'Green Banner', 339: 'Blue Banner', 340: 'Yellow Banner',
341: 'Lamp Post', 342: 'Tiki Torch', 343: 'Barrel', 344: 'Chinese Lantern', 345: 'Cooking Pot', 346: 'Safe',
347: 'Skull Lantern', 348: 'Trash Can', 349: 'Candelabra', 350: 'Pink Vase', 351: 'Mug', 352: 'Keg',
353: 'Ale', 354: 'Bookcase', 355: 'Throne', 356: 'Bowl', 357: 'Bowl of Soup', 358: 'Toilet',
359: 'Grandfather Clock', 360: 'Armor Statue', 361: 'Goblin Battle Standard', 362: 'Tattered Cloth',
363: 'Sawmill', 364: 'Cobalt Ore', 365: 'Mythril Ore', 366: 'Adamantite Ore', 367: 'Pwnhammer',
368: 'Excalibur', 369: 'Hallowed Seeds', 370: 'Ebonsand Block', 371: 'Cobalt Hat', 372: 'Cobalt Helmet',
373: 'Cobalt Mask', 374: 'Cobalt Breastplate', 375: 'Cobalt Leggings', 376: 'Mythril Hood',
377: 'Mythril Helmet', 378: 'Mythril Hat', 379: 'Mythril Chainmail', 380: 'Mythril Greaves', 381: 'Cobalt Bar',
382: 'Mythril Bar', 383: 'Cobalt Chainsaw', 384: 'Mythril Chainsaw', 385: 'Cobalt Drill', 386: 'Mythril Drill',
387: 'Adamantite Chainsaw', 388: 'Adamantite Drill', 389: 'Dao of Pow', 390: 'Mythril Halberd',
391: 'Adamantite Bar', 392: 'Glass Wall', 393: 'Compass', 394: 'Diving Gear', 395: 'GPS',
396: 'Obsidian Horseshoe', 397: 'Obsidian Shield', 398: "Tinkerer's Workshop", 399: 'Cloud in a Balloon',
400: 'Adamantite Headgear', 401: 'Adamantite Helmet', 402: 'Adamantite Mask', 403: 'Adamantite Breastplate',
404: 'Adamantite Leggings', 405: 'Spectre Boots', 406: 'Adamantite Glaive', 407: 'Toolbelt',
408: 'Pearlsand Block', 409: 'Pearlstone Block', 410: 'Mining Shirt', 411: 'Mining Pants',
412: 'Pearlstone Brick', 413: 'Iridescent Brick', 414: 'Mudstone Block', 415: 'Cobalt Brick',
416: 'Mythril Brick', 417: 'Pearlstone Brick Wall', 418: 'Iridescent Brick Wall', 419: 'Mudstone Brick Wall',
420: 'Cobalt Brick Wall', 421: 'Mythril Brick Wall', 422: 'Holy Water', 423: 'Unholy Water', 424: 'Silt Block',
425: 'Fairy Bell', 426: 'Breaker Blade', 427: 'Blue Torch', 428: 'Red Torch', 429: 'Green Torch',
430: 'Purple Torch', 431: 'White Torch', 432: 'Yellow Torch', 433: 'Demon Torch',
434: 'Clockwork Assault Rifle', 435: 'Cobalt Repeater', 436: 'Mythril Repeater', 437: 'Dual Hook',
438: 'Star Statue', 439: 'Sword Statue', 440: 'Slime Statue', 441: 'Goblin Statue', 442: 'Shield Statue',
443: 'Bat Statue', 444: 'Fish Statue', 445: 'Bunny Statue', 446: 'Skeleton Statue', 447: 'Reaper Statue',
448: 'Woman Statue', 449: 'Imp Statue', 450: 'Gargoyle Statue', 451: 'Gloom Statue', 452: 'Hornet Statue',
453: 'Bomb Statue', 454: 'Crab Statue', 455: 'Hammer Statue', 456: 'Potion Statue', 457: 'Spear Statue',
458: 'Cross Statue', 459: 'Jellyfish Statue', 460: 'Bow Statue', 461: 'Boomerang Statue', 462: 'Boot Statue',
463: 'Chest Statue', 464: 'Bird Statue', 465: 'Axe Statue', 466: 'Corrupt Statue', 467: 'Tree Statue',
468: 'Anvil Statue', 469: 'Pickaxe Statue', 470: 'Mushroom Statue', 471: 'Eyeball Statue',
472: 'Pillar Statue', 473: 'Heart Statue', 474: 'Pot Statue', 475: 'Sunflower Statue', 476: 'King Statue',
477: 'Queen Statue', 478: 'Piranha Statue', 479: 'Planked Wall', 480: 'Wooden Beam',
481: 'Adamantite Repeater', 482: 'Adamantite Sword', 483: 'Cobalt Sword', 484: 'Mythril Sword',
485: 'Moon Charm', 486: 'Ruler', 487: 'Crystal Ball', 488: 'Disco Ball', 489: 'Sorcerer Emblem',
490: 'Warrior Emblem', 491: 'Ranger Emblem', 492: 'Demon Wings', 493: 'Angel Wings', 494: 'Magical Harp',
495: 'Rainbow Rod', 496: 'Ice Rod', 497: "Neptune's Shell", 498: 'Mannequin', 499: 'Greater Healing Potion',
500: 'Greater Mana Potion', 501: 'Pixie Dust', 502: 'Crystal Shard', 503: 'Clown Hat', 504: 'Clown Shirt',
505: 'Clown Pants', 506: 'Flamethrower', 507: 'Bell', 508: 'Harp', 509: 'Wrench', 510: 'Wire Cutter',
511: 'Active Stone Block', 512: 'Inactive Stone Block', 513: 'Lever', 514: 'Laser Rifle',
515: 'Crystal Bullet', 516: 'Holy Arrow', 517: 'Magic Dagger', 518: 'Crystal Storm', 519: 'Cursed Flames',
520: 'Soul of Light', 521: 'Soul of Night', 522: 'Cursed Flame', 523: 'Cursed Torch', 524: 'Adamantite Forge',
525: 'Mythril Anvil', 526: 'Unicorn Horn', 527: 'Dark Shard', 528: 'Light Shard', 529: 'Red Pressure Plate',
530: 'Wire', 531: 'Spell Tome', 532: 'Star Cloak', 533: 'Megashark', 534: 'Shotgun',
535: "Philosopher's Stone", 536: 'Titan Glove', 537: 'Cobalt Naginata', 538: 'Switch', 539: 'Dart Trap',
540: 'Boulder', 541: 'Green Pressure Plate', 542: 'Gray Pressure Plate', 543: 'Brown Pressure Plate',
544: 'Mechanical Eye', 545: 'Cursed Arrow', 546: 'Cursed Bullet', 547: 'Soul of Fright', 548: 'Soul of Might',
549: 'Soul of Sight', 550: 'Gungnir', 551: 'Hallowed Plate Mail', 552: 'Hallowed Greaves',
553: 'Hallowed Helmet', 554: 'Cross Necklace', 555: 'Mana Flower', 556: 'Mechanical Worm',
557: 'Mechanical Skull', 558: 'Hallowed Headgear', 559: 'Hallowed Mask', 560: 'Slime Crown', 561: 'Light Disc',
562: 'Music Box (Overworld Day)', 563: 'Music Box (Eerie)', 564: 'Music Box (Night)', 565: 'Music Box (Title)',
566: 'Music Box (Underground)', 567: 'Music Box (Boss 1)', 568: 'Music Box (Jungle)',
569: 'Music Box (Corruption)', 570: 'Music Box (Underground Corruption)', 571: 'Music Box (The Hallow)',
572: 'Music Box (Boss 2)', 573: 'Music Box (Underground Hallow)', 574: 'Music Box (Boss 3)',
575: 'Soul of Flight', 576: 'Music Box', 577: 'Demonite Brick', 578: 'Hallowed Repeater', 579: 'Drax',
580: 'Explosives', 581: 'Inlet Pump', 582: 'Outlet Pump', 583: '1 Second Timer', 584: '3 Second Timer',
585: '5 Second Timer', 586: 'Candy Cane Block', 587: 'Candy Cane Wall', 588: 'Santa Hat', 589: 'Santa Shirt',
590: 'Santa Pants', 591: 'Green Candy Cane Block', 592: 'Green Candy Cane Wall', 593: 'Snow Block',
594: 'Snow Brick', 595: 'Snow Brick Wall', 596: 'Blue Light', 597: 'Red Light', 598: 'Green Light',
599: 'Blue Present', 600: 'Green Present', 601: 'Yellow Present', 602: 'Snow Globe', 603: 'Carrot',
604: 'Adamantite Beam', 605: 'Adamantite Beam Wall', 606: 'Demonite Brick Wall', 607: 'Sandstone Brick',
608: 'Sandstone Brick Wall', 609: 'Ebonstone Brick', 610: 'Ebonstone Brick Wall', 611: 'Red Stucco',
612: 'Yellow Stucco', 613: 'Green Stucco', 614: 'Gray Stucco', 615: 'Red Stucco Wall',
616: 'Yellow Stucco Wall', 617: 'Green Stucco Wall', 618: 'Gray Stucco Wall', 619: 'Ebonwood',
620: 'Rich Mahogany', 621: 'Pearlwood', 622: 'Ebonwood Wall', 623: 'Rich Mahogany Wall', 624: 'Pearlwood Wall',
625: 'Ebonwood Chest', 626: 'Rich Mahogany Chest', 627: 'Pearlwood Chest', 628: 'Ebonwood Chair',
629: 'Rich Mahogany Chair', 630: 'Pearlwood Chair', 631: 'Ebonwood Platform', 632: 'Rich Mahogany Platform',
633: 'Pearlwood Platform', 634: 'Bone Platform', 635: 'Ebonwood Work Bench', 636: 'Rich Mahogany Work Bench',
637: 'Pearlwood Work Bench', 638: 'Ebonwood Table', 639: 'Rich Mahogany Table', 640: 'Pearlwood Table',
641: 'Ebonwood Piano', 642: 'Rich Mahogany Piano', 643: 'Pearlwood Piano', 644: 'Ebonwood Bed',
645: 'Rich Mahogany Bed', 646: 'Pearlwood Bed', 647: 'Ebonwood Dresser', 648: 'Rich Mahogany Dresser',
649: 'Pearlwood Dresser', 650: 'Ebonwood Door', 651: 'Rich Mahogany Door', 652: 'Pearlwood Door',
653: 'Ebonwood Sword', 654: 'Ebonwood Hammer', 655: 'Ebonwood Bow', 656: 'Rich Mahogany Sword',
657: 'Rich Mahogany Hammer', 658: 'Rich Mahogany Bow', 659: 'Pearlwood Sword', 660: 'Pearlwood Hammer',
661: 'Pearlwood Bow', 662: 'Rainbow Brick', 663: 'Rainbow Brick Wall', 664: 'Ice Block', 665: "Red's Wings",
666: "Red's Helmet", 667: "Red's Breastplate", 668: "Red's Leggings", 669: 'Fish', 670: 'Ice Boomerang',
671: 'Keybrand', 672: 'Cutlass', 673: 'Boreal Wood Work Bench', 674: 'True Excalibur',
675: "True Night's Edge", 676: 'Frostbrand', 677: 'Boreal Wood Table', 678: 'Red Potion',
679: 'Tactical Shotgun', 680: 'Ivy Chest', 681: 'Ice Chest', 682: 'Marrow', 683: 'Unholy Trident',
684: 'Frost Helmet', 685: 'Frost Breastplate', 686: 'Frost Leggings', 687: 'Tin Helmet', 688: 'Tin Chainmail',
689: 'Tin Greaves', 690: 'Lead Helmet', 691: 'Lead Chainmail', 692: 'Lead Greaves', 693: 'Tungsten Helmet',
694: 'Tungsten Chainmail', 695: 'Tungsten Greaves', 696: 'Platinum Helmet', 697: 'Platinum Chainmail',
698: 'Platinum Greaves', 699: 'Tin Ore', 700: 'Lead Ore', 701: 'Tungsten Ore', 702: 'Platinum Ore',
703: 'Tin Bar', 704: 'Lead Bar', 705: 'Tungsten Bar', 706: 'Platinum Bar', 707: 'Tin Watch',
708: 'Tungsten Watch', 709: 'Platinum Watch', 710: 'Tin Chandelier', 711: 'Tungsten Chandelier',
712: 'Platinum Chandelier', 713: 'Platinum Candle', 714: 'Platinum Candelabra', 715: 'Platinum Crown',
716: 'Lead Anvil', 717: 'Tin Brick', 718: 'Tungsten Brick', 719: 'Platinum Brick', 720: 'Tin Brick Wall',
721: 'Tungsten Brick Wall', 722: 'Platinum Brick Wall', 723: 'Beam Sword', 724: 'Ice Blade', 725: 'Ice Bow',
726: 'Frost Staff', 727: 'Wood Helmet', 728: 'Wood Breastplate', 729: 'Wood Greaves', 730: 'Ebonwood Helmet',
731: 'Ebonwood Breastplate', 732: 'Ebonwood Greaves', 733: 'Rich Mahogany Helmet',
734: 'Rich Mahogany Breastplate', 735: 'Rich Mahogany Greaves', 736: 'Pearlwood Helmet',
737: 'Pearlwood Breastplate', 738: 'Pearlwood Greaves', 739: 'Amethyst Staff', 740: 'Topaz Staff',
741: 'Sapphire Staff', 742: 'Emerald Staff', 743: 'Ruby Staff', 744: 'Diamond Staff', 745: 'Grass Wall',
746: 'Jungle Wall', 747: 'Flower Wall', 748: 'Jetpack', 749: 'Butterfly Wings', 750: 'Cactus Wall',
751: 'Cloud', 752: 'Cloud Wall', 753: 'Seaweed', 754: 'Rune Hat', 755: 'Rune Robe', 756: 'Mushroom Spear',
757: 'Terra Blade', 758: 'Grenade Launcher', 759: 'Rocket Launcher', 760: 'Proximity Mine Launcher',
761: 'Fairy Wings', 762: 'Slime Block', 763: 'Flesh Block', 764: 'Mushroom Wall', 765: 'Rain Cloud',
766: 'Bone Block', 767: 'Frozen Slime Block', 768: 'Bone Block Wall', 769: 'Slime Block Wall',
770: 'Flesh Block Wall', 771: 'Rocket I', 772: 'Rocket II', 773: 'Rocket III', 774: 'Rocket IV',
775: 'Asphalt Block', 776: 'Cobalt Pickaxe', 777: 'Mythril Pickaxe', 778: 'Adamantite Pickaxe',
779: 'Clentaminator', 780: 'Green Solution', 781: 'Blue Solution', 782: 'Purple Solution',
783: 'Dark Blue Solution', 784: 'Red Solution', 785: 'Harpy Wings', 786: 'Bone Wings', 787: 'Hammush',
788: 'Nettle Burst', 789: 'Ankh Banner', 790: 'Snake Banner', 791: 'Omega Banner', 792: 'Crimson Helmet',
793: 'Crimson Scalemail', 794: 'Crimson Greaves', 795: 'Blood Butcherer', 796: 'Tendon Bow',
797: 'Flesh Grinder', 798: 'Deathbringer Pickaxe', 799: 'Blood Lust Cluster', 800: 'The Undertaker',
801: 'The Meatball', 802: 'The Rotted Fork', 803: 'Eskimo Hood', 804: 'Eskimo Coat', 805: 'Eskimo Pants',
806: 'Living Wood Chair', 807: 'Cactus Chair', 808: 'Bone Chair', 809: 'Flesh Chair', 810: 'Mushroom Chair',
811: 'Bone Work Bench', 812: 'Cactus Work Bench', 813: 'Flesh Work Bench', 814: 'Mushroom Work Bench',
815: 'Slime Work Bench', 816: 'Cactus Door', 817: 'Flesh Door', 818: 'Mushroom Door', 819: 'Living Wood Door',
820: 'Bone Door', 821: 'Flame Wings', 822: 'Frozen Wings', 823: 'Ghost Wings', 824: 'Sunplate Block',
825: 'Disc Wall', 826: 'Skyware Chair', 827: 'Bone Table', 828: 'Flesh Table', 829: 'Living Wood Table',
830: 'Skyware Table', 831: 'Living Wood Chest', 832: 'Living Wood Wand', 833: 'Purple Ice Block',
834: 'Pink Ice Block', 835: 'Red Ice Block', 836: 'Crimstone Block', 837: 'Skyware Door', 838: 'Skyware Chest',
839: 'Steampunk Hat', 840: 'Steampunk Shirt', 841: 'Steampunk Pants', 842: 'Bee Hat', 843: 'Bee Shirt',
844: 'Bee Pants', 845: 'World Banner', 846: 'Sun Banner', 847: 'Gravity Banner', 848: "Pharaoh's Mask",
849: 'Actuator', 850: 'Blue Wrench', 851: 'Green Wrench', 852: 'Blue Pressure Plate',
853: 'Yellow Pressure Plate', 854: 'Discount Card', 855: 'Lucky Coin', 856: 'Unicorn on a Stick',
857: 'Sandstorm in a Bottle', 858: 'Boreal Wood Sofa', 859: 'Beach Ball', 860: 'Charm of Myths',
861: 'Moon Shell', 862: 'Star Veil', 863: 'Water Walking Boots', 864: 'Tiara', 865: 'Princess Dress',
866: "Pharaoh's Robe", 867: 'Green Cap', 868: 'Mushroom Cap', 869: "Tam O' Shanter", 870: 'Mummy Mask',
871: 'Mummy Shirt', 872: 'Mummy Pants', 873: 'Cowboy Hat', 874: 'Cowboy Jacket', 875: 'Cowboy Pants',
876: 'Pirate Hat', 877: 'Pirate Shirt', 878: 'Pirate Pants', 879: 'Viking Helmet', 880: 'Crimtane Ore',
881: 'Cactus Sword', 882: 'Cactus Pickaxe', 883: 'Ice Brick', 884: 'Ice Brick Wall', 885: 'Adhesive Bandage',
886: 'Armor Polish', 887: 'Bezoar', 888: 'Blindfold', 889: 'Fast Clock', 890: 'Megaphone', 891: 'Nazar',
892: 'Vitamins', 893: 'Trifold Map', 894: 'Cactus Helmet', 895: 'Cactus Breastplate', 896: 'Cactus Leggings',
897: 'Power Glove', 898: 'Lightning Boots', 899: 'Sun Stone', 900: 'Moon Stone', 901: 'Armor Bracing',
902: 'Medicated Bandage', 903: 'The Plan', 904: 'Countercurse Mantra', 905: 'Coin Gun', 906: 'Lava Charm',
907: 'Obsidian Water Walking Boots', 908: 'Lava Waders', 909: 'Pure Water Fountain',
910: 'Desert Water Fountain', 911: 'Shadewood', 912: 'Shadewood Door', 913: 'Shadewood Platform',
914: 'Shadewood Chest', 915: 'Shadewood Chair', 916: 'Shadewood Work Bench', 917: 'Shadewood Table',
918: 'Shadewood Dresser', 919: 'Shadewood Piano', 920: 'Shadewood Bed', 921: 'Shadewood Sword',
922: 'Shadewood Hammer', 923: 'Shadewood Bow', 924: 'Shadewood Helmet', 925: 'Shadewood Breastplate',
926: 'Shadewood Greaves', 927: 'Shadewood Wall', 928: 'Cannon', 929: 'Cannonball', 930: 'Flare Gun',
931: 'Flare', 932: 'Bone Wand', 933: 'Leaf Wand', 934: 'Flying Carpet', 935: 'Avenger Emblem',
936: 'Mechanical Glove', 937: 'Land Mine', 938: "Paladin's Shield", 939: 'Web Slinger',
940: 'Jungle Water Fountain', 941: 'Icy Water Fountain', 942: 'Corrupt Water Fountain',
943: 'Crimson Water Fountain', 944: 'Hallowed Water Fountain', 945: 'Blood Water Fountain', 946: 'Umbrella',
947: 'Chlorophyte Ore', 948: 'Steampunk Wings', 949: 'Snowball', 950: 'Ice Skates', 951: 'Snowball Launcher',
952: 'Web Covered Chest', 953: 'Climbing Claws', 954: 'Ancient Iron Helmet', 955: 'Ancient Gold Helmet',
956: 'Ancient Shadow Helmet', 957: 'Ancient Shadow Scalemail', 958: 'Ancient Shadow Greaves',
959: 'Ancient Necro Helmet', 960: 'Ancient Cobalt Helmet', 961: 'Ancient Cobalt Breastplate',
962: 'Ancient Cobalt Leggings', 963: 'Black Belt', 964: 'Boomstick', 965: 'Rope', 966: 'Campfire',
967: 'Marshmallow', 968: 'Marshmallow on a Stick', 969: 'Cooked Marshmallow', 970: 'Red Rocket',
971: 'Green Rocket', 972: 'Blue Rocket', 973: 'Yellow Rocket', 974: 'Ice Torch', 975: 'Shoe Spikes',
976: 'Tiger Climbing Gear', 977: 'Tabi', 978: 'Pink Eskimo Hood', 979: 'Pink Eskimo Coat',
980: 'Pink Eskimo Pants', 981: 'Pink Thread', 982: 'Mana Regeneration Band', 983: 'Sandstorm in a Balloon',
984: 'Master Ninja Gear', 985: 'Rope Coil', 986: 'Blowgun', 987: 'Blizzard in a Bottle',
988: 'Frostburn Arrow', 989: 'Enchanted Sword', 990: 'Pickaxe Axe', 991: 'Cobalt Waraxe',
992: 'Mythril Waraxe', 993: 'Adamantite Waraxe', 994: "Eater's Bone", 995: 'Blend-O-Matic',
996: 'Meat Grinder', 997: 'Extractinator', 998: 'Solidifier', 999: 'Amber', 1000: 'Confetti Gun',
1001: 'Chlorophyte Mask', 1002: 'Chlorophyte Helmet', 1003: 'Chlorophyte Headgear',
1004: 'Chlorophyte Plate Mail', 1005: 'Chlorophyte Greaves', 1006: 'Chlorophyte Bar', 1007: 'Red Dye',
1008: 'Orange Dye', 1009: 'Yellow Dye', 1010: 'Lime Dye', 1011: 'Green Dye', 1012: 'Teal Dye',
1013: 'Cyan Dye', 1014: 'Sky Blue Dye', 1015: 'Blue Dye', 1016: 'Purple Dye', 1017: 'Violet Dye',
1018: 'Pink Dye', 1019: 'Red and Black Dye', 1020: 'Orange and Black Dye', 1021: 'Yellow and Black Dye',
1022: 'Lime and Black Dye', 1023: 'Green and Black Dye', 1024: 'Teal and Black Dye',
1025: 'Cyan and Black Dye', 1026: 'Sky Blue and Black Dye', 1027: 'Blue and Black Dye',
1028: 'Purple and Black Dye', 1029: 'Violet and Black Dye', 1030: 'Pink and Black Dye', 1031: 'Flame Dye',
1032: 'Flame and Black Dye', 1033: 'Green Flame Dye', 1034: 'Green Flame and Black Dye',
1035: 'Blue Flame Dye', 1036: 'Blue Flame and Black Dye', 1037: 'Silver Dye', 1038: 'Bright Red Dye',
1039: 'Bright Orange Dye', 1040: 'Bright Yellow Dye', 1041: 'Bright Lime Dye', 1042: 'Bright Green Dye',
1043: 'Bright Teal Dye', 1044: 'Bright Cyan Dye', 1045: 'Bright Sky Blue Dye', 1046: 'Bright Blue Dye',
1047: 'Bright Purple Dye', 1048: 'Bright Violet Dye', 1049: 'Bright Pink Dye', 1050: 'Black Dye',
1051: 'Red and Silver Dye', 1052: 'Orange and Silver Dye', 1053: 'Yellow and Silver Dye',
1054: 'Lime and Silver Dye', 1055: 'Green and Silver Dye', 1056: 'Teal and Silver Dye',
1057: 'Cyan and Silver Dye', 1058: 'Sky Blue and Silver Dye', 1059: 'Blue and Silver Dye',
1060: 'Purple and Silver Dye', 1061: 'Violet and Silver Dye', 1062: 'Pink and Silver Dye',
1063: 'Intense Flame Dye', 1064: 'Intense Green Flame Dye', 1065: 'Intense Blue Flame Dye',
1066: 'Rainbow Dye', 1067: 'Intense Rainbow Dye', 1068: 'Yellow Gradient Dye', 1069: 'Cyan Gradient Dye',
1070: 'Violet Gradient Dye', 1071: 'Paintbrush', 1072: 'Paint Roller', 1073: 'Red Paint', 1074: 'Orange Paint',
1075: 'Yellow Paint', 1076: 'Lime Paint', 1077: 'Green Paint', 1078: 'Teal Paint', 1079: 'Cyan Paint',
1080: 'Sky Blue Paint', 1081: 'Blue Paint', 1082: 'Purple Paint', 1083: 'Violet Paint', 1084: 'Pink Paint',
1085: 'Deep Red Paint', 1086: 'Deep Orange Paint', 1087: 'Deep Yellow Paint', 1088: 'Deep Lime Paint',
1089: 'Deep Green Paint', 1090: 'Deep Teal Paint', 1091: 'Deep Cyan Paint', 1092: 'Deep Sky Blue Paint',
1093: 'Deep Blue Paint', 1094: 'Deep Purple Paint', 1095: 'Deep Violet Paint', 1096: 'Deep Pink Paint',
1097: 'Black Paint', 1098: 'White Paint', 1099: 'Gray Paint', 1100: 'Paint Scraper', 1101: 'Lihzahrd Brick',
1102: 'Lihzahrd Brick Wall', 1103: 'Slush Block', 1104: 'Palladium Ore', 1105: 'Orichalcum Ore',
1106: 'Titanium Ore', 1107: 'Teal Mushroom', 1108: 'Green Mushroom', 1109: 'Sky Blue Flower',
1110: 'Yellow Marigold', 1111: 'Blue Berries', 1112: 'Lime Kelp', 1113: 'Pink Prickly Pear',
1114: 'Orange Bloodroot', 1115: 'Red Husk', 1116: 'Cyan Husk', 1117: 'Violet Husk', 1118: 'Purple Mucos',
1119: 'Black Ink', 1120: 'Dye Vat', 1121: 'Bee Gun', 1122: 'Possessed Hatchet', 1123: 'Bee Keeper',
1124: 'Hive', 1125: 'Honey Block', 1126: 'Hive Wall', 1127: 'Crispy Honey Block', 1128: 'Honey Bucket',
1129: 'Hive Wand', 1130: 'Beenade', 1131: 'Gravity Globe', 1132: 'Honey Comb', 1133: 'Abeemination',
1134: 'Bottled Honey', 1135: 'Rain Hat', 1136: 'Rain Coat', 1137: 'Lihzahrd Door', 1138: 'Dungeon Door',
1139: 'Lead Door', 1140: 'Iron Door', 1141: 'Temple Key', 1142: 'Lihzahrd Chest', 1143: 'Lihzahrd Chair',
1144: 'Lihzahrd Table', 1145: 'Lihzahrd Work Bench', 1146: 'Super Dart Trap', 1147: 'Flame Trap',
1148: 'Spiky Ball Trap', 1149: 'Spear Trap', 1150: 'Wooden Spike', 1151: 'Lihzahrd Pressure Plate',
1152: 'Lihzahrd Statue', 1153: 'Lihzahrd Watcher Statue', 1154: 'Lihzahrd Guardian Statue', 1155: 'Wasp Gun',
1156: 'Piranha Gun', 1157: 'Pygmy Staff', 1158: 'Pygmy Necklace', 1159: 'Tiki Mask', 1160: 'Tiki Shirt',
1161: 'Tiki Pants', 1162: 'Leaf Wings', 1163: 'Blizzard in a Balloon', 1164: 'Bundle of Balloons',
1165: 'Bat Wings', 1166: 'Bone Sword', 1167: 'Hercules Beetle', 1168: 'Smoke Bomb', 1169: 'Bone Key',
1170: 'Nectar', 1171: 'Tiki Totem', 1172: 'Lizard Egg', 1173: 'Grave Marker', 1174: 'Cross Grave Marker',
1175: 'Headstone', 1176: 'Gravestone', 1177: 'Obelisk', 1178: 'Leaf Blower', 1179: 'Chlorophyte Bullet',
1180: 'Parrot Cracker', 1181: 'Strange Glowing Mushroom', 1182: 'Seedling', 1183: 'Wisp in a Bottle',
1184: 'Palladium Bar', 1185: 'Palladium Sword', 1186: 'Palladium Pike', 1187: 'Palladium Repeater',
1188: 'Palladium Pickaxe', 1189: 'Palladium Drill', 1190: 'Palladium Chainsaw', 1191: 'Orichalcum Bar',
1192: 'Orichalcum Sword', 1193: 'Orichalcum Halberd', 1194: 'Orichalcum Repeater', 1195: 'Orichalcum Pickaxe',
1196: 'Orichalcum Drill', 1197: 'Orichalcum Chainsaw', 1198: 'Titanium Bar', 1199: 'Titanium Sword',
1200: 'Titanium Trident', 1201: 'Titanium Repeater', 1202: 'Titanium Pickaxe', 1203: 'Titanium Drill',
1204: 'Titanium Chainsaw', 1205: 'Palladium Mask', 1206: 'Palladium Helmet', 1207: 'Palladium Headgear',
1208: 'Palladium Breastplate', 1209: 'Palladium Leggings', 1210: 'Orichalcum Mask', 1211: 'Orichalcum Helmet',
1212: 'Orichalcum Headgear', 1213: 'Orichalcum Breastplate', 1214: 'Orichalcum Leggings',
1215: 'Titanium Mask', 1216: 'Titanium Helmet', 1217: 'Titanium Headgear', 1218: 'Titanium Breastplate',
1219: 'Titanium Leggings', 1220: 'Orichalcum Anvil', 1221: 'Titanium Forge', 1222: 'Palladium Waraxe',
1223: 'Orichalcum Waraxe', 1224: 'Titanium Waraxe', 1225: 'Hallowed Bar', 1226: 'Chlorophyte Claymore',
1227: 'Chlorophyte Saber', 1228: 'Chlorophyte Partisan', 1229: 'Chlorophyte Shotbow',
1230: 'Chlorophyte Pickaxe', 1231: 'Chlorophyte Drill', 1232: 'Chlorophyte Chainsaw',
1233: 'Chlorophyte Greataxe', 1234: 'Chlorophyte Warhammer', 1235: 'Chlorophyte Arrow', 1236: 'Amethyst Hook',
1237: 'Topaz Hook', 1238: 'Sapphire Hook', 1239: 'Emerald Hook', 1240: 'Ruby Hook', 1241: 'Diamond Hook',
1242: 'Amber Mosquito', 1243: 'Umbrella Hat', 1244: 'Nimbus Rod', 1245: 'Orange Torch', 1246: 'Crimsand Block',
1247: 'Bee Cloak', 1248: 'Eye of the Golem', 1249: 'Honey Balloon', 1250: 'Blue Horseshoe Balloon',
1251: 'White Horseshoe Balloon', 1252: 'Yellow Horseshoe Balloon', 1253: 'Frozen Turtle Shell',
1254: 'Sniper Rifle', 1255: 'Venus Magnum', 1256: 'Crimson Rod', 1257: 'Crimtane Bar', 1258: 'Stynger',
1259: 'Flower Pow', 1260: 'Rainbow Gun', 1261: 'Stynger Bolt', 1262: 'Chlorophyte Jackhammer',
1263: 'Teleporter', 1264: 'Flower of Frost', 1265: 'Uzi', 1266: 'Magnet Sphere', 1267: 'Purple Stained Glass',
1268: 'Yellow Stained Glass', 1269: 'Blue Stained Glass', 1270: 'Green Stained Glass',
1271: 'Red Stained Glass', 1272: 'Multicolored Stained Glass', 1273: 'Skeletron Hand', 1274: 'Skull',
1275: 'Balla Hat', 1276: 'Gangsta Hat', 1277: 'Sailor Hat', 1278: 'Eye Patch', 1279: 'Sailor Shirt',
1280: 'Sailor Pants', 1281: 'Skeletron Mask', 1282: 'Amethyst Robe', 1283: 'Topaz Robe', 1284: 'Sapphire Robe',
1285: 'Emerald Robe', 1286: 'Ruby Robe', 1287: 'Diamond Robe', 1288: 'White Tuxedo Shirt',
1289: 'White Tuxedo Pants', 1290: 'Panic Necklace', 1291: 'Life Fruit', 1292: 'Lihzahrd Altar',
1293: 'Lihzahrd Power Cell', 1294: 'Picksaw', 1295: 'Heat Ray', 1296: 'Staff of Earth', 1297: 'Golem Fist',
1298: 'Water Chest', 1299: 'Binoculars', 1300: 'Rifle Scope', 1301: 'Destroyer Emblem',
1302: 'High Velocity Bullet', 1303: 'Jellyfish Necklace', 1304: 'Zombie Arm', 1305: 'The Axe',
1306: 'Ice Sickle', 1307: 'Clothier Voodoo Doll', 1308: 'Poison Staff', 1309: 'Slime Staff',
1310: 'Poison Dart', 1311: 'Eye Spring', 1312: 'Toy Sled', 1313: 'Book of Skulls', 1314: 'KO Cannon',
1315: 'Pirate Map', 1316: 'Turtle Helmet', 1317: 'Turtle Scale Mail', 1318: 'Turtle Leggings',
1319: 'Snowball Cannon', 1320: 'Bone Pickaxe', 1321: 'Magic Quiver', 1322: 'Magma Stone',
1323: 'Obsidian Rose', 1324: 'Bananarang', 1325: 'Chain Knife', 1326: 'Rod of Discord', 1327: 'Death Sickle',
1328: 'Turtle Shell', 1329: 'Tissue Sample', 1330: 'Vertebrae', 1331: 'Bloody Spine', 1332: 'Ichor',
1333: 'Ichor Torch', 1334: 'Ichor Arrow', 1335: 'Ichor Bullet', 1336: 'Golden Shower', 1337: 'Bunny Cannon',
1338: 'Explosive Bunny', 1339: 'Vial of Venom', 1340: 'Flask of Venom', 1341: 'Venom Arrow',
1342: 'Venom Bullet', 1343: 'Fire Gauntlet', 1344: 'Cog', 1345: 'Confetti', 1346: 'Nanites',
1347: 'Explosive Powder', 1348: 'Gold Dust', 1349: 'Party Bullet', 1350: 'Nano Bullet',
1351: 'Exploding Bullet', 1352: 'Golden Bullet', 1353: 'Flask of Cursed Flames', 1354: 'Flask of Fire',
1355: 'Flask of Gold', 1356: 'Flask of Ichor', 1357: 'Flask of Nanites', 1358: 'Flask of Party',
1359: 'Flask of Poison', 1360: 'Eye of Cthulhu Trophy', 1361: 'Eater of Worlds Trophy',
1362: 'Brain of Cthulhu Trophy', 1363: 'Skeletron Trophy', 1364: 'Queen Bee Trophy',
1365: 'Wall of Flesh Trophy', 1366: 'Destroyer Trophy', 1367: 'Skeletron Prime Trophy',
1368: 'Retinazer Trophy', 1369: 'Spazmatism Trophy', 1370: 'Plantera Trophy', 1371: 'Golem Trophy',
1372: 'Blood Moon Rising', 1373: 'The Hanged Man', 1374: 'Glory of the Fire', 1375: 'Bone Warp',
1376: 'Wall Skeleton', 1377: 'Hanging Skeleton', 1378: 'Blue Slab Wall', 1379: 'Blue Tiled Wall',
1380: 'Pink Slab Wall', 1381: 'Pink Tiled Wall', 1382: 'Green Slab Wall', 1383: 'Green Tiled Wall',
1384: 'Blue Brick Platform', 1385: 'Pink Brick Platform', 1386: 'Green Brick Platform', 1387: 'Metal Shelf',
1388: 'Brass Shelf', 1389: 'Wood Shelf', 1390: 'Brass Lantern', 1391: 'Caged Lantern',
1392: 'Carriage Lantern', 1393: 'Alchemy Lantern', 1394: 'Diablost Lamp', 1395: 'Oil Rag Sconse',
1396: 'Blue Dungeon Chair', 1397: 'Blue Dungeon Table', 1398: 'Blue Dungeon Work Bench',
1399: 'Green Dungeon Chair', 1400: 'Green Dungeon Table', 1401: 'Green Dungeon Work Bench',
1402: 'Pink Dungeon Chair', 1403: 'Pink Dungeon Table', 1404: 'Pink Dungeon Work Bench',
1405: 'Blue Dungeon Candle', 1406: 'Green Dungeon Candle', 1407: 'Pink Dungeon Candle',
1408: 'Blue Dungeon Vase', 1409: 'Green Dungeon Vase', 1410: 'Pink Dungeon Vase', 1411: 'Blue Dungeon Door',
1412: 'Green Dungeon Door', 1413: 'Pink Dungeon Door', 1414: 'Blue Dungeon Bookcase',
1415: 'Green Dungeon Bookcase', 1416: 'Pink Dungeon Bookcase', 1417: 'Catacomb', 1418: 'Dungeon Shelf',
1419: 'Skellington J Skellingsworth', 1420: 'The Cursed Man', 1421: 'The Eye Sees the End',
1422: 'Something Evil is Watching You', 1423: 'The Twins Have Awoken', 1424: 'The Screamer',
1425: 'Goblins Playing Poker', 1426: 'Dryadisque', 1427: 'Sunflowers', 1428: 'Terrarian Gothic',
1429: 'Beanie', 1430: 'Imbuing Station', 1431: 'Star in a Bottle', 1432: 'Empty Bullet', 1433: 'Impact',
1434: 'Powered by Birds', 1435: 'The Destroyer', 1436: 'The Persistency of Eyes',
1437: 'Unicorn Crossing the Hallows', 1438: 'Great Wave', 1439: 'Starry Night', 1440: 'Guide Picasso',
1441: "The Guardian's Gaze", 1442: 'Father of Someone', 1443: 'Nurse Lisa', 1444: 'Shadowbeam Staff',
1445: 'Inferno Fork', 1446: 'Spectre Staff', 1447: 'Wooden Fence', 1448: 'Lead Fence', 1449: 'Bubble Machine',
1450: 'Bubble Wand', 1451: 'Marching Bones Banner', 1452: 'Necromantic Sign', 1453: 'Rusted Company Standard',
1454: 'Ragged Brotherhood Sigil', 1455: 'Molten Legion Flag', 1456: 'Diabolic Sigil',
1457: 'Obsidian Platform', 1458: 'Obsidian Door', 1459: 'Obsidian Chair', 1460: 'Obsidian Table',
1461: 'Obsidian Work Bench', 1462: 'Obsidian Vase', 1463: 'Obsidian Bookcase', 1464: 'Hellbound Banner',
1465: 'Hell Hammer Banner', 1466: 'Helltower Banner', 1467: 'Lost Hopes of Man Banner',
1468: 'Obsidian Watcher Banner', 1469: 'Lava Erupts Banner', 1470: 'Blue Dungeon Bed',
1471: 'Green Dungeon Bed', 1472: 'Pink Dungeon Bed', 1473: 'Obsidian Bed', 1474: 'Waldo', 1475: 'Darkness',
1476: 'Dark Soul Reaper', 1477: 'Land', 1478: 'Trapped Ghost', 1479: "Demon's Eye", 1480: 'Finding Gold',
1481: 'First Encounter', 1482: 'Good Morning', 1483: 'Underground Reward', 1484: 'Through the Window',
1485: 'Place Above the Clouds', 1486: 'Do Not Step on the Grass', 1487: 'Cold Waters in the White Land',
1488: 'Lightless Chasms', 1489: 'The Land of Deceiving Looks', 1490: 'Daylight', 1491: 'Secret of the Sands',
1492: 'Deadland Comes Alive', 1493: 'Evil Presence', 1494: 'Sky Guardian', 1495: 'American Explosive',
1496: 'Discover', 1497: 'Hand Earth', 1498: 'Old Miner', 1499: 'Skelehead',
1500: 'Facing the Cerebral Mastermind', 1501: 'Lake of Fire', 1502: 'Trio Super Heroes', 1503: 'Spectre Hood',
1504: 'Spectre Robe', 1505: 'Spectre Pants', 1506: 'Spectre Pickaxe', 1507: 'Spectre Hamaxe',
1508: 'Ectoplasm', 1509: 'Gothic Chair', 1510: 'Gothic Table', 1511: 'Gothic Work Bench',
1512: 'Gothic Bookcase', 1513: "Paladin's Hammer", 1514: 'SWAT Helmet', 1515: 'Bee Wings',
1516: 'Giant Harpy Feather', 1517: 'Bone Feather', 1518: 'Fire Feather', 1519: 'Ice Feather',
1520: 'Broken Bat Wing', 1521: 'Tattered Bee Wing', 1522: 'Large Amethyst', 1523: 'Large Topaz',
1524: 'Large Sapphire', 1525: 'Large Emerald', 1526: 'Large Ruby', 1527: 'Large Diamond', 1528: 'Jungle Chest',
1529: 'Corruption Chest', 1530: 'Crimson Chest', 1531: 'Hallowed Chest', 1532: 'Frozen Chest',
1533: 'Jungle Key', 1534: 'Corruption Key', 1535: 'Crimson Key', 1536: 'Hallowed Key', 1537: 'Frozen Key',
1538: 'Imp Face', 1539: 'Ominous Presence', 1540: 'Shining Moon', 1541: 'Living Gore', 1542: 'Flowing Magma',
1543: 'Spectre Paintbrush', 1544: 'Spectre Paint Roller', 1545: 'Spectre Paint Scraper',
1546: 'Shroomite Headgear', 1547: 'Shroomite Mask', 1548: 'Shroomite Helmet', 1549: 'Shroomite Breastplate',
1550: 'Shroomite Leggings', 1551: 'Autohammer', 1552: 'Shroomite Bar', 1553: 'S.D.M.G.', 1554: "Cenx's Tiara",
1555: "Cenx's Breastplate", 1556: "Cenx's Leggings", 1557: "Crowno's Mask", 1558: "Crowno's Breastplate",
1559: "Crowno's Leggings", 1560: "Will's Helmet", 1561: "Will's Breastplate", 1562: "Will's Leggings",
1563: "Jim's Helmet", 1564: "Jim's Breastplate", 1565: "Jim's Leggings", 1566: "Aaron's Helmet",
1567: "Aaron's Breastplate", 1568: "Aaron's Leggings", 1569: 'Vampire Knives', 1570: 'Broken Hero Sword',
1571: 'Scourge of the Corruptor', 1572: 'Staff of the Frost Hydra', 1573: 'The Creation of the Guide',
1574: 'The Merchant', 1575: 'Crowno Devours His Lunch', 1576: 'Rare Enchantment', 1577: 'Glorious Night',
1578: 'Sweetheart Necklace', 1579: 'Flurry Boots', 1580: "D-Town's Helmet", 1581: "D-Town's Breastplate",
1582: "D-Town's Leggings", 1583: "D-Town's Wings", 1584: "Will's Wings", 1585: "Crowno's Wings",
1586: "Cenx's Wings", 1587: "Cenx's Dress", 1588: "Cenx's Dress Pants", 1589: 'Palladium Column',
1590: 'Palladium Column Wall', 1591: 'Bubblegum Block', 1592: 'Bubblegum Block Wall', 1593: 'Titanstone Block',
1594: 'Titanstone Block Wall', 1595: 'Magic Cuffs', 1596: 'Music Box (Snow)', 1597: 'Music Box (Space)',
1598: 'Music Box (Crimson)', 1599: 'Music Box (Boss 4)', 1600: 'Music Box (Alt Overworld Day)',
1601: 'Music Box (Rain)', 1602: 'Music Box (Ice)', 1603: 'Music Box (Desert)', 1604: 'Music Box (Ocean)',
1605: 'Music Box (Dungeon)', 1606: 'Music Box (Plantera)', 1607: 'Music Box (Boss 5)',
1608: 'Music Box (Temple)', 1609: 'Music Box (Eclipse)', 1610: 'Music Box (Mushrooms)', 1611: 'Butterfly Dust',
1612: 'Ankh Charm', 1613: 'Ankh Shield', 1614: 'Blue Flare', 1615: 'Angler Fish Banner',
1616: 'Angry Nimbus Banner', 1617: 'Anomura Fungus Banner', 1618: 'Antlion Banner', 1619: 'Arapaima Banner',
1620: 'Armored Skeleton Banner', 1621: 'Bat Banner', 1622: 'Bird Banner', 1623: 'Black Recluse Banner',
1624: 'Blood Feeder Banner', 1625: 'Blood Jelly Banner', 1626: 'Blood Crawler Banner',
1627: 'Bone Serpent Banner', 1628: 'Bunny Banner', 1629: 'Chaos Elemental Banner', 1630: 'Mimic Banner',
1631: 'Clown Banner', 1632: 'Corrupt Bunny Banner', 1633: 'Corrupt Goldfish Banner', 1634: 'Crab Banner',
1635: 'Crimera Banner', 1636: 'Crimson Axe Banner', 1637: 'Cursed Hammer Banner', 1638: 'Demon Banner',
1639: 'Demon Eye Banner', 1640: 'Derpling Banner', 1641: 'Eater of Souls Banner',
1642: 'Enchanted Sword Banner', 1643: 'Zombie Eskimo Banner', 1644: 'Face Monster Banner',
1645: 'Floaty Gross Banner', 1646: 'Flying Fish Banner', 1647: 'Flying Snake Banner',
1648: 'Frankenstein Banner', 1649: 'Fungi Bulb Banner', 1650: 'Fungo Fish Banner', 1651: 'Gastropod Banner',
1652: 'Goblin Archer Banner', 1653: 'Goblin Sorcerer Banner', 1654: 'Goblin Scout Banner',
1655: 'Goblin Thief Banner', 1656: 'Goblin Warrior Banner', 1657: 'Goldfish Banner', 1658: 'Harpy Banner',
1659: 'Hellbat Banner', 1660: 'Herpling Banner', 1661: 'Hornet Banner', 1662: 'Ice Elemental Banner',
1663: 'Icy Merman Banner', 1664: 'Fire Imp Banner', 1665: 'Jellyfish Banner', 1666: 'Jungle Creeper Banner',
1667: 'Lihzahrd Banner', 1668: 'Man Eater Banner', 1669: 'Meteor Head Banner', 1670: 'Moth Banner',
1671: 'Mummy Banner', 1672: 'Mushi Ladybug Banner', 1673: 'Parrot Banner', 1674: 'Pigron Banner',
1675: 'Piranha Banner', 1676: 'Pirate Banner', 1677: 'Pixie Banner', 1678: 'Raincoat Zombie Banner',
1679: 'Reaper Banner', 1680: 'Shark Banner', 1681: 'Skeleton Banner', 1682: 'Skeleton Mage Banner',
1683: 'Slime Banner', 1684: 'Snow Flinx Banner', 1685: 'Spider Banner', 1686: 'Spore Zombie Banner',
1687: 'Swamp Thing Banner', 1688: 'Tortoise Banner', 1689: 'Toxic Sludge Banner',
1690: 'Umbrella Slime Banner', 1691: 'Unicorn Banner', 1692: 'Vampire Banner', 1693: 'Vulture Banner',
1694: 'Nymph Banner', 1695: 'Werewolf Banner', 1696: 'Wolf Banner', 1697: 'World Feeder Banner',
1698: 'Worm Banner', 1699: 'Wraith Banner', 1700: 'Wyvern Banner', 1701: 'Zombie Banner',
1702: 'Glass Platform', 1703: 'Glass Chair', 1704: 'Golden Chair', 1705: 'Golden Toilet', 1706: 'Bar Stool',
1707: 'Honey Chair', 1708: 'Steampunk Chair', 1709: 'Glass Door', 1710: 'Golden Door', 1711: 'Honey Door',
1712: 'Steampunk Door', 1713: 'Glass Table', 1714: 'Banquet Table', 1715: 'Bar', 1716: 'Golden Table',
1717: 'Honey Table', 1718: 'Steampunk Table', 1719: 'Glass Bed', 1720: 'Golden Bed', 1721: 'Honey Bed',
1722: 'Steampunk Bed', 1723: 'Living Wood Wall', 1724: 'Fart in a Jar', 1725: 'Pumpkin', 1726: 'Pumpkin Wall',
1727: 'Hay', 1728: 'Hay Wall', 1729: 'Spooky Wood', 1730: 'Spooky Wood Wall', 1731: 'Pumpkin Helmet',
1732: 'Pumpkin Breastplate', 1733: 'Pumpkin Leggings', 1734: 'Candy Apple', 1735: 'Soul Cake',
1736: 'Nurse Hat', 1737: 'Nurse Shirt', 1738: 'Nurse Pants', 1739: "Wizard's Hat", 1740: 'Guy Fawkes Mask',
1741: 'Dye Trader Robe', 1742: 'Steampunk Goggles', 1743: 'Cyborg Helmet', 1744: 'Cyborg Shirt',
1745: 'Cyborg Pants', 1746: 'Creeper Mask', 1747: 'Creeper Shirt', 1748: 'Creeper Pants', 1749: 'Cat Mask',
1750: 'Cat Shirt', 1751: 'Cat Pants', 1752: 'Ghost Mask', 1753: 'Ghost Shirt', 1754: 'Pumpkin Mask',
1755: 'Pumpkin Shirt', 1756: 'Pumpkin Pants', 1757: 'Robot Mask', 1758: 'Robot Shirt', 1759: 'Robot Pants',
1760: 'Unicorn Mask', 1761: 'Unicorn Shirt', 1762: 'Unicorn Pants', 1763: 'Vampire Mask',
1764: 'Vampire Shirt', 1765: 'Vampire Pants', 1766: 'Witch Hat', 1767: 'Leprechaun Hat',
1768: 'Leprechaun Shirt', 1769: 'Leprechaun Pants', 1770: 'Pixie Shirt', 1771: 'Pixie Pants',
1772: 'Princess Hat', 1773: 'Princess Dress', 1774: 'Goodie Bag', 1775: 'Witch Dress', 1776: 'Witch Boots',
1777: 'Bride of Frankenstein Mask', 1778: 'Bride of Frankenstein Dress', 1779: 'Karate Tortoise Mask',
1780: 'Karate Tortoise Shirt', 1781: 'Karate Tortoise Pants', 1782: 'Candy Corn Rifle', 1783: 'Candy Corn',
1784: "Jack 'O Lantern Launcher", 1785: "Explosive Jack 'O Lantern", 1786: 'Sickle', 1787: 'Pumpkin Pie',
1788: 'Scarecrow Hat', 1789: 'Scarecrow Shirt', 1790: 'Scarecrow Pants', 1791: 'Cauldron',
1792: 'Pumpkin Chair', 1793: 'Pumpkin Door', 1794: 'Pumpkin Table', 1795: 'Pumpkin Work Bench',
1796: 'Pumpkin Platform', 1797: 'Tattered Fairy Wings', 1798: 'Spider Egg', 1799: 'Magical Pumpkin Seed',
1800: 'Bat Hook', 1801: 'Bat Scepter', 1802: 'Raven Staff', 1803: 'Jungle Key Mold',
1804: 'Corruption Key Mold', 1805: 'Crimson Key Mold', 1806: 'Hallowed Key Mold', 1807: 'Frozen Key Mold',
1808: "Hanging Jack 'O Lantern", 1809: 'Rotten Egg', 1810: 'Unlucky Yarn', 1811: 'Black Fairy Dust',
1812: 'Jackelier', 1813: "Jack 'O Lantern", 1814: 'Spooky Chair', 1815: 'Spooky Door', 1816: 'Spooky Table',
1817: 'Spooky Work Bench', 1818: 'Spooky Platform', 1819: 'Reaper Hood', 1820: 'Reaper Robe', 1821: 'Fox Mask',
1822: 'Fox Shirt', 1823: 'Fox Pants', 1824: 'Cat Ears', 1825: 'Bloody Machete', 1826: "The Horseman's Blade",
1827: 'Bladed Glove', 1828: 'Pumpkin Seed', 1829: 'Spooky Hook', 1830: 'Spooky Wings', 1831: 'Spooky Twig',
1832: 'Spooky Helmet', 1833: 'Spooky Breastplate', 1834: 'Spooky Leggings', 1835: 'Stake Launcher',
1836: 'Stake', 1837: 'Cursed Sapling', 1838: 'Space Creature Mask', 1839: 'Space Creature Shirt',
1840: 'Space Creature Pants', 1841: 'Wolf Mask', 1842: 'Wolf Shirt', 1843: 'Wolf Pants',
1844: 'Pumpkin Moon Medallion', 1845: 'Necromantic Scroll', 1846: 'Jacking Skeletron', 1847: 'Bitter Harvest',
1848: 'Blood Moon Countess', 1849: "Hallow's Eve", 1850: 'Morbid Curiosity', 1851: 'Treasure Hunter Shirt',
1852: 'Treasure Hunter Pants', 1853: 'Dryad Coverings', 1854: 'Dryad Loincloth', 1855: 'Mourning Wood Trophy',
1856: 'Pumpking Trophy', 1857: "Jack 'O Lantern Mask", 1858: 'Sniper Scope', 1859: 'Heart Lantern',
1860: 'Jellyfish Diving Gear', 1861: 'Arctic Diving Gear', 1862: 'Frostspark Boots', 1863: 'Fart in a Balloon',
1864: 'Papyrus Scarab', 1865: 'Celestial Stone', 1866: 'Hoverboard', 1867: 'Candy Cane', 1868: 'Sugar Plum',
1869: 'Present', 1870: 'Red Ryder', 1871: 'Festive Wings', 1872: 'Pine Tree Block', 1873: 'Christmas Tree',
1874: 'Star Topper 1', 1875: 'Star Topper 2', 1876: 'Star Topper 3', 1877: 'Bow Topper', 1878: 'White Garland',
1879: 'White and Red Garland', 1880: 'Red Garland', 1881: 'Red and Green Garland', 1882: 'Green Garland',
1883: 'Green and White Garland', 1884: 'Multicolored Bulb', 1885: 'Red Bulb', 1886: 'Yellow Bulb',
1887: 'Green Bulb', 1888: 'Red and Green Bulb', 1889: 'Yellow and Green Bulb', 1890: 'Red and Yellow Bulb',
1891: 'White Bulb', 1892: 'White and Red Bulb', 1893: 'White and Yellow Bulb', 1894: 'White and Green Bulb',
1895: 'Multicolored Lights', 1896: 'Red Lights', 1897: 'Green Lights', 1898: 'Blue Lights',
1899: 'Yellow Lights', 1900: 'Red and Yellow Lights', 1901: 'Red and Green Lights',
1902: 'Yellow and Green Lights', 1903: 'Blue and Green Lights', 1904: 'Red and Blue Lights',
1905: 'Blue and Yellow Lights', 1906: 'Giant Bow', 1907: 'Reindeer Antlers', 1908: 'Holly',
1909: 'Candy Cane Sword', 1910: 'Elf Melter', 1911: 'Christmas Pudding', 1912: 'Eggnog', 1913: 'Star Anise',
1914: 'Reindeer Bells', 1915: 'Candy Cane Hook', 1916: 'Christmas Hook', 1917: 'Candy Cane Pickaxe',
1918: 'Fruitcake Chakram', 1919: 'Sugar Cookie', 1920: 'Gingerbread Cookie', 1921: 'Hand Warmer', 1922: 'Coal',
1923: 'Toolbox', 1924: 'Pine Door', 1925: 'Pine Chair', 1926: 'Pine Table', 1927: 'Dog Whistle',
1928: 'Christmas Tree Sword', 1929: 'Chain Gun', 1930: 'Razorpine', 1931: 'Blizzard Staff',
1932: 'Mrs. Claus Hat', 1933: 'Mrs. Claus Shirt', 1934: 'Mrs. Claus Heels', 1935: 'Parka Hood',
1936: 'Parka Coat', 1937: 'Parka Pants', 1938: 'Snow Hat', 1939: 'Ugly Sweater', 1940: 'Tree Mask',
1941: 'Tree Shirt', 1942: 'Tree Trunks', 1943: 'Elf Hat', 1944: 'Elf Shirt', 1945: 'Elf Pants',
1946: 'Snowman Cannon', 1947: 'North Pole', 1948: 'Christmas Tree Wallpaper', 1949: 'Ornament Wallpaper',
1950: 'Candy Cane Wallpaper', 1951: 'Festive Wallpaper', 1952: 'Stars Wallpaper', 1953: 'Squiggles Wallpaper',
1954: 'Snowflake Wallpaper', 1955: 'Krampus Horn Wallpaper', 1956: 'Bluegreen Wallpaper',
1957: 'Grinch Finger Wallpaper', 1958: 'Naughty Present', 1959: "Baby Grinch's Mischief Whistle",
1960: 'Ice Queen Trophy', 1961: 'Santa-NK1 Trophy', 1962: 'Everscream Trophy',
1963: 'Music Box (Pumpkin Moon)', 1964: 'Music Box (Alt Underground)', 1965: 'Music Box (Frost Moon)',
1966: 'Brown Paint', 1967: 'Shadow Paint', 1968: 'Negative Paint', 1969: 'Team Dye',
1970: 'Amethyst Gemspark Block', 1971: 'Topaz Gemspark Block', 1972: 'Sapphire Gemspark Block',
1973: 'Emerald Gemspark Block', 1974: 'Ruby Gemspark Block', 1975: 'Diamond Gemspark Block',
1976: 'Amber Gemspark Block', 1977: 'Life Hair Dye', 1978: 'Mana Hair Dye', 1979: 'Depth Hair Dye',
1980: 'Money Hair Dye', 1981: 'Time Hair Dye', 1982: 'Team Hair Dye', 1983: 'Biome Hair Dye',
1984: 'Party Hair Dye', 1985: 'Rainbow Hair Dye', 1986: 'Speed Hair Dye', 1987: 'Angel Halo', 1988: 'Fez',
1989: 'Womannequin', 1990: 'Hair Dye Remover', 1991: 'Bug Net', 1992: 'Firefly', 1993: 'Firefly in a Bottle',
1994: 'Monarch Butterfly', 1995: 'Purple Emperor Butterfly', 1996: 'Red Admiral Butterfly',
1997: 'Ulysses Butterfly', 1998: 'Sulphur Butterfly', 1999: 'Tree Nymph Butterfly',
2000: 'Zebra Swallowtail Butterfly', 2001: 'Julia Butterfly', 2002: 'Worm', 2003: 'Mouse',
2004: 'Lightning Bug', 2005: 'Lightning Bug in a Bottle', 2006: 'Snail', 2007: 'Glowing Snail',
2008: 'Fancy Grey Wallpaper', 2009: 'Ice Floe Wallpaper', 2010: 'Music Wallpaper',
2011: 'Purple Rain Wallpaper', 2012: 'Rainbow Wallpaper', 2013: 'Sparkle Stone Wallpaper',
2014: 'Starlit Heaven Wallpaper', 2015: 'Bird', 2016: 'Blue Jay', 2017: 'Cardinal', 2018: 'Squirrel',
2019: 'Bunny', 2020: 'Cactus Bookcase', 2021: 'Ebonwood Bookcase', 2022: 'Flesh Bookcase',
2023: 'Honey Bookcase', 2024: 'Steampunk Bookcase', 2025: 'Glass Bookcase', 2026: 'Rich Mahogany Bookcase',
2027: 'Pearlwood Bookcase', 2028: 'Spooky Bookcase', 2029: 'Skyware Bookcase', 2030: 'Lihzahrd Bookcase',
2031: 'Frozen Bookcase', 2032: 'Cactus Lantern', 2033: 'Ebonwood Lantern', 2034: 'Flesh Lantern',
2035: 'Honey Lantern', 2036: 'Steampunk Lantern', 2037: 'Glass Lantern', 2038: 'Rich Mahogany Lantern',
2039: 'Pearlwood Lantern', 2040: 'Frozen Lantern', 2041: 'Lihzahrd Lantern', 2042: 'Skyware Lantern',
2043: 'Spooky Lantern', 2044: 'Frozen Door', 2045: 'Cactus Candle', 2046: 'Ebonwood Candle',
2047: 'Flesh Candle', 2048: 'Glass Candle', 2049: 'Frozen Candle', 2050: 'Rich Mahogany Candle',
2051: 'Pearlwood Candle', 2052: 'Lihzahrd Candle', 2053: 'Skyware Candle', 2054: 'Pumpkin Candle',
2055: 'Cactus Chandelier', 2056: 'Ebonwood Chandelier', 2057: 'Flesh Chandelier', 2058: 'Honey Chandelier',
2059: 'Frozen Chandelier', 2060: 'Rich Mahogany Chandelier', 2061: 'Pearlwood Chandelier',
2062: 'Lihzahrd Chandelier', 2063: 'Skyware Chandelier', 2064: 'Spooky Chandelier', 2065: 'Glass Chandelier',
2066: 'Cactus Bed', 2067: 'Flesh Bed', 2068: 'Frozen Bed', 2069: 'Lihzahrd Bed', 2070: 'Skyware Bed',
2071: 'Spooky Bed', 2072: 'Cactus Bathtub', 2073: 'Ebonwood Bathtub', 2074: 'Flesh Bathtub',
2075: 'Glass Bathtub', 2076: 'Frozen Bathtub', 2077: 'Rich Mahogany Bathtub', 2078: 'Pearlwood Bathtub',
2079: 'Lihzahrd Bathtub', 2080: 'Skyware Bathtub', 2081: 'Spooky Bathtub', 2082: 'Cactus Lamp',
2083: 'Ebonwood Lamp', 2084: 'Flesh Lamp', 2085: 'Glass Lamp', 2086: 'Frozen Lamp', 2087: 'Rich Mahogany Lamp',
2088: 'Pearlwood Lamp', 2089: 'Lihzahrd Lamp', 2090: 'Skyware Lamp', 2091: 'Spooky Lamp',
2092: 'Cactus Candelabra', 2093: 'Ebonwood Candelabra', 2094: 'Flesh Candelabra', 2095: 'Honey Candelabra',
2096: 'Steampunk Candelabra', 2097: 'Glass Candelabra', 2098: 'Rich Mahogany Candelabra',
2099: 'Pearlwood Candelabra', 2100: 'Frozen Candelabra', 2101: 'Lihzahrd Candelabra',
2102: 'Skyware Candelabra', 2103: 'Spooky Candelabra', 2104: 'Brain of Cthulhu Mask',
2105: 'Wall of Flesh Mask', 2106: 'Twin Mask', 2107: 'Skeletron Prime Mask', 2108: 'Queen Bee Mask',
2109: 'Plantera Mask', 2110: 'Golem Mask', 2111: 'Eater of Worlds Mask', 2112: 'Eye of Cthulhu Mask',
2113: 'Destroyer Mask', 2114: 'Blacksmith Rack', 2115: 'Carpentry Rack', 2116: 'Helmet Rack',
2117: 'Spear Rack', 2118: 'Sword Rack', 2119: 'Stone Slab', 2120: 'Sandstone Slab', 2121: 'Frog',
2122: 'Mallard Duck', 2123: 'Duck', 2124: 'Honey Bathtub', 2125: 'Steampunk Bathtub',
2126: 'Living Wood Bathtub', 2127: 'Shadewood Bathtub', 2128: 'Bone Bathtub', 2129: 'Honey Lamp',
2130: 'Steampunk Lamp', 2131: 'Living Wood Lamp', 2132: 'Shadewood Lamp', 2133: 'Golden Lamp',
2134: 'Bone Lamp', 2135: 'Living Wood Bookcase', 2136: 'Shadewood Bookcase', 2137: 'Golden Bookcase',
2138: 'Bone Bookcase', 2139: 'Living Wood Bed', 2140: 'Bone Bed', 2141: 'Living Wood Chandelier',
2142: 'Shadewood Chandelier', 2143: 'Golden Chandelier', 2144: 'Bone Chandelier', 2145: 'Living Wood Lantern',
2146: 'Shadewood Lantern', 2147: 'Golden Lantern', 2148: 'Bone Lantern', 2149: 'Living Wood Candelabra',
2150: 'Shadewood Candelabra', 2151: 'Golden Candelabra', 2152: 'Bone Candelabra', 2153: 'Living Wood Candle',
2154: 'Shadewood Candle', 2155: 'Golden Candle', 2156: 'Black Scorpion', 2157: 'Scorpion',
2158: 'Bubble Wallpaper', 2159: 'Copper Pipe Wallpaper', 2160: 'Ducky Wallpaper', 2161: 'Frost Core',
2162: 'Bunny Cage', 2163: 'Squirrel Cage', 2164: 'Mallard Duck Cage', 2165: 'Duck Cage', 2166: 'Bird Cage',
2167: 'Blue Jay Cage', 2168: 'Cardinal Cage', 2169: 'Waterfall Wall', 2170: 'Lavafall Wall',
2171: 'Crimson Seeds', 2172: 'Heavy Work Bench', 2173: 'Copper Plating', 2174: 'Snail Cage',
2175: 'Glowing Snail Cage', 2176: 'Shroomite Digging Claw', 2177: 'Ammo Box', 2178: 'Monarch Butterfly Jar',
2179: 'Purple Emperor Butterfly Jar', 2180: 'Red Admiral Butterfly Jar', 2181: 'Ulysses Butterfly Jar',
2182: 'Sulphur Butterfly Jar', 2183: 'Tree Nymph Butterfly Jar', 2184: 'Zebra Swallowtail Butterfly Jar',
2185: 'Julia Butterfly Jar', 2186: 'Scorpion Cage', 2187: 'Black Scorpion Cage', 2188: 'Venom Staff',
2189: 'Spectre Mask', 2190: 'Frog Cage', 2191: 'Mouse Cage', 2192: 'Bone Welder', 2193: 'Flesh Cloning Vat',
2194: 'Glass Kiln', 2195: 'Lihzahrd Furnace', 2196: 'Living Loom', 2197: 'Sky Mill', 2198: 'Ice Machine',
2199: 'Beetle Helmet', 2200: 'Beetle Scale Mail', 2201: 'Beetle Shell', 2202: 'Beetle Leggings',
2203: 'Steampunk Boiler', 2204: 'Honey Dispenser', 2205: 'Penguin', 2206: 'Penguin Cage', 2207: 'Worm Cage',
2208: 'Terrarium', 2209: 'Super Mana Potion', 2210: 'Ebonwood Fence', 2211: 'Rich Mahogany Fence',
2212: 'Pearlwood Fence', 2213: 'Shadewood Fence', 2214: 'Brick Layer', 2215: 'Extendo Grip',
2216: 'Paint Sprayer', 2217: 'Portable Cement Mixer', 2218: 'Beetle Husk', 2219: 'Celestial Magnet',
2220: 'Celestial Emblem', 2221: 'Celestial Cuffs', 2222: "Peddler's Hat", 2223: 'Pulse Bow',
2224: 'Large Dynasty Lantern', 2225: 'Dynasty Lamp', 2226: 'Dynasty Lantern', 2227: 'Large Dynasty Candle',
2228: 'Dynasty Chair', 2229: 'Dynasty Work Bench', 2230: 'Dynasty Chest', 2231: 'Dynasty Bed',
2232: 'Dynasty Bathtub', 2233: 'Dynasty Bookcase', 2234: 'Dynasty Cup', 2235: 'Dynasty Bowl',
2236: 'Dynasty Candle', 2237: 'Dynasty Clock', 2238: 'Golden Clock', 2239: 'Glass Clock', 2240: 'Honey Clock',
2241: 'Steampunk Clock', 2242: 'Fancy Dishes', 2243: 'Glass Bowl', 2244: 'Wine Glass',
2245: 'Living Wood Piano', 2246: 'Flesh Piano', 2247: 'Frozen Piano', 2248: 'Frozen Table',
2249: 'Honey Chest', 2250: 'Steampunk Chest', 2251: 'Honey Work Bench', 2252: 'Frozen Work Bench',
2253: 'Steampunk Work Bench', 2254: 'Glass Piano', 2255: 'Honey Piano', 2256: 'Steampunk Piano',
2257: 'Honey Cup', 2258: 'Chalice', 2259: 'Dynasty Table', 2260: 'Dynasty Wood', 2261: 'Red Dynasty Shingles',
2262: 'Blue Dynasty Shingles', 2263: 'White Dynasty Wall', 2264: 'Blue Dynasty Wall', 2265: 'Dynasty Door',
2266: 'Sake', 2267: 'Pad Thai', 2268: 'Pho', 2269: 'Revolver', 2270: 'Gatligator', 2271: 'Arcane Rune Wall',
2272: 'Water Gun', 2273: 'Katana', 2274: 'Ultrabright Torch', 2275: 'Magic Hat', 2276: 'Diamond Ring',
2277: 'Gi', 2278: 'Kimono', 2279: 'Gypsy Robe', 2280: 'Beetle Wings', 2281: 'Tiger Skin', 2282: 'Leopard Skin',
2283: 'Zebra Skin', 2284: 'Crimson Cloak', 2285: 'Mysterious Cape', 2286: 'Red Cape', 2287: 'Winter Cape',
2288: 'Frozen Chair', 2289: 'Wood Fishing Pole', 2290: 'Bass', 2291: 'Reinforced Fishing Pole',
2292: 'Fiberglass Fishing Pole', 2293: 'Fisher of Souls', 2294: 'Golden Fishing Rod', 2295: "Mechanic's Rod",
2296: "Sitting Duck's Fishing Pole", 2297: 'Trout', 2298: 'Salmon', 2299: 'Atlantic Cod', 2300: 'Tuna',
2301: 'Red Snapper', 2302: 'Neon Tetra', 2303: 'Armored Cavefish', 2304: 'Damselfish',
2305: 'Crimson Tigerfish', 2306: 'Frost Minnow', 2307: 'Princess Fish', 2308: 'Golden Carp',
2309: 'Specular Fish', 2310: 'Prismite', 2311: 'Variegated Lardfish', 2312: 'Flarefin Koi', 2313: 'Double Cod',
2314: 'Honeyfin', 2315: 'Obsidifish', 2316: 'Shrimp', 2317: 'Chaos Fish', 2318: 'Ebonkoi', 2319: 'Hemopiranha',
2320: 'Rockfish', 2321: 'Stinkfish', 2322: 'Mining Potion', 2323: 'Heartreach Potion', 2324: 'Calming Potion',
2325: 'Builder Potion', 2326: 'Titan Potion', 2327: 'Flipper Potion', 2328: 'Summoning Potion',
2329: 'Dangersense Potion', 2330: 'Purple Clubberfish', 2331: 'Obsidian Swordfish', 2332: 'Swordfish',
2333: 'Iron Fence', 2334: 'Wooden Crate', 2335: 'Iron Crate', 2336: 'Golden Crate', 2337: 'Old Shoe',
2338: 'Seaweed', 2339: 'Tin Can', 2340: 'Minecart Track', 2341: 'Reaver Shark', 2342: 'Sawtooth Shark',
2343: 'Minecart', 2344: 'Ammo Reservation Potion', 2345: 'Lifeforce Potion', 2346: 'Endurance Potion',
2347: 'Rage Potion', 2348: 'Inferno Potion', 2349: 'Wrath Potion', 2350: 'Recall Potion',
2351: 'Teleportation Potion', 2352: 'Love Potion', 2353: 'Stink Potion', 2354: 'Fishing Potion',
2355: 'Sonar Potion', 2356: 'Crate Potion', 2357: 'Shiverthorn Seeds', 2358: 'Shiverthorn',
2359: 'Warmth Potion', 2360: 'Fish Hook', 2361: 'Bee Headgear', 2362: 'Bee Breastplate', 2363: 'Bee Greaves',
2364: 'Hornet Staff', 2365: 'Imp Staff', 2366: 'Queen Spider Staff', 2367: 'Angler Hat', 2368: 'Angler Vest',
2369: 'Angler Pants', 2370: 'Spider Mask', 2371: 'Spider Breastplate', 2372: 'Spider Greaves',
2373: 'High Test Fishing Line', 2374: 'Angler Earring', 2375: 'Tackle Box', 2376: 'Blue Dungeon Piano',
2377: 'Green Dungeon Piano', 2378: 'Pink Dungeon Piano', 2379: 'Golden Piano', 2380: 'Obsidian Piano',
2381: 'Bone Piano', 2382: 'Cactus Piano', 2383: 'Spooky Piano', 2384: 'Skyware Piano', 2385: 'Lihzahrd Piano',
2386: 'Blue Dungeon Dresser', 2387: 'Green Dungeon Dresser', 2388: 'Pink Dungeon Dresser',
2389: 'Golden Dresser', 2390: 'Obsidian Dresser', 2391: 'Bone Dresser', 2392: 'Cactus Dresser',
2393: 'Spooky Dresser', 2394: 'Skyware Dresser', 2395: 'Honey Dresser', 2396: 'Lihzahrd Dresser', 2397: 'Sofa',
2398: 'Ebonwood Sofa', 2399: 'Rich Mahogany Sofa', 2400: 'Pearlwood Sofa', 2401: 'Shadewood Sofa',
2402: 'Blue Dungeon Sofa', 2403: 'Green Dungeon Sofa', 2404: 'Pink Dungeon Sofa', 2405: 'Golden Sofa',
2406: 'Obsidian Sofa', 2407: 'Bone Sofa', 2408: 'Cactus Sofa', 2409: 'Spooky Sofa', 2410: 'Skyware Sofa',
2411: 'Honey Sofa', 2412: 'Steampunk Sofa', 2413: 'Mushroom Sofa', 2414: 'Glass Sofa', 2415: 'Pumpkin Sofa',
2416: 'Lihzahrd Sofa', 2417: 'Seashell Hairpin', 2418: 'Mermaid Adornment', 2419: 'Mermaid Tail',
2420: 'Zephyr Fish', 2421: 'Fleshcatcher', 2422: 'Hotline Fishing Hook', 2423: 'Frog Leg', 2424: 'Anchor',
2425: 'Cooked Fish', 2426: 'Cooked Shrimp', 2427: 'Sashimi', 2428: 'Fuzzy Carrot', 2429: 'Scaly Truffle',
2430: 'Slimy Saddle', 2431: 'Bee Wax', 2432: 'Copper Plating Wall', 2433: 'Stone Slab Wall', 2434: 'Sail',
2435: 'Coralstone Block', 2436: 'Blue Jellyfish', 2437: 'Green Jellyfish', 2438: 'Pink Jellyfish',
2439: 'Blue Jellyfish Jar', 2440: 'Green Jellyfish Jar', 2441: 'Pink Jellyfish Jar', 2442: 'Life Preserver',
2443: "Ship's Wheel", 2444: 'Compass Rose', 2445: 'Wall Anchor', 2446: 'Goldfish Trophy',
2447: 'Bunnyfish Trophy', 2448: 'Swordfish Trophy', 2449: 'Sharkteeth Trophy', 2450: 'Batfish',
2451: 'Bumblebee Tuna', 2452: 'Catfish', 2453: 'Cloudfish', 2454: 'Cursedfish', 2455: 'Dirtfish',
2456: 'Dynamite Fish', 2457: 'Eater of Plankton', 2458: 'Fallen Starfish', 2459: 'The Fish of Cthulhu',
2460: 'Fishotron', 2461: 'Harpyfish', 2462: 'Hungerfish', 2463: 'Ichorfish', 2464: 'Jewelfish',
2465: 'Mirage Fish', 2466: 'Mutant Flinxfin', 2467: 'Pengfish', 2468: 'Pixiefish', 2469: 'Spiderfish',
2470: 'Tundra Trout', 2471: 'Unicorn Fish', 2472: 'Guide Voodoo Fish', 2473: 'Wyverntail', 2474: 'Zombie Fish',
2475: 'Amanitia Fungifin', 2476: 'Angelfish', 2477: 'Bloody Manowar', 2478: 'Bonefish', 2479: 'Bunnyfish',
2480: "Cap'n Tunabeard", 2481: 'Clownfish', 2482: 'Demonic Hellfish', 2483: 'Derpfish', 2484: 'Fishron',
2485: 'Infected Scabbardfish', 2486: 'Mudfish', 2487: 'Slimefish', 2488: 'Tropical Barracuda',
2489: 'King Slime Trophy', 2490: 'Ship in a Bottle', 2491: 'Hardy Saddle', 2492: 'Pressure Plate Track',
2493: 'King Slime Mask', 2494: 'Fin Wings', 2495: 'Treasure Map', 2496: 'Seaweed Planter',
2497: 'Pillagin Me Pixels', 2498: 'Fish Costume Mask', 2499: 'Fish Costume Shirt',
2500: 'Fish Costume Finskirt', 2501: 'Ginger Beard', 2502: 'Honeyed Goggles', 2503: 'Boreal Wood',
2504: 'Palm Wood', 2505: 'Boreal Wood Wall', 2506: 'Palm Wood Wall', 2507: 'Boreal Wood Fence',
2508: 'Palm Wood Fence', 2509: 'Boreal Wood Helmet', 2510: 'Boreal Wood Breastplate',
2511: 'Boreal Wood Greaves', 2512: 'Palm Wood Helmet', 2513: 'Palm Wood Breastplate',
2514: 'Palm Wood Greaves', 2515: 'Palm Wood Bow', 2516: 'Palm Wood Hammer', 2517: 'Palm Wood Sword',
2518: 'Palm Wood Platform', 2519: 'Palm Wood Bathtub', 2520: 'Palm Wood Bed', 2521: 'Palm Wood Bench',
2522: 'Palm Wood Candelabra', 2523: 'Palm Wood Candle', 2524: 'Palm Wood Chair', 2525: 'Palm Wood Chandelier',
2526: 'Palm Wood Chest', 2527: 'Palm Wood Sofa', 2528: 'Palm Wood Door', 2529: 'Palm Wood Dresser',
2530: 'Palm Wood Lantern', 2531: 'Palm Wood Piano', 2532: 'Palm Wood Table', 2533: 'Palm Wood Lamp',
2534: 'Palm Wood Work Bench', 2535: 'Optic Staff', 2536: 'Palm Wood Bookcase', 2537: 'Mushroom Bathtub',
2538: 'Mushroom Bed', 2539: 'Mushroom Bench', 2540: 'Mushroom Bookcase', 2541: 'Mushroom Candelabra',
2542: 'Mushroom Candle', 2543: 'Mushroom Chandelier', 2544: 'Mushroom Chest', 2545: 'Mushroom Dresser',
2546: 'Mushroom Lantern', 2547: 'Mushroom Lamp', 2548: 'Mushroom Piano', 2549: 'Mushroom Platform',
2550: 'Mushroom Table', 2551: 'Spider Staff', 2552: 'Boreal Wood Bathtub', 2553: 'Boreal Wood Bed',
2554: 'Boreal Wood Bookcase', 2555: 'Boreal Wood Candelabra', 2556: 'Boreal Wood Candle',
2557: 'Boreal Wood Chair', 2558: 'Boreal Wood Chandelier', 2559: 'Boreal Wood Chest',
2560: 'Boreal Wood Clock', 2561: 'Boreal Wood Door', 2562: 'Boreal Wood Dresser', 2563: 'Boreal Wood Lamp',
2564: 'Boreal Wood Lantern', 2565: 'Boreal Wood Piano', 2566: 'Boreal Wood Platform', 2567: 'Slime Bathtub',
2568: 'Slime Bed', 2569: 'Slime Bookcase', 2570: 'Slime Candelabra', 2571: 'Slime Candle', 2572: 'Slime Chair',
2573: 'Slime Chandelier', 2574: 'Slime Chest', 2575: 'Slime Clock', 2576: 'Slime Door', 2577: 'Slime Dresser',
2578: 'Slime Lamp', 2579: 'Slime Lantern', 2580: 'Slime Piano', 2581: 'Slime Platform', 2582: 'Slime Sofa',
2583: 'Slime Table', 2584: 'Pirate Staff', 2585: 'Slime Hook', 2586: 'Sticky Grenade', 2587: 'Tartar Sauce',
2588: 'Duke Fishron Mask', 2589: 'Duke Fishron Trophy', 2590: 'Molotov Cocktail', 2591: 'Bone Clock',
2592: 'Cactus Clock', 2593: 'Ebonwood Clock', 2594: 'Frozen Clock', 2595: 'Lihzahrd Clock',
2596: 'Living Wood Clock', 2597: 'Rich Mahogany Clock', 2598: 'Flesh Clock', 2599: 'Mushroom Clock',
2600: 'Obsidian Clock', 2601: 'Palm Wood Clock', 2602: 'Pearlwood Clock', 2603: 'Pumpkin Clock',
2604: 'Shadewood Clock', 2605: 'Spooky Clock', 2606: 'Sunplate Clock', 2607: 'Spider Fang',
2608: 'Falcon Blade', 2609: 'Fishron Wings', 2610: 'Slime Gun', 2611: 'Flairon', 2612: 'Green Dungeon Chest',
2613: 'Pink Dungeon Chest', 2614: 'Blue Dungeon Chest', 2615: 'Bone Chest', 2616: 'Cactus Chest',
2617: 'Flesh Chest', 2618: 'Obsidian Chest', 2619: 'Pumpkin Chest', 2620: 'Spooky Chest',
2621: 'Tempest Staff', 2622: 'Razorblade Typhoon', 2623: 'Bubble Gun', 2624: 'Tsunami', 2625: 'Seashell',
2626: 'Starfish', 2627: 'Steampunk Platform', 2628: 'Skyware Platform', 2629: 'Living Wood Platform',
2630: 'Honey Platform', 2631: 'Skyware Work Bench', 2632: 'Glass Work Bench', 2633: 'Living Wood Work Bench',
2634: 'Flesh Sofa', 2635: 'Frozen Sofa', 2636: 'Living Wood Sofa', 2637: 'Pumpkin Dresser',
2638: 'Steampunk Dresser', 2639: 'Glass Dresser', 2640: 'Flesh Dresser', 2641: 'Pumpkin Lantern',
2642: 'Obsidian Lantern', 2643: 'Pumpkin Lamp', 2644: 'Obsidian Lamp', 2645: 'Blue Dungeon Lamp',
2646: 'Green Dungeon Lamp', 2647: 'Pink Dungeon Lamp', 2648: 'Honey Candle', 2649: 'Steampunk Candle',
2650: 'Spooky Candle', 2651: 'Obsidian Candle', 2652: 'Blue Dungeon Chandelier',
2653: 'Green Dungeon Chandelier', 2654: 'Pink Dungeon Chandelier', 2655: 'Steampunk Chandelier',
2656: 'Pumpkin Chandelier', 2657: 'Obsidian Chandelier', 2658: 'Blue Dungeon Bathtub',
2659: 'Green Dungeon Bathtub', 2660: 'Pink Dungeon Bathtub', 2661: 'Pumpkin Bathtub', 2662: 'Obsidian Bathtub',
2663: 'Golden Bathtub', 2664: 'Blue Dungeon Candelabra', 2665: 'Green Dungeon Candelabra',
2666: 'Pink Dungeon Candelabra', 2667: 'Obsidian Candelabra', 2668: 'Pumpkin Candelabra', 2669: 'Pumpkin Bed',
2670: 'Pumpkin Bookcase', 2671: 'Pumpkin Piano', 2672: 'Shark Statue', 2673: 'Truffle Worm',
2674: 'Apprentice Bait', 2675: 'Journeyman Bait', 2676: 'Master Bait', 2677: 'Amber Gemspark Wall',
2678: 'Offline Amber Gemspark Wall', 2679: 'Amethyst Gemspark Wall', 2680: 'Offline Amethyst Gemspark Wall',
2681: 'Diamond Gemspark Wall', 2682: 'Offline Diamond Gemspark Wall', 2683: 'Emerald Gemspark Wall',
2684: 'Offline Emerald Gemspark Wall', 2685: 'Ruby Gemspark Wall', 2686: 'Offline Ruby Gemspark Wall',
2687: 'Sapphire Gemspark Wall', 2688: 'Offline Sapphire Gemspark Wall', 2689: 'Topaz Gemspark Wall',
2690: 'Offline Topaz Gemspark Wall', 2691: 'Tin Plating Wall', 2692: 'Tin Plating', 2693: 'Waterfall Block',
2694: 'Lavafall Block', 2695: 'Confetti Block', 2696: 'Confetti Wall', 2697: 'Midnight Confetti Block',
2698: 'Midnight Confetti Wall', 2699: 'Weapon Rack', 2700: 'Fireworks Box', 2701: 'Living Fire Block',
2702: "'0' Statue", 2703: "'1' Statue", 2704: "'2' Statue", 2705: "'3' Statue", 2706: "'4' Statue",
2707: "'5' Statue", 2708: "'6' Statue", 2709: "'7' Statue", 2710: "'8' Statue", 2711: "'9' Statue",
2712: "'A' Statue", 2713: "'B' Statue", 2714: "'C' Statue", 2715: "'D' Statue", 2716: "'E' Statue",
2717: "'F' Statue", 2718: "'G' Statue", 2719: "'H' Statue", 2720: "'I' Statue", 2721: "'J' Statue",
2722: "'K' Statue", 2723: "'L' Statue", 2724: "'M' Statue", 2725: "'N' Statue", 2726: "'O' Statue",
2727: "'P' Statue", 2728: "'Q' Statue", 2729: "'R' Statue", 2730: "'S' Statue", 2731: "'T' Statue",
2732: "'U' Statue", 2733: "'V' Statue", 2734: "'W' Statue", 2735: "'X' Statue", 2736: "'Y' Statue",
2737: "'Z' Statue", 2738: 'Firework Fountain', 2739: 'Booster Track', 2740: 'Grasshopper',
2741: 'Grasshopper Cage', 2742: 'Music Box (Underground Crimson)', 2743: 'Cactus Table',
2744: 'Cactus Platform', 2745: 'Boreal Wood Sword', 2746: 'Boreal Wood Hammer', 2747: 'Boreal Wood Bow',
2748: 'Glass Chest', -2: 'Gold Broadsword', -48: 'Platinum Bow', -47: 'Platinum Hammer', -46: 'Platinum Axe',
-45: 'Platinum Shortsword', -44: 'Platinum Broadsword', -43: 'Platinum Pickaxe', -42: 'Tungsten Bow',
-41: 'Tungsten Hammer', -40: 'Tungsten Axe', -39: 'Tungsten Shortsword', -38: 'Tungsten Broadsword',
-37: 'Tungsten Pickaxe', -36: 'Lead Bow', -35: 'Lead Hammer', -34: 'Lead Axe', -33: 'Lead Shortsword',
-32: 'Lead Broadsword', -31: 'Lead Pickaxe', -30: 'Tin Bow', -29: 'Tin Hammer', -28: 'Tin Axe',
-27: 'Tin Shortsword', -26: 'Tin Broadsword', -25: 'Tin Pickaxe', -24: 'Yellow Phasesaber',
-23: 'White Phasesaber', -22: 'Purple Phasesaber', -21: 'Green Phasesaber', -20: 'Red Phasesaber',
-19: 'Blue Phasesaber', -18: 'Copper Bow', -17: 'Copper Hammer', -16: 'Copper Axe', -15: 'Copper Shortsword',
-14: 'Copper Broadsword', -13: 'Copper Pickaxe', -12: 'Silver Bow', -11: 'Silver Hammer', -10: 'Silver Axe',
-9: 'Silver Shortsword', -8: 'Silver Broadsword', -7: 'Silver Pickaxe', -6: 'Gold Bow', -5: 'Gold Hammer',
-4: 'Gold Axe', -3: 'Gold Shortsword', -1: 'Gold Pickaxe'}
stacks = {
1: 1,
2: 250,
3: 250,
4: 1,
5: 99,
6: 1,
7: 1,
8: 99,
9: 250,
10: 1,
11: 99,
12: 99,
13: 99,
14: 99,
15: 1,
16: 1,
17: 1,
18: 1,
19: 99,
20: 99,
21: 99,
22: 99,
23: 250,
24: 1,
26: 250,
27: 99,
28: 30,
29: 99,
30: 250,
32: 99,
33: 99,
34: 99,
35: 99,
36: 99,
37: 1,
38: 99,
39: 1,
40: 250,
41: 250,
42: 250,
43: 20,
44: 1,
45: 1,
46: 1,
47: 250,
48: 99,
49: 1,
50: 1,
51: 250,
52: 99,
53: 1,
54: 1,
55: 1,
56: 99,
57: 99,
59: 99,
60: 99,
61: 250,
62: 99,
63: 99,
64: 1,
65: 1,
66: 99,
67: 99,
68: 99,
69: 99,
70: 20,
71: 100,
72: 100,
73: 100,
74: 100,
76: 1,
77: 1,
78: 1,
79: 1,
80: 1,
81: 1,
82: 1,
83: 1,
84: 1,
85: 99,
86: 99,
87: 99,
88: 1,
89: 1,
90: 1,
91: 1,
92: 1,
93: 250,
95: 1,
96: 1,
97: 250,
98: 1,
99: 1,
100: 1,
101: 1,
102: 1,
103: 1,
104: 1,
105: 99,
106: 99,
107: 99,
108: 99,
109: 99,
110: 20,
111: 1,
112: 1,
113: 1,
114: 1,
115: 1,
116: 250,
117: 99,
118: 99,
119: 1,
120: 1,
121: 1,
122: 1,
123: 1,
124: 1,
125: 1,
126: 30,
127: 1,
128: 1,
129: 250,
130: 250,
131: 250,
132: 250,
133: 250,
134: 250,
135: 250,
136: 99,
137: 250,
138: 250,
139: 250,
140: 250,
141: 250,
142: 250,
143: 250,
144: 250,
145: 250,
146: 250,
147: 250,
148: 99,
149: 99,
150: 250,
151: 1,
152: 1,
153: 1,
154: 99,
155: 1,
156: 1,
157: 1,
158: 1,
159: 1,
160: 1,
161: 250,
162: 1,
163: 1,
164: 1,
165: 1,
166: 50,
167: 5,
168: 250,
169: 250,
170: 250,
172: 250,
174: 250,
175: 99,
176: 250,
177: 99,
178: 99,
179: 99,
180: 99,
181: 99,
182: 99,
183: 99,
185: 1,
186: 1,
187: 1,
188: 30,
189: 50,
190: 1,
191: 1,
192: 250,
193: 1,
194: 99,
196: 1,
197: 1,
198: 1,
199: 1,
200: 1,
201: 1,
202: 1,
203: 1,
204: 1,
205: 1,
206: 1,
207: 1,
208: 1,
209: 99,
210: 99,
211: 1,
212: 1,
213: 1,
214: 250,
215: 1,
216: 1,
217: 1,
218: 1,
219: 1,
220: 1,
221: 99,
223: 1,
225: 99,
226: 20,
227: 20,
228: 1,
229: 1,
230: 1,
231: 1,
232: 1,
233: 1,
234: 250,
235: 50,
236: 99,
237: 1,
238: 1,
239: 1,
240: 1,
241: 1,
242: 1,
243: 1,
244: 1,
245: 1,
246: 1,
247: 1,
248: 1,
249: 1,
250: 1,
251: 1,
252: 1,
253: 1,
254: 99,
255: 99,
256: 1,
257: 1,
258: 1,
259: 99,
260: 1,
262: 1,
263: 1,
264: 1,
265: 250,
266: 1,
267: 1,
268: 1,
269: 1,
270: 1,
271: 1,
272: 1,
273: 1,
274: 1,
275: 250,
276: 250,
277: 1,
278: 250,
279: 250,
280: 1,
281: 1,
282: 99,
283: 250,
284: 1,
285: 1,
286: 99,
287: 250,
288: 30,
289: 30,
290: 30,
291: 30,
292: 30,
293: 30,
294: 30,
295: 30,
296: 30,
297: 30,
298: 30,
299: 30,
300: 30,
301: 30,
302: 30,
303: 30,
304: 30,
305: 30,
306: 99,
307: 99,
308: 99,
309: 99,
310: 99,
311: 99,
312: 99,
313: 99,
314: 99,
315: 99,
316: 99,
317: 99,
318: 99,
319: 99,
320: 99,
322: 1,
323: 99,
324: 99,
325: 1,
326: 1,
327: 99,
328: 99,
329: 1,
330: 250,
332: 99,
333: 99,
334: 99,
335: 99,
336: 99,
337: 99,
338: 99,
339: 99,
340: 99,
341: 99,
342: 99,
343: 99,
344: 99,
345: 99,
346: 99,
347: 99,
348: 99,
349: 99,
352: 99,
353: 30,
354: 99,
355: 99,
357: 30,
358: 99,
360: 99,
361: 1,
362: 99,
363: 99,
364: 99,
365: 99,
366: 99,
367: 1,
368: 1,
369: 99,
370: 250,
371: 1,
372: 1,
373: 1,
374: 1,
375: 1,
376: 1,
377: 1,
378: 1,
379: 1,
380: 1,
381: 99,
382: 99,
383: 1,
384: 1,
385: 1,
386: 1,
387: 1,
388: 1,
389: 1,
390: 1,
391: 99,
392: 250,
393: 1,
394: 1,
395: 1,
396: 1,
397: 1,
398: 99,
399: 1,
400: 1,
401: 1,
402: 1,
403: 1,
404: 1,
405: 1,
406: 1,
407: 1,
408: 250,
409: 250,
410: 1,
411: 1,
412: 250,
413: 250,
414: 250,
415: 250,
416: 250,
417: 250,
418: 250,
419: 250,
420: 250,
421: 250,
422: 250,
423: 250,
424: 250,
425: 1,
426: 1,
427: 99,
428: 99,
429: 99,
430: 99,
431: 99,
432: 99,
433: 99,
434: 1,
435: 1,
436: 1,
437: 1,
438: 99,
439: 99,
440: 99,
441: 99,
442: 99,
443: 99,
444: 99,
445: 99,
446: 99,
447: 99,
448: 99,
449: 99,
450: 99,
451: 99,
452: 99,
453: 99,
454: 99,
455: 99,
456: 99,
457: 99,
458: 99,
459: 99,
460: 99,
461: 99,
462: 99,
463: 99,
464: 99,
465: 99,
466: 99,
467: 99,
468: 99,
469: 99,
470: 99,
471: 99,
472: 99,
473: 99,
474: 99,
475: 99,
476: 99,
477: 99,
478: 99,
479: 250,
480: 250,
481: 1,
482: 1,
483: 1,
484: 1,
485: 1,
486: 1,
488: 250,
489: 1,
490: 1,
491: 1,
492: 1,
493: 1,
494: 1,
495: 1,
496: 1,
497: 1,
498: 99,
499: 30,
500: 99,
501: 99,
502: 250,
503: 1,
504: 1,
505: 1,
506: 1,
507: 1,
508: 1,
509: 1,
510: 1,
511: 250,
512: 250,
513: 250,
514: 1,
515: 250,
516: 250,
517: 1,
518: 1,
519: 99,
520: 250,
521: 250,
523: 99,
524: 99,
525: 99,
526: 99,
527: 99,
528: 99,
529: 250,
530: 250,
531: 99,
532: 1,
533: 1,
534: 1,
535: 1,
536: 1,
537: 1,
538: 250,
539: 250,
540: 250,
541: 250,
542: 250,
543: 250,
544: 20,
545: 250,
546: 250,
547: 250,
548: 250,
549: 250,
550: 1,
551: 1,
552: 1,
553: 1,
554: 1,
555: 1,
556: 20,
557: 20,
558: 1,
559: 1,
560: 20,
561: 5,
562: 1,
563: 1,
564: 1,
565: 1,
566: 1,
567: 1,
568: 1,
569: 1,
570: 1,
571: 1,
572: 1,
573: 1,
574: 1,
575: 250,
576: 1,
577: 250,
578: 1,
579: 1,
580: 250,
581: 250,
582: 250,
583: 99,
584: 99,
585: 99,
586: 250,
587: 250,
588: 1,
589: 1,
590: 1,
591: 250,
592: 250,
593: 250,
594: 250,
595: 250,
596: 250,
597: 250,
598: 250,
599: 1,
600: 1,
601: 1,
602: 1,
-11: 1,
-24: 1,
-23: 1,
-22: 1,
-21: 1,
-20: 1,
-19: 1,
-18: 1,
-17: 1,
-16: 1,
-15: 1,
-14: 1,
-13: 1,
-12: 1,
-1: 1,
-10: 1,
-9: 1,
-8: 1,
-7: 1,
-6: 1,
-5: 1,
-4: 1,
-3: 1,
-2: 1,
}
# Reverse lookup table: item display name -> item id.
# NOTE(review): assumes display names are unique; a duplicated name would
# silently keep only the last id encountered.
rev_items = {v: k for k, v in items.items()}
| [
"[email protected]"
] | |
4bf5515c69160386a2bb612f15f2de43aaf77d3b | 6ff1c4a6ba35d1775d4aa2ec72462331fd09b4c4 | /course1/week4/points_and_segments.py | 8c354c31151ce1ed575cb72fbc6cfc24fc2ed24c | [] | no_license | MohamedFawzy/Data-Structures-And-Algorithms-Specialization | eb9bfd66e94a95b3b357f77df8863eb585d1cf13 | a57953b831e80cdb15ffd0984f3f3a3e7b80d657 | refs/heads/master | 2020-03-15T05:49:36.261276 | 2019-01-20T12:33:48 | 2019-01-20T12:33:48 | 131,995,132 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,195 | py | # Uses python2
import sys
from itertools import chain
def fast_count_segments(starts, ends, points):
    """Count, for each point, how many segments [starts[j], ends[j]] contain it.

    Sweep line in O((n + m) log(n + m)): every segment start, segment end and
    query point becomes one event, sorted by coordinate.  The tag orders events
    sharing a coordinate: -inf (segment opens) < point index < +inf (segment
    closes), so a point lying exactly on a boundary is counted as covered
    (segments are inclusive on both ends).

    Args:
        starts, ends: parallel lists holding the segment endpoints.
        points: query coordinates.

    Returns:
        List with, for each point (in input order), the number of segments
        covering it.
    """
    cnt = [0] * len(points)
    open_events = zip(starts, [float('-inf')] * len(starts))
    close_events = zip(ends, [float('inf')] * len(ends))
    point_events = zip(points, range(len(points)))
    events = sorted(chain(open_events, close_events, point_events),
                    key=lambda e: (e[0], e[1]))
    # Counter instead of the previous list-of-sentinels "stack": same depth
    # information without allocating one list element per open segment.
    open_count = 0
    for _, tag in events:
        if tag == float('-inf'):
            open_count += 1
        elif tag == float('inf'):
            open_count -= 1
        else:
            cnt[tag] = open_count
    return cnt
def naive_count_segments(starts, ends, points):
    """Brute-force reference: test every point against every segment, O(n*m).

    Segments are inclusive on both ends; returns one count per point, in
    input order.
    """
    return [
        sum(1 for lo, hi in zip(starts, ends) if lo <= point <= hi)
        for point in points
    ]
if __name__ == '__main__':
    # Python 2 script (raw_input / print statement).  Input format:
    #   line 1: n (number of segments) and m (number of points)
    #   next n lines: start and end of one segment each
    #   last line: the m query points
    data = list(map(int, raw_input().split()))
    n = data[0]
    m = data[1]
    starts = [0] * n
    ends = [0] * n
    for i in range(n):
        starts[i], ends[i] = map(int, raw_input().split())
    points = list(map(int, raw_input().split()))
    # use fast_count_segments (naive_count_segments is the O(n*m) reference)
    cnt = fast_count_segments(starts, ends, points)
    # Print counts space-separated on one line; the trailing comma keeps the
    # Python 2 print statement from emitting a newline per value.
    for x in cnt:
        print str(x)+ ' ',
| [
"[email protected]"
] | |
9220c0779107ae7990c29f78fa1145f303124228 | 9fcd6a91132fd12731d259fe7d709cdf222381bb | /2022/24/foo.py | e15c500fc99027160b86dad1e257bfe3bc844f1d | [] | no_license | protocol7/advent-of-code | f5bdb541d21414ba833760958a1b9d05fc26f84a | fa110cef83510d86e82cb5d02f6af5bb7016f2c7 | refs/heads/master | 2023-04-05T15:33:26.146031 | 2023-03-18T14:22:43 | 2023-03-18T14:22:43 | 159,989,507 | 0 | 2 | null | null | null | null | UTF-8 | Python | false | false | 1,424 | py | import sys
from util import *
def parse(line):
    """Normalize one raw stdin line by trimming surrounding whitespace
    (including the trailing newline)."""
    cleaned = line.strip()
    return cleaned
# Read the whole grid from stdin, one stripped line per row.
xs = list(map(parse, sys.stdin))
# Blizzard glyph -> movement vector (RIGHT/LEFT/UP/DOWN come from util).
# NOTE(review): this name shadows the builtin dir().
dir = {
    ">": RIGHT,
    "<": LEFT,
    "^": UP,
    "v": DOWN,
}
# ps collects every non-wall cell (open ground or a cell currently holding a
# blizzard); winds collects a (glyph, position) pair per blizzard.
ps = set()
winds = list()
g = Grid(xs)
for p, v in g.points():
    if v == "." or v in dir:
        ps.add(p)
    if v in dir:
        winds.append((v, p))
# Overall grid dimensions, walls included.
w = len(xs[0])
h = len(xs)
# The single opening in the top wall is the start; the one in the bottom wall
# is the goal.
for x, v in enumerate(xs[0]):
    if v == ".":
        start = Point(x, 0)
for x, v in enumerate(xs[h-1]):
    if v == ".":
        end = Point(x, h-1)
def move_winds(winds):
    """Advance every blizzard one step, wrapping around at the walls.

    Takes and returns a list of (glyph, position) pairs.  A blizzard that
    steps off the open area (into a wall) re-enters from the opposite side;
    the walls occupy row/column 0 and h-1 / w-1, so the wrapped coordinate is
    1 or w-2 / h-2.
    """
    moved = []
    for glyph, pos in winds:
        nxt = pos + dir[glyph]
        if nxt not in ps:
            # Hit a wall: wrap to the opposite edge of the open area.
            if glyph == ">":
                nxt = (1, pos.y)
            elif glyph == "<":
                nxt = (w - 2, pos.y)
            elif glyph == "^":
                nxt = (pos.x, h - 2)
            elif glyph == "v":
                nxt = (pos.x, 1)
        moved.append((glyph, Point(nxt)))
    return moved
# BFS over time: pp holds every position reachable at the current minute.
# The three (from, to) legs look like AoC 2022 day 24 part 2 — start->goal,
# goal->start, start->goal again — with `turn` accumulating across legs.
turn = 0
for s, g in ((start, end), (end, start), (start, end)):
    # NOTE(review): the loop variable g clobbers the Grid bound to g above;
    # that Grid is not used past this point.
    pp = {s}
    while True:
        # Positions occupied by a blizzard this minute.
        ws = set(w for _, w in winds)
        npp = set()
        for p in pp:
            if p not in ws:
                # Waiting in place is allowed when no blizzard hits us,
                # and only then may we also step to an open neighbour.
                npp.add(p)
                for d in ORTHOGONAL:
                    np = p + d
                    if np not in ws and np in ps:
                        npp.add(np)
        pp = npp
        if g in pp:
            break
        turn += 1
        winds = move_winds(winds)
print(turn)
| [
"[email protected]"
] | |
2f7a7a464cfa554501ff53105e96d444dbdb9d6e | 163bbb4e0920dedd5941e3edfb2d8706ba75627d | /Code/CodeRecords/2247/60716/266118.py | b3cfb9af57e92d07680b4ef8e087e07bdcf75d44 | [] | no_license | AdamZhouSE/pythonHomework | a25c120b03a158d60aaa9fdc5fb203b1bb377a19 | ffc5606817a666aa6241cfab27364326f5c066ff | refs/heads/master | 2022-11-24T08:05:22.122011 | 2020-07-28T16:21:24 | 2020-07-28T16:21:24 | 259,576,640 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 329 | py | strs = input().split(',')
lists = [int(i) for i in strs]
# Greedy stone game: two players alternate turns, each taking the larger of
# the two ends of the row; the final line reports whether alex out-scores lee.
alex = 0
lee = 0
index = 0
while len(lists) > 0:
    # Take whichever end currently holds the larger pile.
    if lists[0] >= lists[len(lists) - 1]:
        temp = lists.pop(0)
    else:
        temp = lists.pop()
    # Even-numbered turns belong to alex, odd-numbered turns to lee.
    if index % 2 == 0:
        alex += temp
    else:
        lee += temp
    # Bug fix: advance the turn counter so the players actually alternate.
    # Previously index was never updated, so alex collected every pile and
    # the script printed True regardless of the input.
    index += 1
print(True) if alex>lee else print(False) | [
"[email protected]"
] | |
23c08622e46b36e8e88eeb8217c48ebaf5a30c2a | 22cec5da2b1fb83dcc9cf7c888f1e2078b05b62e | /flora/wallet/settings/settings_objects.py | e89c586d10d8514e9123368dd15457e76179d5de | [
"Apache-2.0"
] | permissive | JuEnPeHa/flora-blockchain | 649d351e096e73222ab79759c71e191e42da5d34 | 656b5346752d43edb89d7f58aaf35b1cacc9a366 | refs/heads/main | 2023-07-18T08:52:51.353754 | 2021-09-07T08:13:35 | 2021-09-07T08:13:35 | 399,297,784 | 0 | 0 | Apache-2.0 | 2021-08-24T01:30:45 | 2021-08-24T01:30:44 | null | UTF-8 | Python | false | false | 577 | py | from dataclasses import dataclass
from flora.util.streamable import Streamable, streamable
@dataclass(frozen=True)
@streamable
class BackupInitialized(Streamable):
    """
    Records the user's one-time decision about importing wallet backup info.

    Frozen (immutable) and decorated with @streamable so instances serialize
    alongside the other wallet settings.
    """
    user_initialized: bool  # True once the user made a selection in the UI (Skip vs Import backup)
    user_skipped: bool  # True if the user chose to skip importing backup info
    backup_info_imported: bool  # True once backup info has actually been imported
    new_wallet: bool  # True if this wallet is newly created rather than restored from a backup
| [
"[email protected]"
] | |
50595f67b3fda0a90dc73fa51951fbdfaf4170a5 | 5462142b5e72cb39bea5b802dd46f55357c4ea84 | /homework_zero_class/lesson8/函数的简介-times_1.py | 659bc9d42506df5e7f6a26bed1013354b9ef3d7a | [] | no_license | qqmadeinchina/myhomeocde | a0996ba195020da9af32613d6d2822b049e515a0 | 291a30fac236feb75b47610c4d554392d7b30139 | refs/heads/master | 2023-03-23T05:28:53.076041 | 2020-08-24T08:39:00 | 2020-08-24T08:39:00 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 530 | py | #!D:\Program Files\Anaconda3
# -*- coding: utf-8 -*-
# @Time : 2020/7/21 23:38
# @Author : 老萝卜
# @File : 函数的简介-times_1.py
# @Software: PyCharm Community Edition
# These top-level statements execute immediately when the module runs.
print('nihao')
print('黑猫警长')
print('hahaha')
# Code stored inside a function body is not executed immediately; it only
# runs when the function is called.
def fn():
    """Demo function: prints the same four fixed messages on every call."""
    for message in ('这是第一个函数', 'nihao', '黑猫警长', 'hahaha'):
        print(message)
# fn without parentheses is just a reference, so this prints the function
# object itself rather than calling it.
print(fn)
fn()
fn()
fn()
fn()
# fn is a function object; fn() calls it.
# print函数对象 print() 调用函数 | [
"[email protected]"
] | |
c476976ac6e209478e0a9c8cddd2f916c3550b64 | ada9e04c44e9cb577acc1301915490adbb06edf5 | /test.py | 9d91270e57e3aa010ce6f5d7ce3911b7d3051506 | [
"MIT"
] | permissive | iiharu/pyoni | 9356b68a4ab52fa60d39cec01bf8145d7219ad03 | 3e62cedc1ed7dc726e421858ff143f7b7a713403 | refs/heads/master | 2021-01-28T00:24:07.078465 | 2020-03-03T08:22:15 | 2020-03-03T08:22:15 | 243,489,976 | 0 | 0 | MIT | 2020-02-27T10:16:06 | 2020-02-27T10:16:06 | null | UTF-8 | Python | false | false | 1,237 | py | #cap_openni2.cpp usa status = device.open(filename);
#cap_openni.cpp : status = context.OpenFileRecording( filename, productionNode );
import numpy as np
import cv2,sys
import cv2.cv as cv
# Python 2 / OpenCV 2.x script: opens the OpenNI recording (or device) named
# on the command line and displays BGR frames until 'q' is pressed.
cap = cv2.VideoCapture(sys.argv[1])
if True:
    # Dump the capture's depth-generator properties.
    print("Depth generator output mode:")
    print("FRAME_WIDTH " + str(cap.get(cv.CV_CAP_PROP_FRAME_WIDTH)))
    print("FRAME_HEIGHT " + str(cap.get(cv.CV_CAP_PROP_FRAME_HEIGHT)))
    print("FRAME_MAX_DEPTH " + str(cap.get(cv.CV_CAP_PROP_OPENNI_FRAME_MAX_DEPTH)) + "mm")
    print("FPS " + str(cap.get(cv.CV_CAP_PROP_FPS)))
    print("REGISTRATION " + str(cap.get(cv.CV_CAP_PROP_OPENNI_REGISTRATION)) + "\n")
while(True):
    # Capture frame-by-frame
    cap.grab();
    if True:
        frame = cap.retrieve( cv.CV_CAP_OPENNI_BGR_IMAGE )[1]
        # NOTE(review): unlike frame above, this keeps the whole tuple that
        # retrieve() returns (no [1] index); depthMap is never used afterwards.
        depthMap = cap.retrieve( cv.CV_CAP_OPENNI_DEPTH_MAP );
    else:
        frame = cap.retrieve()[1]
    print frame
    # Our operations on the frame come here
    #gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
    # Display the resulting frame
    cv2.imshow('frame',frame)
    if cv2.waitKey(1) & 0xFF == ord('q'):
        break
# When everything done, release the capture
cap.release()
cv2.destroyAllWindows() | [
"[email protected]"
] | |
7b28bb9e2f21cd5136e3513b6e6a30200795dc3e | 8a9b8fc7cbb0e24893f130d49edaf6b23501292d | /Python/find_angle.py | 2350449185b18e8345177efed19841d3a3fb0342 | [] | no_license | Rubabesabanova/HackerRank | 9ca6517944ac7253463e734d2fd5fd3e56e19a0a | 41a6c92f55c72bd20dcb86e32f6f5b792730148c | refs/heads/master | 2022-12-16T11:36:05.059682 | 2020-09-21T09:13:30 | 2020-09-21T09:13:30 | 291,142,383 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 206 | py | # Difficulty : Medium
# Link : https://www.hackerrank.com/challenges/find-angle/problem
# Language : Python 3
from math import *
# Read the two perpendicular side lengths (AB and BC) of the right triangle.
a=int(input())
b=int(input())
# Per the linked problem, the requested angle reduces to atan(a/b);
# convert to degrees and round to the nearest whole degree.
# NOTE(review): assumes b > 0, as guaranteed by the problem constraints.
x=round(degrees(atan(a/b)))
print(str(x)+'°') | [
"[email protected]"
] | |
dbe508446af6c00e51cb1d2dadeea674f35d5fb6 | 3d7039903da398ae128e43c7d8c9662fda77fbdf | /database/React.js/juejin_203.py | f420f288b5fdf6dade263c68f68f241a62bde5c5 | [] | no_license | ChenYongChang1/spider_study | a9aa22e6ed986193bf546bb567712876c7be5e15 | fe5fbc1a5562ff19c70351303997d3df3af690db | refs/heads/master | 2023-08-05T10:43:11.019178 | 2021-09-18T01:30:22 | 2021-09-18T01:30:22 | 406,727,214 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 74,253 | py | {"err_no": 0, "err_msg": "success", "data": [{"article_id": "6934881286527909896", "article_info": {"article_id": "6934881286527909896", "user_id": "1626932942485959", "category_id": "6809637767543259144", "tag_ids": [6809640357354012685], "visible_level": 0, "link_url": "", "cover_image": "", "is_gfw": 0, "title": "从0到1开发可视化拖拽H5编辑器(React)", "brief_content": " 年前年后比较闲,于是用React做了一个简单的lowcode平台,功能如上面动图所示。接下来按照完成功能点介绍下,主要包括: lowcode平台挺常见的,目前网上做的比较成熟且通用的有兔展、易企秀、码卡、图司机等,但是为了个性化的设置,比如要访问本公司的数据库,很多公司…", "is_english": 0, "is_original": 1, "user_index": 0, "original_type": 0, "original_author": "", "content": "", "ctime": "1614652969", "mtime": "1614828248", "rtime": "1614828248", "draft_id": "6934881159578910733", "view_count": 6563, "collect_count": 281, "digg_count": 266, "comment_count": 30, "hot_index": 624, "is_hot": 0, "rank_index": 0.01054888, "status": 2, "verify_status": 1, "audit_status": 2, "mark_content": ""}, "author_user_info": {"user_id": "1626932942485959", "user_name": "花果山技术团队", "company": "花果山", "job_title": "element3", "avatar_large": "https://sf6-ttcdn-tos.pstatp.com/img/user-avatar/86e914bde8320224df00fc178add2ad4~300x300.image", "level": 3, "description": "早晨起床,拥抱太阳,满满的正能量的前端团队", "followee_count": 23, "follower_count": 1370, "post_article_count": 12, "digg_article_count": 37, "got_digg_count": 2263, "got_view_count": 88298, "post_shortmsg_count": 9, "digg_shortmsg_count": 5, "isfollowed": false, "favorable_author": 0, "power": 3145, "study_point": 0, "university": 
{"university_id": "0", "name": "", "logo": ""}, "major": {"major_id": "0", "parent_id": "0", "name": ""}, "student_status": 0, "select_event_count": 0, "select_online_course_count": 0, "identity": 0, "is_select_annual": false, "select_annual_rank": 0, "annual_list_type": 1, "extraMap": {}, "is_logout": 0}, "category": {"category_id": "6809637767543259144", "category_name": "前端", "category_url": "frontend", "rank": 2, "back_ground": "https://lc-mhke0kuv.cn-n1.lcfile.com/8c95587526f346c0.png", "icon": "https://lc-mhke0kuv.cn-n1.lcfile.com/1c40f5eaba561e32.png", "ctime": 1457483942, "mtime": 1432503190, "show_type": 3, "item_type": 2, "promote_tag_cap": 4, "promote_priority": 2}, "tags": [{"id": 2546490, "tag_id": "6809640357354012685", "tag_name": "React.js", "color": "#61DAFB", "icon": "https://p1-jj.byteimg.com/tos-cn-i-t2oaga2asx/leancloud-assets/f655215074250f10f8d4.png~tplv-t2oaga2asx-image.image", "back_ground": "", "show_navi": 0, "ctime": 1432234367, "mtime": 1631692935, "id_type": 9, "tag_alias": "", "post_article_count": 16999, "concern_user_count": 226420}], "user_interact": {"id": 6934881286527909896, "omitempty": 2, "user_id": 0, "is_digg": false, "is_follow": false, "is_collect": false}, "org": {"org_info": {"org_type": 1, "org_id": "6932025169921703950", "online_version_id": 6932025335240523784, "latest_version_id": 6932025335240523784, "power": 4620, "ctime": 1613987921, "mtime": 1631692819, "audit_status": 2, "status": 0, "org_version": {"version_id": "6932025335240523784", "icon": "https://p1-juejin.byteimg.com/tos-cn-i-k3u1fbpfcp/5365bf87af3142b5895fbfbd64fba399~tplv-k3u1fbpfcp-watermark.image", "background": "https://p6-juejin.byteimg.com/tos-cn-i-k3u1fbpfcp/15349a3f3ef34831a1d976ef516a2024~tplv-k3u1fbpfcp-watermark.image", "name": "花果山技术团队", "introduction": "一个程序员的社区组织,致力于做出更好更优秀的教程和开源,以后会在element3的基础之上,做出全套的vue3实战教程,react生态也正在加入肿", "weibo_link": "https://weibo.com/woniuppp/home", "github_link": "https://github.com/hug-sun", "homepage_link": 
"https://space.bilibili.com/26995758", "ctime": 1614048084, "mtime": 1614048084, "org_id": "6932025169921703950", "brief_introduction": "一个程序员的社区组织,致力于做出更好更优秀的教程和开源", "introduction_preview": "一个程序员的社区组织,致力于做出更好更优秀的教程和开源,以后会在element3的基础之上,做出全套的vue3实战教程,react生态也正在加入肿"}, "follower_count": 9424, "article_view_count": 144174, "article_digg_count": 3179}, "org_user": null, "is_followed": false}, "req_id": "202109151602320102121621585D0053A3"}, {"article_id": "6998086416836067365", "article_info": {"article_id": "6998086416836067365", "user_id": "3676395418467687", "category_id": "6809637767543259144", "tag_ids": [6809640357354012685, 6809640407484334093], "visible_level": 0, "link_url": "", "cover_image": "", "is_gfw": 0, "title": "React 18 Suspense 的变化", "brief_content": "在 React 16.x 版本中支持了 Suspense 功能,但是那时并没有完美支持 Suspense。在 React 18 版本中,Suspense 会更符合渲染模型,本文对此做了一些介绍。", "is_english": 0, "is_original": 1, "user_index": 5.678873587267573, "original_type": 0, "original_author": "", "content": "", "ctime": "1629368990", "mtime": "1629432012", "rtime": "1629432012", "draft_id": "6998074214741704735", "view_count": 682, "collect_count": 2, "digg_count": 6, "comment_count": 0, "hot_index": 40, "is_hot": 0, "rank_index": 0.01052965, "status": 2, "verify_status": 1, "audit_status": 2, "mark_content": ""}, "author_user_info": {"user_id": "3676395418467687", "user_name": "KooFE", "company": "公众号: ikoofe", "job_title": "前端开发", "avatar_large": "https://sf6-ttcdn-tos.pstatp.com/img/user-avatar/2efd82661ab36e83dda988f520730228~300x300.image", "level": 2, "description": "不定期发布一些技术文章", "followee_count": 0, "follower_count": 67, "post_article_count": 25, "digg_article_count": 5, "got_digg_count": 148, "got_view_count": 10240, "post_shortmsg_count": 0, "digg_shortmsg_count": 0, "isfollowed": false, "favorable_author": 0, "power": 250, "study_point": 0, "university": {"university_id": "0", "name": "", "logo": ""}, "major": {"major_id": "0", "parent_id": "0", "name": ""}, "student_status": 
0, "select_event_count": 0, "select_online_course_count": 0, "identity": 0, "is_select_annual": false, "select_annual_rank": 0, "annual_list_type": 0, "extraMap": {}, "is_logout": 0}, "category": {"category_id": "6809637767543259144", "category_name": "前端", "category_url": "frontend", "rank": 2, "back_ground": "https://lc-mhke0kuv.cn-n1.lcfile.com/8c95587526f346c0.png", "icon": "https://lc-mhke0kuv.cn-n1.lcfile.com/1c40f5eaba561e32.png", "ctime": 1457483942, "mtime": 1432503190, "show_type": 3, "item_type": 2, "promote_tag_cap": 4, "promote_priority": 2}, "tags": [{"id": 2546490, "tag_id": "6809640357354012685", "tag_name": "React.js", "color": "#61DAFB", "icon": "https://p1-jj.byteimg.com/tos-cn-i-t2oaga2asx/leancloud-assets/f655215074250f10f8d4.png~tplv-t2oaga2asx-image.image", "back_ground": "", "show_navi": 0, "ctime": 1432234367, "mtime": 1631692935, "id_type": 9, "tag_alias": "", "post_article_count": 16999, "concern_user_count": 226420}, {"id": 2546526, "tag_id": "6809640407484334093", "tag_name": "前端", "color": "#60ADFF", "icon": "https://p1-jj.byteimg.com/tos-cn-i-t2oaga2asx/leancloud-assets/bac28828a49181c34110.png~tplv-t2oaga2asx-image.image", "back_ground": "", "show_navi": 1, "ctime": 1435971546, "mtime": 1631692835, "id_type": 9, "tag_alias": "", "post_article_count": 88827, "concern_user_count": 527704}], "user_interact": {"id": 6998086416836067365, "omitempty": 2, "user_id": 0, "is_digg": false, "is_follow": false, "is_collect": false}, "org": {"org_info": null, "org_user": null, "is_followed": false}, "req_id": "202109151602320102121621585D0053A3"}, {"article_id": "6844904068431740936", "article_info": {"article_id": "6844904068431740936", "user_id": "3808363978429613", "category_id": "6809637767543259144", "tag_ids": [6809640357354012685, 6809640407484334093], "visible_level": 0, "link_url": "https://juejin.im/post/6844904068431740936", "cover_image": 
"https://p1-jj.byteimg.com/tos-cn-i-t2oaga2asx/gold-user-assets/2020/2/19/1705dccbac264cd9~tplv-t2oaga2asx-image.image", "is_gfw": 0, "title": "从0到1教你搭建前端团队的组件系统(高级进阶必备)", "brief_content": "随着vue/react这类以数据驱动为主的web框架的不断完善和壮大,越来越多的前端团队开始着手搭建内部的组件库。虽然目前市面上已经有很多功能强大且完善的组件库供我们使用,比如基于react的开源组件库ant-design,material,又比如基于vue的开源组件库elem…", "is_english": 0, "is_original": 1, "user_index": 12.756802044922, "original_type": 0, "original_author": "", "content": "", "ctime": "1582176836", "mtime": "1600500857", "rtime": "1582176836", "draft_id": "6845076642797207566", "view_count": 31214, "collect_count": 1214, "digg_count": 877, "comment_count": 76, "hot_index": 2513, "is_hot": 0, "rank_index": 0.01052933, "status": 2, "verify_status": 1, "audit_status": 2, "mark_content": ""}, "author_user_info": {"user_id": "3808363978429613", "user_name": "徐小夕", "company": "公号|趣谈前端", "job_title": "掘金签约作者", "avatar_large": "https://sf6-ttcdn-tos.pstatp.com/img/user-avatar/97aec65b1390d7500163f818edd2fe07~300x300.image", "level": 6, "description": "前端架构师,致力于前端工程化,可视化方向的研究。(微信:Mr_xuxiaoxi)", "followee_count": 73, "follower_count": 16084, "post_article_count": 153, "digg_article_count": 185, "got_digg_count": 20439, "got_view_count": 992122, "post_shortmsg_count": 41, "digg_shortmsg_count": 37, "isfollowed": false, "favorable_author": 1, "power": 30337, "study_point": 0, "university": {"university_id": "0", "name": "", "logo": ""}, "major": {"major_id": "0", "parent_id": "0", "name": ""}, "student_status": 0, "select_event_count": 0, "select_online_course_count": 0, "identity": 0, "is_select_annual": false, "select_annual_rank": 0, "annual_list_type": 0, "extraMap": {}, "is_logout": 0}, "category": {"category_id": "6809637767543259144", "category_name": "前端", "category_url": "frontend", "rank": 2, "back_ground": "https://lc-mhke0kuv.cn-n1.lcfile.com/8c95587526f346c0.png", "icon": "https://lc-mhke0kuv.cn-n1.lcfile.com/1c40f5eaba561e32.png", "ctime": 1457483942, "mtime": 1432503190, "show_type": 
3, "item_type": 2, "promote_tag_cap": 4, "promote_priority": 2}, "tags": [{"id": 2546490, "tag_id": "6809640357354012685", "tag_name": "React.js", "color": "#61DAFB", "icon": "https://p1-jj.byteimg.com/tos-cn-i-t2oaga2asx/leancloud-assets/f655215074250f10f8d4.png~tplv-t2oaga2asx-image.image", "back_ground": "", "show_navi": 0, "ctime": 1432234367, "mtime": 1631692935, "id_type": 9, "tag_alias": "", "post_article_count": 16999, "concern_user_count": 226420}, {"id": 2546526, "tag_id": "6809640407484334093", "tag_name": "前端", "color": "#60ADFF", "icon": "https://p1-jj.byteimg.com/tos-cn-i-t2oaga2asx/leancloud-assets/bac28828a49181c34110.png~tplv-t2oaga2asx-image.image", "back_ground": "", "show_navi": 1, "ctime": 1435971546, "mtime": 1631692835, "id_type": 9, "tag_alias": "", "post_article_count": 88827, "concern_user_count": 527704}], "user_interact": {"id": 6844904068431740936, "omitempty": 2, "user_id": 0, "is_digg": false, "is_follow": false, "is_collect": false}, "org": {"org_info": null, "org_user": null, "is_followed": false}, "req_id": "202109151602320102121621585D0053A3"}, {"article_id": "7005745475198386213", "article_info": {"article_id": "7005745475198386213", "user_id": "976022055959207", "category_id": "6809637767543259144", "tag_ids": [6809640357354012685, 6809640407484334093], "visible_level": 0, "link_url": "", "cover_image": "", "is_gfw": 0, "title": "基于create-react-app 适配移动端方案", "brief_content": "我平时的技术栈是vue,最近在react的时候发现基于create-react-app搭建的项目有不少坑(react大佬请绕路),首先公司如果没有固定的手脚架的话就需要自己搭建项目,npm各种的库,难", "is_english": 0, "is_original": 1, "user_index": 2.709511291351455, "original_type": 0, "original_author": "", "content": "", "ctime": "1631152308", "mtime": "1631171024", "rtime": "1631171024", "draft_id": "6845076398109884429", "view_count": 67, "collect_count": 2, "digg_count": 1, "comment_count": 0, "hot_index": 4, "is_hot": 0, "rank_index": 0.01043942, "status": 2, "verify_status": 1, "audit_status": 2, "mark_content": ""}, "author_user_info": 
{"user_id": "976022055959207", "user_name": "皮皮大人", "company": "暂无", "job_title": "web前端", "avatar_large": "https://p1-jj.byteimg.com/tos-cn-i-t2oaga2asx/gold-user-assets/2019/8/3/16c582682de81e5a~tplv-t2oaga2asx-image.image", "level": 1, "description": "", "followee_count": 29, "follower_count": 4, "post_article_count": 3, "digg_article_count": 14, "got_digg_count": 5, "got_view_count": 326, "post_shortmsg_count": 0, "digg_shortmsg_count": 0, "isfollowed": false, "favorable_author": 0, "power": 8, "study_point": 0, "university": {"university_id": "0", "name": "", "logo": ""}, "major": {"major_id": "0", "parent_id": "0", "name": ""}, "student_status": 0, "select_event_count": 0, "select_online_course_count": 0, "identity": 0, "is_select_annual": false, "select_annual_rank": 0, "annual_list_type": 0, "extraMap": {}, "is_logout": 0}, "category": {"category_id": "6809637767543259144", "category_name": "前端", "category_url": "frontend", "rank": 2, "back_ground": "https://lc-mhke0kuv.cn-n1.lcfile.com/8c95587526f346c0.png", "icon": "https://lc-mhke0kuv.cn-n1.lcfile.com/1c40f5eaba561e32.png", "ctime": 1457483942, "mtime": 1432503190, "show_type": 3, "item_type": 2, "promote_tag_cap": 4, "promote_priority": 2}, "tags": [{"id": 2546490, "tag_id": "6809640357354012685", "tag_name": "React.js", "color": "#61DAFB", "icon": "https://p1-jj.byteimg.com/tos-cn-i-t2oaga2asx/leancloud-assets/f655215074250f10f8d4.png~tplv-t2oaga2asx-image.image", "back_ground": "", "show_navi": 0, "ctime": 1432234367, "mtime": 1631692935, "id_type": 9, "tag_alias": "", "post_article_count": 16999, "concern_user_count": 226420}, {"id": 2546526, "tag_id": "6809640407484334093", "tag_name": "前端", "color": "#60ADFF", "icon": "https://p1-jj.byteimg.com/tos-cn-i-t2oaga2asx/leancloud-assets/bac28828a49181c34110.png~tplv-t2oaga2asx-image.image", "back_ground": "", "show_navi": 1, "ctime": 1435971546, "mtime": 1631692835, "id_type": 9, "tag_alias": "", "post_article_count": 88827, "concern_user_count": 
527704}], "user_interact": {"id": 7005745475198386213, "omitempty": 2, "user_id": 0, "is_digg": false, "is_follow": false, "is_collect": false}, "org": {"org_info": null, "org_user": null, "is_followed": false}, "req_id": "202109151602320102121621585D0053A3"}, {"article_id": "7004754412501467173", "article_info": {"article_id": "7004754412501467173", "user_id": "3685218705483070", "category_id": "6809637767543259144", "tag_ids": [6809640357354012685], "visible_level": 0, "link_url": "", "cover_image": "", "is_gfw": 0, "title": "关于Recoil的atom跨RecoilRoot交互的二三事", "brief_content": "关于React的状态管理库Recoil,如何跨RecoilRoot组件且不使用override属性为false的数据交互分享", "is_english": 0, "is_original": 1, "user_index": 0, "original_type": 0, "original_author": "", "content": "", "ctime": "1630921647", "mtime": "1631087324", "rtime": "1630987081", "draft_id": "7004695449894387725", "view_count": 82, "collect_count": 0, "digg_count": 6, "comment_count": 0, "hot_index": 10, "is_hot": 0, "rank_index": 0.01042217, "status": 2, "verify_status": 1, "audit_status": 2, "mark_content": ""}, "author_user_info": {"user_id": "3685218705483070", "user_name": "syoueicc", "company": "", "job_title": "", "avatar_large": "https://p1-jj.byteimg.com/tos-cn-i-t2oaga2asx/mirror-assets/168e083a73fc7360d16~tplv-t2oaga2asx-image.image", "level": 1, "description": "", "followee_count": 16, "follower_count": 3, "post_article_count": 1, "digg_article_count": 1, "got_digg_count": 6, "got_view_count": 82, "post_shortmsg_count": 0, "digg_shortmsg_count": 0, "isfollowed": false, "favorable_author": 0, "power": 6, "study_point": 0, "university": {"university_id": "0", "name": "", "logo": ""}, "major": {"major_id": "0", "parent_id": "0", "name": ""}, "student_status": 0, "select_event_count": 0, "select_online_course_count": 0, "identity": 0, "is_select_annual": false, "select_annual_rank": 0, "annual_list_type": 0, "extraMap": {}, "is_logout": 0}, "category": {"category_id": "6809637767543259144", "category_name": "前端", 
"category_url": "frontend", "rank": 2, "back_ground": "https://lc-mhke0kuv.cn-n1.lcfile.com/8c95587526f346c0.png", "icon": "https://lc-mhke0kuv.cn-n1.lcfile.com/1c40f5eaba561e32.png", "ctime": 1457483942, "mtime": 1432503190, "show_type": 3, "item_type": 2, "promote_tag_cap": 4, "promote_priority": 2}, "tags": [{"id": 2546490, "tag_id": "6809640357354012685", "tag_name": "React.js", "color": "#61DAFB", "icon": "https://p1-jj.byteimg.com/tos-cn-i-t2oaga2asx/leancloud-assets/f655215074250f10f8d4.png~tplv-t2oaga2asx-image.image", "back_ground": "", "show_navi": 0, "ctime": 1432234367, "mtime": 1631692935, "id_type": 9, "tag_alias": "", "post_article_count": 16999, "concern_user_count": 226420}], "user_interact": {"id": 7004754412501467173, "omitempty": 2, "user_id": 0, "is_digg": false, "is_follow": false, "is_collect": false}, "org": {"org_info": null, "org_user": null, "is_followed": false}, "req_id": "202109151602320102121621585D0053A3"}, {"article_id": "6960493325061193735", "article_info": {"article_id": "6960493325061193735", "user_id": "4098589725834317", "category_id": "6809637767543259144", "tag_ids": [6809640357354012685, 6809640394175971342], "visible_level": 0, "link_url": "", "cover_image": "", "is_gfw": 0, "title": "Hi~ 这将是一个通用的新手引导解决方案", "brief_content": "组件背景 不管是老用户还是新用户,在产品发布新版本、有新功能上线、或是现有功能更新的场景下,都需要一定的指导。功能引导组件就是互联网产品中的指示牌,它旨在带领用户参观产品,帮助用户熟悉新的界面、交互与", "is_english": 0, "is_original": 1, "user_index": 11.881126923814996, "original_type": 0, "original_author": "", "content": "", "ctime": "1620616151", "mtime": "1620616480", "rtime": "1620616480", "draft_id": "6960490991610495012", "view_count": 3997, "collect_count": 121, "digg_count": 130, "comment_count": 11, "hot_index": 340, "is_hot": 0, "rank_index": 0.01027453, "status": 2, "verify_status": 1, "audit_status": 2, "mark_content": ""}, "author_user_info": {"user_id": "4098589725834317", "user_name": "字节前端", "company": "北京字节跳动网络技术有限公司", "job_title": "", "avatar_large": 
"https://sf1-ttcdn-tos.pstatp.com/img/user-avatar/3c4d172634bb28fa061a6ec7feae35ce~300x300.image", "level": 4, "description": "公众号:字节前端ByteFE", "followee_count": 5, "follower_count": 6445, "post_article_count": 136, "digg_article_count": 1, "got_digg_count": 6591, "got_view_count": 339814, "post_shortmsg_count": 3, "digg_shortmsg_count": 1, "isfollowed": false, "favorable_author": 1, "power": 9989, "study_point": 0, "university": {"university_id": "0", "name": "", "logo": ""}, "major": {"major_id": "0", "parent_id": "0", "name": ""}, "student_status": 0, "select_event_count": 0, "select_online_course_count": 0, "identity": 0, "is_select_annual": false, "select_annual_rank": 0, "annual_list_type": 1, "extraMap": {}, "is_logout": 0}, "category": {"category_id": "6809637767543259144", "category_name": "前端", "category_url": "frontend", "rank": 2, "back_ground": "https://lc-mhke0kuv.cn-n1.lcfile.com/8c95587526f346c0.png", "icon": "https://lc-mhke0kuv.cn-n1.lcfile.com/1c40f5eaba561e32.png", "ctime": 1457483942, "mtime": 1432503190, "show_type": 3, "item_type": 2, "promote_tag_cap": 4, "promote_priority": 2}, "tags": [{"id": 2546490, "tag_id": "6809640357354012685", "tag_name": "React.js", "color": "#61DAFB", "icon": "https://p1-jj.byteimg.com/tos-cn-i-t2oaga2asx/leancloud-assets/f655215074250f10f8d4.png~tplv-t2oaga2asx-image.image", "back_ground": "", "show_navi": 0, "ctime": 1432234367, "mtime": 1631692935, "id_type": 9, "tag_alias": "", "post_article_count": 16999, "concern_user_count": 226420}, {"id": 2546516, "tag_id": "6809640394175971342", "tag_name": "CSS", "color": "#244DE4", "icon": "https://p1-jj.byteimg.com/tos-cn-i-t2oaga2asx/leancloud-assets/66de0c4eb9d10130d5bf.png~tplv-t2oaga2asx-image.image", "back_ground": "", "show_navi": 0, "ctime": 1432239426, "mtime": 1631688735, "id_type": 9, "tag_alias": "", "post_article_count": 14981, "concern_user_count": 297034}], "user_interact": {"id": 6960493325061193735, "omitempty": 2, "user_id": 0, "is_digg": false, 
"is_follow": false, "is_collect": false}, "org": {"org_info": {"org_type": 1, "org_id": "6930802337313210381", "online_version_id": 6930890337229471751, "latest_version_id": 6930890337229471751, "power": 6386, "ctime": 1613706529, "mtime": 1631692819, "audit_status": 2, "status": 0, "org_version": {"version_id": "6930890337229471751", "icon": "https://p6-juejin.byteimg.com/tos-cn-i-k3u1fbpfcp/59fd4b984fc745de8cb38b345577ed31~tplv-k3u1fbpfcp-watermark.image", "background": "https://p1-juejin.byteimg.com/tos-cn-i-k3u1fbpfcp/41d1c0cd091e42b1b52de07f7fff87e4~tplv-k3u1fbpfcp-zoom-1.image", "name": "字节前端", "introduction": "字节前端,字节跳动官方前端技术分享账号。", "weibo_link": "", "github_link": "", "homepage_link": "", "ctime": 1613732604, "mtime": 1613732604, "org_id": "6930802337313210381", "brief_introduction": "字节前端的技术实践分享", "introduction_preview": "字节前端,字节跳动官方前端技术分享账号。"}, "follower_count": 5724, "article_view_count": 199705, "article_digg_count": 4389}, "org_user": null, "is_followed": false}, "req_id": "202109151602320102121621585D0053A3"}, {"article_id": "7001522150507020295", "article_info": {"article_id": "7001522150507020295", "user_id": "4001878056901998", "category_id": "6809637767543259144", "tag_ids": [6809640407484334093, 6809640357354012685], "visible_level": 0, "link_url": "", "cover_image": "https://p1-juejin.byteimg.com/tos-cn-i-k3u1fbpfcp/d9d37d5aad9740c6804332d12815527f~tplv-k3u1fbpfcp-watermark.image", "is_gfw": 0, "title": "React基础篇", "brief_content": "React基础篇 一、setState 1.正确使用setState setState(partialState, callback) partialState: object| function 用", "is_english": 0, "is_original": 1, "user_index": 4.921931921407154, "original_type": 0, "original_author": "", "content": "", "ctime": "1630169076", "mtime": "1630304876", "rtime": "1630304876", "draft_id": "7001517590602317832", "view_count": 345, "collect_count": 3, "digg_count": 1, "comment_count": 0, "hot_index": 18, "is_hot": 0, "rank_index": 0.01023445, "status": 2, "verify_status": 1, "audit_status": 2, 
"mark_content": ""}, "author_user_info": {"user_id": "4001878056901998", "user_name": "forestxieCode", "company": "长沙某公司", "job_title": "前端工程师", "avatar_large": "https://sf6-ttcdn-tos.pstatp.com/img/user-avatar/389bdcf62602d27a925ddf657830d3ba~300x300.image", "level": 2, "description": "输出暴露输入", "followee_count": 12, "follower_count": 4, "post_article_count": 15, "digg_article_count": 86, "got_digg_count": 59, "got_view_count": 5187, "post_shortmsg_count": 2, "digg_shortmsg_count": 0, "isfollowed": false, "favorable_author": 0, "power": 110, "study_point": 0, "university": {"university_id": "0", "name": "", "logo": ""}, "major": {"major_id": "0", "parent_id": "0", "name": ""}, "student_status": 0, "select_event_count": 0, "select_online_course_count": 0, "identity": 0, "is_select_annual": false, "select_annual_rank": 0, "annual_list_type": 0, "extraMap": {}, "is_logout": 0}, "category": {"category_id": "6809637767543259144", "category_name": "前端", "category_url": "frontend", "rank": 2, "back_ground": "https://lc-mhke0kuv.cn-n1.lcfile.com/8c95587526f346c0.png", "icon": "https://lc-mhke0kuv.cn-n1.lcfile.com/1c40f5eaba561e32.png", "ctime": 1457483942, "mtime": 1432503190, "show_type": 3, "item_type": 2, "promote_tag_cap": 4, "promote_priority": 2}, "tags": [{"id": 2546526, "tag_id": "6809640407484334093", "tag_name": "前端", "color": "#60ADFF", "icon": "https://p1-jj.byteimg.com/tos-cn-i-t2oaga2asx/leancloud-assets/bac28828a49181c34110.png~tplv-t2oaga2asx-image.image", "back_ground": "", "show_navi": 1, "ctime": 1435971546, "mtime": 1631692835, "id_type": 9, "tag_alias": "", "post_article_count": 88827, "concern_user_count": 527704}, {"id": 2546490, "tag_id": "6809640357354012685", "tag_name": "React.js", "color": "#61DAFB", "icon": "https://p1-jj.byteimg.com/tos-cn-i-t2oaga2asx/leancloud-assets/f655215074250f10f8d4.png~tplv-t2oaga2asx-image.image", "back_ground": "", "show_navi": 0, "ctime": 1432234367, "mtime": 1631692935, "id_type": 9, "tag_alias": "", 
"post_article_count": 16999, "concern_user_count": 226420}], "user_interact": {"id": 7001522150507020295, "omitempty": 2, "user_id": 0, "is_digg": false, "is_follow": false, "is_collect": false}, "org": {"org_info": null, "org_user": null, "is_followed": false}, "req_id": "202109151602320102121621585D0053A3"}, {"article_id": "6995918802546343973", "article_info": {"article_id": "6995918802546343973", "user_id": "2541726614695704", "category_id": "6809637767543259144", "tag_ids": [6809640779649138695, 6809640357354012685, 6809640407484334093], "visible_level": 0, "link_url": "", "cover_image": "https://p6-juejin.byteimg.com/tos-cn-i-k3u1fbpfcp/69ad710fc3bf49fdb7c38ae5673074db~tplv-k3u1fbpfcp-watermark.image", "is_gfw": 0, "title": "8月更文挑战|讲讲React中的传参方式", "brief_content": "今天来分享一下最近对react应用的总结,主要方向是react在中可以使用的几种传参方式和spa应用中常用的几种传参方式", "is_english": 0, "is_original": 1, "user_index": 0, "original_type": 0, "original_author": "", "content": "", "ctime": "1628864320", "mtime": "1628939934", "rtime": "1628939934", "draft_id": "6994422360044470285", "view_count": 172, "collect_count": 1, "digg_count": 48, "comment_count": 1, "hot_index": 57, "is_hot": 0, "rank_index": 0.010234, "status": 2, "verify_status": 1, "audit_status": 2, "mark_content": ""}, "author_user_info": {"user_id": "2541726614695704", "user_name": "零狐冲", "company": "", "job_title": "搬砖人", "avatar_large": "https://p1-jj.byteimg.com/tos-cn-i-t2oaga2asx/gold-user-assets/2019/8/6/16c649bae5364093~tplv-t2oaga2asx-image.image", "level": 3, "description": "撸铁", "followee_count": 23, "follower_count": 41, "post_article_count": 20, "digg_article_count": 802, "got_digg_count": 944, "got_view_count": 9941, "post_shortmsg_count": 8, "digg_shortmsg_count": 1, "isfollowed": false, "favorable_author": 0, "power": 1043, "study_point": 0, "university": {"university_id": "0", "name": "", "logo": ""}, "major": {"major_id": "0", "parent_id": "0", "name": ""}, "student_status": 0, "select_event_count": 0, 
"select_online_course_count": 0, "identity": 0, "is_select_annual": false, "select_annual_rank": 0, "annual_list_type": 0, "extraMap": {}, "is_logout": 0}, "category": {"category_id": "6809637767543259144", "category_name": "前端", "category_url": "frontend", "rank": 2, "back_ground": "https://lc-mhke0kuv.cn-n1.lcfile.com/8c95587526f346c0.png", "icon": "https://lc-mhke0kuv.cn-n1.lcfile.com/1c40f5eaba561e32.png", "ctime": 1457483942, "mtime": 1432503190, "show_type": 3, "item_type": 2, "promote_tag_cap": 4, "promote_priority": 2}, "tags": [{"id": 2546796, "tag_id": "6809640779649138695", "tag_name": "MobX", "color": "#000000", "icon": "https://p1-jj.byteimg.com/tos-cn-i-t2oaga2asx/leancloud-assets/a264ed628465ced60fb9.png~tplv-t2oaga2asx-image.image", "back_ground": "", "show_navi": 0, "ctime": 1488858117, "mtime": 1631070893, "id_type": 9, "tag_alias": "", "post_article_count": 240, "concern_user_count": 2447}, {"id": 2546490, "tag_id": "6809640357354012685", "tag_name": "React.js", "color": "#61DAFB", "icon": "https://p1-jj.byteimg.com/tos-cn-i-t2oaga2asx/leancloud-assets/f655215074250f10f8d4.png~tplv-t2oaga2asx-image.image", "back_ground": "", "show_navi": 0, "ctime": 1432234367, "mtime": 1631692935, "id_type": 9, "tag_alias": "", "post_article_count": 16999, "concern_user_count": 226420}, {"id": 2546526, "tag_id": "6809640407484334093", "tag_name": "前端", "color": "#60ADFF", "icon": "https://p1-jj.byteimg.com/tos-cn-i-t2oaga2asx/leancloud-assets/bac28828a49181c34110.png~tplv-t2oaga2asx-image.image", "back_ground": "", "show_navi": 1, "ctime": 1435971546, "mtime": 1631692835, "id_type": 9, "tag_alias": "", "post_article_count": 88827, "concern_user_count": 527704}], "user_interact": {"id": 6995918802546343973, "omitempty": 2, "user_id": 0, "is_digg": false, "is_follow": false, "is_collect": false}, "org": {"org_info": null, "org_user": null, "is_followed": false}, "req_id": "202109151602320102121621585D0053A3"}, {"article_id": "6989235408970186783", "article_info": 
{"article_id": "6989235408970186783", "user_id": "1943592291009511", "category_id": "6809637767543259144", "tag_ids": [6809640744349859848, 6809640357354012685], "visible_level": 0, "link_url": "", "cover_image": "https://p6-juejin.byteimg.com/tos-cn-i-k3u1fbpfcp/7f0e9c13075c42ce8e4767bdbe6bb804~tplv-k3u1fbpfcp-watermark.image", "is_gfw": 0, "title": "Chrome团队:如何曲线拯救KPI", "brief_content": "大家好,我是卡颂。 当聊到Chrome,你第一反应是啥? 市占率第一的浏览器?鼎鼎大名的V8引擎?浏览器调试的标配——DevTools? 对于Chrome团队成员来说,第一反应很可能是这两个指标(KP", "is_english": 0, "is_original": 1, "user_index": 9.465463134871271, "original_type": 0, "original_author": "", "content": "", "ctime": "1627308234", "mtime": "1627355727", "rtime": "1627355727", "draft_id": "6989234067807928333", "view_count": 1667, "collect_count": 3, "digg_count": 8, "comment_count": 3, "hot_index": 94, "is_hot": 0, "rank_index": 0.01022666, "status": 2, "verify_status": 1, "audit_status": 2, "mark_content": ""}, "author_user_info": {"user_id": "1943592291009511", "user_name": "魔术师卡颂", "company": "魔术师卡颂", "job_title": "公粽号", "avatar_large": "https://sf1-ttcdn-tos.pstatp.com/img/user-avatar/1af004c0850aa09a35522153b4b3cf55~300x300.image", "level": 3, "description": "| 自由职业", "followee_count": 15, "follower_count": 1753, "post_article_count": 86, "digg_article_count": 76, "got_digg_count": 2313, "got_view_count": 174832, "post_shortmsg_count": 2, "digg_shortmsg_count": 2, "isfollowed": false, "favorable_author": 0, "power": 4147, "study_point": 0, "university": {"university_id": "0", "name": "", "logo": ""}, "major": {"major_id": "0", "parent_id": "0", "name": ""}, "student_status": 0, "select_event_count": 0, "select_online_course_count": 0, "identity": 0, "is_select_annual": false, "select_annual_rank": 0, "annual_list_type": 0, "extraMap": {}, "is_logout": 0}, "category": {"category_id": "6809637767543259144", "category_name": "前端", "category_url": "frontend", "rank": 2, "back_ground": "https://lc-mhke0kuv.cn-n1.lcfile.com/8c95587526f346c0.png", "icon": 
"https://lc-mhke0kuv.cn-n1.lcfile.com/1c40f5eaba561e32.png", "ctime": 1457483942, "mtime": 1432503190, "show_type": 3, "item_type": 2, "promote_tag_cap": 4, "promote_priority": 2}, "tags": [{"id": 2546770, "tag_id": "6809640744349859848", "tag_name": "Babel", "color": "#000000", "icon": "https://p1-jj.byteimg.com/tos-cn-i-t2oaga2asx/leancloud-assets/b8db05f1350a9fcbada6.png~tplv-t2oaga2asx-image.image", "back_ground": "", "show_navi": 0, "ctime": 1487105648, "mtime": 1631692940, "id_type": 9, "tag_alias": "", "post_article_count": 813, "concern_user_count": 8237}, {"id": 2546490, "tag_id": "6809640357354012685", "tag_name": "React.js", "color": "#61DAFB", "icon": "https://p1-jj.byteimg.com/tos-cn-i-t2oaga2asx/leancloud-assets/f655215074250f10f8d4.png~tplv-t2oaga2asx-image.image", "back_ground": "", "show_navi": 0, "ctime": 1432234367, "mtime": 1631692935, "id_type": 9, "tag_alias": "", "post_article_count": 16999, "concern_user_count": 226420}], "user_interact": {"id": 6989235408970186783, "omitempty": 2, "user_id": 0, "is_digg": false, "is_follow": false, "is_collect": false}, "org": {"org_info": null, "org_user": null, "is_followed": false}, "req_id": "202109151602320102121621585D0053A3"}, {"article_id": "6998287682593882142", "article_info": {"article_id": "6998287682593882142", "user_id": "2981531265821416", "category_id": "6809637767543259144", "tag_ids": [6809640407484334093, 6809640357354012685], "visible_level": 0, "link_url": "", "cover_image": "https://p9-juejin.byteimg.com/tos-cn-i-k3u1fbpfcp/de93fc7997384be5b2d0a9e483575d6d~tplv-k3u1fbpfcp-watermark.image", "is_gfw": 0, "title": "(开源)给图片编辑器添加了【撤销重做】功能", "brief_content": "一款开源图片编辑器,采用React + Typescript + React-knova 框架开发.", "is_english": 0, "is_original": 1, "user_index": 13.671804832839573, "original_type": 0, "original_author": "", "content": "", "ctime": "1629417099", "mtime": "1629444157", "rtime": "1629418077", "draft_id": "6998155342794194951", "view_count": 493, "collect_count": 4, "digg_count": 
5, "comment_count": 2, "hot_index": 31, "is_hot": 0, "rank_index": 0.01020163, "status": 2, "verify_status": 1, "audit_status": 2, "mark_content": ""}, "author_user_info": {"user_id": "2981531265821416", "user_name": "杰出D", "company": "公号|前端有话说", "job_title": "Web前端工程师", "avatar_large": "https://sf3-ttcdn-tos.pstatp.com/img/user-avatar/2e8908f0995d5b92dfe5884745a78d4d~300x300.image", "level": 3, "description": "React深度使用者", "followee_count": 8, "follower_count": 274, "post_article_count": 11, "digg_article_count": 15, "got_digg_count": 1699, "got_view_count": 77760, "post_shortmsg_count": 11, "digg_shortmsg_count": 3, "isfollowed": false, "favorable_author": 1, "power": 2476, "study_point": 0, "university": {"university_id": "0", "name": "", "logo": ""}, "major": {"major_id": "0", "parent_id": "0", "name": ""}, "student_status": 0, "select_event_count": 0, "select_online_course_count": 0, "identity": 0, "is_select_annual": false, "select_annual_rank": 0, "annual_list_type": 0, "extraMap": {}, "is_logout": 0}, "category": {"category_id": "6809637767543259144", "category_name": "前端", "category_url": "frontend", "rank": 2, "back_ground": "https://lc-mhke0kuv.cn-n1.lcfile.com/8c95587526f346c0.png", "icon": "https://lc-mhke0kuv.cn-n1.lcfile.com/1c40f5eaba561e32.png", "ctime": 1457483942, "mtime": 1432503190, "show_type": 3, "item_type": 2, "promote_tag_cap": 4, "promote_priority": 2}, "tags": [{"id": 2546526, "tag_id": "6809640407484334093", "tag_name": "前端", "color": "#60ADFF", "icon": "https://p1-jj.byteimg.com/tos-cn-i-t2oaga2asx/leancloud-assets/bac28828a49181c34110.png~tplv-t2oaga2asx-image.image", "back_ground": "", "show_navi": 1, "ctime": 1435971546, "mtime": 1631692835, "id_type": 9, "tag_alias": "", "post_article_count": 88827, "concern_user_count": 527704}, {"id": 2546490, "tag_id": "6809640357354012685", "tag_name": "React.js", "color": "#61DAFB", "icon": 
"https://p1-jj.byteimg.com/tos-cn-i-t2oaga2asx/leancloud-assets/f655215074250f10f8d4.png~tplv-t2oaga2asx-image.image", "back_ground": "", "show_navi": 0, "ctime": 1432234367, "mtime": 1631692935, "id_type": 9, "tag_alias": "", "post_article_count": 16999, "concern_user_count": 226420}], "user_interact": {"id": 6998287682593882142, "omitempty": 2, "user_id": 0, "is_digg": false, "is_follow": false, "is_collect": false}, "org": {"org_info": null, "org_user": null, "is_followed": false}, "req_id": "202109151602320102121621585D0053A3"}, {"article_id": "6844903715669999629", "article_info": {"article_id": "6844903715669999629", "user_id": "3896324938277646", "category_id": "6809637767543259144", "tag_ids": [6809640357354012685, 6809640398105870343, 6809640407484334093, 6809640497393434632], "visible_level": 0, "link_url": "https://juejin.im/post/6844903715669999629", "cover_image": "", "is_gfw": 0, "title": "一年半经验,百度、有赞、阿里前端面试总结", "brief_content": "人家都说,前端需要每年定期出来面面试,衡量一下自己当前的技术水平以及价值,本人17年7月份,毕业到现在都没出来试过,也没很想换工作,就出来试试,看看自己水平咋样。 以下为我现场面试时候的一些回答,部分因人而异的问题我就不回答了,回答的都为参考答案,也有部分错误的地方或者不好的地…", "is_english": 0, "is_original": 1, "user_index": 0, "original_type": 0, "original_author": "", "content": "", "ctime": "1542450031", "mtime": "1598478475", "rtime": "1542520881", "draft_id": "6845075653356699656", "view_count": 62171, "collect_count": 2201, "digg_count": 1817, "comment_count": 236, "hot_index": 5161, "is_hot": 0, "rank_index": 0.01001399, "status": 2, "verify_status": 1, "audit_status": 2, "mark_content": ""}, "author_user_info": {"user_id": "3896324938277646", "user_name": "杨溜溜", "company": "百度", "job_title": "WEB前端吹水工程师", "avatar_large": "https://sf3-ttcdn-tos.pstatp.com/img/user-avatar/89b094d666c3e928977e6d337192e9df~300x300.image", "level": 4, "description": "https://github.com/yacan8/blog", "followee_count": 9, "follower_count": 4186, "post_article_count": 18, "digg_article_count": 170, "got_digg_count": 4093, "got_view_count": 181790, "post_shortmsg_count": 1, 
"digg_shortmsg_count": 1, "isfollowed": false, "favorable_author": 1, "power": 5910, "study_point": 0, "university": {"university_id": "0", "name": "", "logo": ""}, "major": {"major_id": "0", "parent_id": "0", "name": ""}, "student_status": 0, "select_event_count": 0, "select_online_course_count": 0, "identity": 0, "is_select_annual": false, "select_annual_rank": 0, "annual_list_type": 0, "extraMap": {}, "is_logout": 0}, "category": {"category_id": "6809637767543259144", "category_name": "前端", "category_url": "frontend", "rank": 2, "back_ground": "https://lc-mhke0kuv.cn-n1.lcfile.com/8c95587526f346c0.png", "icon": "https://lc-mhke0kuv.cn-n1.lcfile.com/1c40f5eaba561e32.png", "ctime": 1457483942, "mtime": 1432503190, "show_type": 3, "item_type": 2, "promote_tag_cap": 4, "promote_priority": 2}, "tags": [{"id": 2546490, "tag_id": "6809640357354012685", "tag_name": "React.js", "color": "#61DAFB", "icon": "https://p1-jj.byteimg.com/tos-cn-i-t2oaga2asx/leancloud-assets/f655215074250f10f8d4.png~tplv-t2oaga2asx-image.image", "back_ground": "", "show_navi": 0, "ctime": 1432234367, "mtime": 1631692935, "id_type": 9, "tag_alias": "", "post_article_count": 16999, "concern_user_count": 226420}, {"id": 2546519, "tag_id": "6809640398105870343", "tag_name": "JavaScript", "color": "#616161", "icon": "https://p1-jj.byteimg.com/tos-cn-i-t2oaga2asx/leancloud-assets/5d70fd6af940df373834.png~tplv-t2oaga2asx-image.image", "back_ground": "", "show_navi": 0, "ctime": 1435884803, "mtime": 1631692583, "id_type": 9, "tag_alias": "", "post_article_count": 67405, "concern_user_count": 398956}, {"id": 2546526, "tag_id": "6809640407484334093", "tag_name": "前端", "color": "#60ADFF", "icon": "https://p1-jj.byteimg.com/tos-cn-i-t2oaga2asx/leancloud-assets/bac28828a49181c34110.png~tplv-t2oaga2asx-image.image", "back_ground": "", "show_navi": 1, "ctime": 1435971546, "mtime": 1631692835, "id_type": 9, "tag_alias": "", "post_article_count": 88827, "concern_user_count": 527704}, {"id": 2546591, "tag_id": 
"6809640497393434632", "tag_name": "Canvas", "color": "#F51A00", "icon": "https://p1-jj.byteimg.com/tos-cn-i-t2oaga2asx/leancloud-assets/d21230e1c079d7706713.png~tplv-t2oaga2asx-image.image", "back_ground": "", "show_navi": 0, "ctime": 1439500018, "mtime": 1631692166, "id_type": 9, "tag_alias": "", "post_article_count": 2046, "concern_user_count": 60618}], "user_interact": {"id": 6844903715669999629, "omitempty": 2, "user_id": 0, "is_digg": false, "is_follow": false, "is_collect": false}, "org": {"org_info": null, "org_user": null, "is_followed": false}, "req_id": "202109151602320102121621585D0053A3"}, {"article_id": "6997174976654606367", "article_info": {"article_id": "6997174976654606367", "user_id": "1777229436029928", "category_id": "6809637767543259144", "tag_ids": [6809640407484334093, 6809640357354012685], "visible_level": 0, "link_url": "", "cover_image": "", "is_gfw": 0, "title": "[React]优化fetch加载loading,图片懒加载,分类页封装", "brief_content": "这是我参与8月更文挑战的第7天,活动详情查看:8月更文挑战。 一、fetch加载loading 效果如下 二、图片懒加载 三、新建一个分类页面 3.2 左侧导航栏 6. 
右侧详情子路由 项目地址", "is_english": 0, "is_original": 1, "user_index": 5.749252780678518, "original_type": 0, "original_author": "", "content": "", "ctime": "1629157403", "mtime": "1629267961", "rtime": "1629267961", "draft_id": "6997135412502200334", "view_count": 569, "collect_count": 5, "digg_count": 13, "comment_count": 0, "hot_index": 41, "is_hot": 0, "rank_index": 0.00998955, "status": 2, "verify_status": 1, "audit_status": 2, "mark_content": ""}, "author_user_info": {"user_id": "1777229436029928", "user_name": "逍遥coding", "company": "湖南德雅曼达", "job_title": "", "avatar_large": "https://sf1-ttcdn-tos.pstatp.com/img/user-avatar/5251b37cc3556d0b3958dc790cc5fdec~300x300.image", "level": 2, "description": "", "followee_count": 37, "follower_count": 25, "post_article_count": 42, "digg_article_count": 174, "got_digg_count": 387, "got_view_count": 9277, "post_shortmsg_count": 0, "digg_shortmsg_count": 3, "isfollowed": false, "favorable_author": 0, "power": 479, "study_point": 0, "university": {"university_id": "0", "name": "", "logo": ""}, "major": {"major_id": "0", "parent_id": "0", "name": ""}, "student_status": 0, "select_event_count": 0, "select_online_course_count": 0, "identity": 0, "is_select_annual": false, "select_annual_rank": 0, "annual_list_type": 0, "extraMap": {}, "is_logout": 0}, "category": {"category_id": "6809637767543259144", "category_name": "前端", "category_url": "frontend", "rank": 2, "back_ground": "https://lc-mhke0kuv.cn-n1.lcfile.com/8c95587526f346c0.png", "icon": "https://lc-mhke0kuv.cn-n1.lcfile.com/1c40f5eaba561e32.png", "ctime": 1457483942, "mtime": 1432503190, "show_type": 3, "item_type": 2, "promote_tag_cap": 4, "promote_priority": 2}, "tags": [{"id": 2546526, "tag_id": "6809640407484334093", "tag_name": "前端", "color": "#60ADFF", "icon": "https://p1-jj.byteimg.com/tos-cn-i-t2oaga2asx/leancloud-assets/bac28828a49181c34110.png~tplv-t2oaga2asx-image.image", "back_ground": "", "show_navi": 1, "ctime": 1435971546, "mtime": 1631692835, "id_type": 9, 
"tag_alias": "", "post_article_count": 88827, "concern_user_count": 527704}, {"id": 2546490, "tag_id": "6809640357354012685", "tag_name": "React.js", "color": "#61DAFB", "icon": "https://p1-jj.byteimg.com/tos-cn-i-t2oaga2asx/leancloud-assets/f655215074250f10f8d4.png~tplv-t2oaga2asx-image.image", "back_ground": "", "show_navi": 0, "ctime": 1432234367, "mtime": 1631692935, "id_type": 9, "tag_alias": "", "post_article_count": 16999, "concern_user_count": 226420}], "user_interact": {"id": 6997174976654606367, "omitempty": 2, "user_id": 0, "is_digg": false, "is_follow": false, "is_collect": false}, "org": {"org_info": null, "org_user": null, "is_followed": false}, "req_id": "202109151602320102121621585D0053A3"}, {"article_id": "7005841142566633486", "article_info": {"article_id": "7005841142566633486", "user_id": "4195392104175527", "category_id": "6809637767543259144", "tag_ids": [6809640407484334093, 6809640357354012685], "visible_level": 0, "link_url": "", "cover_image": "https://p3-juejin.byteimg.com/tos-cn-i-k3u1fbpfcp/cebd25f17371421198ec7db9e6bf7efc~tplv-k3u1fbpfcp-watermark.image", "is_gfw": 0, "title": "React15完整功能实现(mount&&update&&patch)", "brief_content": "1,React15实现原理 1.1,挂载阶段 1,将挂载虚拟dom元素根据其类型(文本类型节点,原生标签类型节点,组件类型节点)生成对应的组件类实例(文本组件类实例,通用组件类实例,合成组件类实例)", "is_english": 0, "is_original": 1, "user_index": 4.885750781638452, "original_type": 0, "original_author": "", "content": "", "ctime": "1631174592", "mtime": "1631176847", "rtime": "1631176847", "draft_id": "7005495956162478087", "view_count": 39, "collect_count": 0, "digg_count": 0, "comment_count": 0, "hot_index": 1, "is_hot": 0, "rank_index": 0.00996843, "status": 2, "verify_status": 1, "audit_status": 2, "mark_content": ""}, "author_user_info": {"user_id": "4195392104175527", "user_name": "Tsuki_", "company": "苞米", "job_title": "more than two years", "avatar_large": "https://sf1-ttcdn-tos.pstatp.com/img/user-avatar/99973816f15115ba1f9437e862674257~300x300.image", "level": 2, "description": "没有翻不过的沟 
只是你努力还不够", "followee_count": 10, "follower_count": 73, "post_article_count": 25, "digg_article_count": 43, "got_digg_count": 91, "got_view_count": 8933, "post_shortmsg_count": 1, "digg_shortmsg_count": 0, "isfollowed": false, "favorable_author": 0, "power": 180, "study_point": 0, "university": {"university_id": "0", "name": "", "logo": ""}, "major": {"major_id": "0", "parent_id": "0", "name": ""}, "student_status": 0, "select_event_count": 0, "select_online_course_count": 0, "identity": 0, "is_select_annual": false, "select_annual_rank": 0, "annual_list_type": 0, "extraMap": {}, "is_logout": 0}, "category": {"category_id": "6809637767543259144", "category_name": "前端", "category_url": "frontend", "rank": 2, "back_ground": "https://lc-mhke0kuv.cn-n1.lcfile.com/8c95587526f346c0.png", "icon": "https://lc-mhke0kuv.cn-n1.lcfile.com/1c40f5eaba561e32.png", "ctime": 1457483942, "mtime": 1432503190, "show_type": 3, "item_type": 2, "promote_tag_cap": 4, "promote_priority": 2}, "tags": [{"id": 2546526, "tag_id": "6809640407484334093", "tag_name": "前端", "color": "#60ADFF", "icon": "https://p1-jj.byteimg.com/tos-cn-i-t2oaga2asx/leancloud-assets/bac28828a49181c34110.png~tplv-t2oaga2asx-image.image", "back_ground": "", "show_navi": 1, "ctime": 1435971546, "mtime": 1631692835, "id_type": 9, "tag_alias": "", "post_article_count": 88827, "concern_user_count": 527704}, {"id": 2546490, "tag_id": "6809640357354012685", "tag_name": "React.js", "color": "#61DAFB", "icon": "https://p1-jj.byteimg.com/tos-cn-i-t2oaga2asx/leancloud-assets/f655215074250f10f8d4.png~tplv-t2oaga2asx-image.image", "back_ground": "", "show_navi": 0, "ctime": 1432234367, "mtime": 1631692935, "id_type": 9, "tag_alias": "", "post_article_count": 16999, "concern_user_count": 226420}], "user_interact": {"id": 7005841142566633486, "omitempty": 2, "user_id": 0, "is_digg": false, "is_follow": false, "is_collect": false}, "org": {"org_info": null, "org_user": null, "is_followed": false}, "req_id": 
"202109151602320102121621585D0053A3"}, {"article_id": "6995070859840847902", "article_info": {"article_id": "6995070859840847902", "user_id": "1574156383563496", "category_id": "6809637767543259144", "tag_ids": [6809640357354012685, 6809640407484334093], "visible_level": 0, "link_url": "", "cover_image": "https://p9-juejin.byteimg.com/tos-cn-i-k3u1fbpfcp/4b9d5b04085e42b4ab21f5ec52be2a89~tplv-k3u1fbpfcp-watermark.image", "is_gfw": 0, "title": "React 小册 | 起底 JSX", "brief_content": "写在前面 本系列是博主在学习 React 过程中 阅读如下文档做出的笔记 如果有错误 希望在评论区指出哦 🤩🤩🤩 预计将会更新如下内容 React 小册 - 起步 JSX ✅ ✅ React 小册 -", "is_english": 0, "is_original": 1, "user_index": 0, "original_type": 0, "original_author": "", "content": "", "ctime": "1628666857", "mtime": "1631354393", "rtime": "1628749526", "draft_id": "6995070067759120392", "view_count": 1002, "collect_count": 6, "digg_count": 12, "comment_count": 0, "hot_index": 61, "is_hot": 0, "rank_index": 0.00996173, "status": 2, "verify_status": 1, "audit_status": 2, "mark_content": ""}, "author_user_info": {"user_id": "1574156383563496", "user_name": "南树", "company": "", "job_title": "CV运动员", "avatar_large": "https://sf3-ttcdn-tos.pstatp.com/img/user-avatar/c1131afb86820d6152f6b8336aa704d8~300x300.image", "level": 2, "description": "走过清澈的往日 一如扑水的少年", "followee_count": 8, "follower_count": 25, "post_article_count": 31, "digg_article_count": 56, "got_digg_count": 255, "got_view_count": 13559, "post_shortmsg_count": 0, "digg_shortmsg_count": 2, "isfollowed": false, "favorable_author": 0, "power": 390, "study_point": 0, "university": {"university_id": "0", "name": "", "logo": ""}, "major": {"major_id": "0", "parent_id": "0", "name": ""}, "student_status": 0, "select_event_count": 0, "select_online_course_count": 0, "identity": 0, "is_select_annual": false, "select_annual_rank": 0, "annual_list_type": 0, "extraMap": {}, "is_logout": 0}, "category": {"category_id": "6809637767543259144", "category_name": "前端", "category_url": "frontend", "rank": 2, "back_ground": 
"https://lc-mhke0kuv.cn-n1.lcfile.com/8c95587526f346c0.png", "icon": "https://lc-mhke0kuv.cn-n1.lcfile.com/1c40f5eaba561e32.png", "ctime": 1457483942, "mtime": 1432503190, "show_type": 3, "item_type": 2, "promote_tag_cap": 4, "promote_priority": 2}, "tags": [{"id": 2546490, "tag_id": "6809640357354012685", "tag_name": "React.js", "color": "#61DAFB", "icon": "https://p1-jj.byteimg.com/tos-cn-i-t2oaga2asx/leancloud-assets/f655215074250f10f8d4.png~tplv-t2oaga2asx-image.image", "back_ground": "", "show_navi": 0, "ctime": 1432234367, "mtime": 1631692935, "id_type": 9, "tag_alias": "", "post_article_count": 16999, "concern_user_count": 226420}, {"id": 2546526, "tag_id": "6809640407484334093", "tag_name": "前端", "color": "#60ADFF", "icon": "https://p1-jj.byteimg.com/tos-cn-i-t2oaga2asx/leancloud-assets/bac28828a49181c34110.png~tplv-t2oaga2asx-image.image", "back_ground": "", "show_navi": 1, "ctime": 1435971546, "mtime": 1631692835, "id_type": 9, "tag_alias": "", "post_article_count": 88827, "concern_user_count": 527704}], "user_interact": {"id": 6995070859840847902, "omitempty": 2, "user_id": 0, "is_digg": false, "is_follow": false, "is_collect": false}, "org": {"org_info": null, "org_user": null, "is_followed": false}, "req_id": "202109151602320102121621585D0053A3"}, {"article_id": "6975880358071894053", "article_info": {"article_id": "6975880358071894053", "user_id": "4300945218607197", "category_id": "6809637767543259144", "tag_ids": [6809640357354012685], "visible_level": 0, "link_url": "", "cover_image": "https://p6-juejin.byteimg.com/tos-cn-i-k3u1fbpfcp/b9013bc90e284346a0d49a4fbb036179~tplv-k3u1fbpfcp-watermark.image", "is_gfw": 0, "title": "React-我们村刚通网之虚拟 DOM(一)", "brief_content": "本文从什么是虚拟 DOM、为什么使用虚拟 DOM、虚拟 DOM 的实现原理等 3 个角度对虚拟 DOM 进行讲述。", "is_english": 0, "is_original": 1, "user_index": 0, "original_type": 0, "original_author": "", "content": "", "ctime": "1624198906", "mtime": "1624784070", "rtime": "1624258034", "draft_id": "6975761690415595550", "view_count": 
3454, "collect_count": 29, "digg_count": 25, "comment_count": 4, "hot_index": 201, "is_hot": 0, "rank_index": 0.00990624, "status": 2, "verify_status": 1, "audit_status": 2, "mark_content": ""}, "author_user_info": {"user_id": "4300945218607197", "user_name": "清汤饺子", "company": "京东", "job_title": "前端", "avatar_large": "https://sf6-ttcdn-tos.pstatp.com/img/user-avatar/82a5e7eb6b926526e131a367855b5b30~300x300.image", "level": 3, "description": "或许你知道凹凸实验室吗\n", "followee_count": 20, "follower_count": 253, "post_article_count": 13, "digg_article_count": 103, "got_digg_count": 530, "got_view_count": 48595, "post_shortmsg_count": 3, "digg_shortmsg_count": 2, "isfollowed": false, "favorable_author": 0, "power": 1015, "study_point": 0, "university": {"university_id": "0", "name": "", "logo": ""}, "major": {"major_id": "0", "parent_id": "0", "name": ""}, "student_status": 0, "select_event_count": 0, "select_online_course_count": 0, "identity": 0, "is_select_annual": false, "select_annual_rank": 0, "annual_list_type": 0, "extraMap": {}, "is_logout": 0}, "category": {"category_id": "6809637767543259144", "category_name": "前端", "category_url": "frontend", "rank": 2, "back_ground": "https://lc-mhke0kuv.cn-n1.lcfile.com/8c95587526f346c0.png", "icon": "https://lc-mhke0kuv.cn-n1.lcfile.com/1c40f5eaba561e32.png", "ctime": 1457483942, "mtime": 1432503190, "show_type": 3, "item_type": 2, "promote_tag_cap": 4, "promote_priority": 2}, "tags": [{"id": 2546490, "tag_id": "6809640357354012685", "tag_name": "React.js", "color": "#61DAFB", "icon": "https://p1-jj.byteimg.com/tos-cn-i-t2oaga2asx/leancloud-assets/f655215074250f10f8d4.png~tplv-t2oaga2asx-image.image", "back_ground": "", "show_navi": 0, "ctime": 1432234367, "mtime": 1631692935, "id_type": 9, "tag_alias": "", "post_article_count": 16999, "concern_user_count": 226420}], "user_interact": {"id": 6975880358071894053, "omitempty": 2, "user_id": 0, "is_digg": false, "is_follow": false, "is_collect": false}, "org": {"org_info": null, 
"org_user": null, "is_followed": false}, "req_id": "202109151602320102121621585D0053A3"}, {"article_id": "7001713115079376904", "article_info": {"article_id": "7001713115079376904", "user_id": "1284683727647950", "category_id": "6809637767543259144", "tag_ids": [6809640407484334093, 6809640357354012685], "visible_level": 0, "link_url": "", "cover_image": "https://p9-juejin.byteimg.com/tos-cn-i-k3u1fbpfcp/b2c1eff9b9d9416dbe85acf78b399bbf~tplv-k3u1fbpfcp-watermark.image", "is_gfw": 0, "title": "01 手摸手教学-React中的JSX语法-你真的会JSX吗?", "brief_content": "01 jsx初体验 02 jsx的注意点 1、JSX的特点 JSX 执行更快,因为它在编译为 JavaScript 代码后进行了优化。 它是类型安全的,在编译过程中就能发现错误。 使用 JSX 编写模", "is_english": 0, "is_original": 1, "user_index": 2.49491483101579, "original_type": 0, "original_author": "", "content": "", "ctime": "1630213405", "mtime": "1630307735", "rtime": "1630307735", "draft_id": "7001710809881509918", "view_count": 368, "collect_count": 2, "digg_count": 2, "comment_count": 0, "hot_index": 20, "is_hot": 0, "rank_index": 0.00976928, "status": 2, "verify_status": 1, "audit_status": 2, "mark_content": ""}, "author_user_info": {"user_id": "1284683727647950", "user_name": "Koi是攻城狮呦", "company": "", "job_title": "前端开发", "avatar_large": "https://sf1-ttcdn-tos.pstatp.com/img/user-avatar/a38b305f68883791bd5f3f172d319767~300x300.image", "level": 1, "description": "喜欢就坚持吧", "followee_count": 62, "follower_count": 5, "post_article_count": 6, "digg_article_count": 23, "got_digg_count": 10, "got_view_count": 1164, "post_shortmsg_count": 0, "digg_shortmsg_count": 0, "isfollowed": false, "favorable_author": 0, "power": 21, "study_point": 0, "university": {"university_id": "0", "name": "", "logo": ""}, "major": {"major_id": "0", "parent_id": "0", "name": ""}, "student_status": 0, "select_event_count": 0, "select_online_course_count": 0, "identity": 0, "is_select_annual": false, "select_annual_rank": 0, "annual_list_type": 0, "extraMap": {}, "is_logout": 0}, "category": {"category_id": "6809637767543259144", 
"category_name": "前端", "category_url": "frontend", "rank": 2, "back_ground": "https://lc-mhke0kuv.cn-n1.lcfile.com/8c95587526f346c0.png", "icon": "https://lc-mhke0kuv.cn-n1.lcfile.com/1c40f5eaba561e32.png", "ctime": 1457483942, "mtime": 1432503190, "show_type": 3, "item_type": 2, "promote_tag_cap": 4, "promote_priority": 2}, "tags": [{"id": 2546526, "tag_id": "6809640407484334093", "tag_name": "前端", "color": "#60ADFF", "icon": "https://p1-jj.byteimg.com/tos-cn-i-t2oaga2asx/leancloud-assets/bac28828a49181c34110.png~tplv-t2oaga2asx-image.image", "back_ground": "", "show_navi": 1, "ctime": 1435971546, "mtime": 1631692835, "id_type": 9, "tag_alias": "", "post_article_count": 88827, "concern_user_count": 527704}, {"id": 2546490, "tag_id": "6809640357354012685", "tag_name": "React.js", "color": "#61DAFB", "icon": "https://p1-jj.byteimg.com/tos-cn-i-t2oaga2asx/leancloud-assets/f655215074250f10f8d4.png~tplv-t2oaga2asx-image.image", "back_ground": "", "show_navi": 0, "ctime": 1432234367, "mtime": 1631692935, "id_type": 9, "tag_alias": "", "post_article_count": 16999, "concern_user_count": 226420}], "user_interact": {"id": 7001713115079376904, "omitempty": 2, "user_id": 0, "is_digg": false, "is_follow": false, "is_collect": false}, "org": {"org_info": null, "org_user": null, "is_followed": false}, "req_id": "202109151602320102121621585D0053A3"}, {"article_id": "7003628658862604302", "article_info": {"article_id": "7003628658862604302", "user_id": "2612095356253341", "category_id": "6809637767543259144", "tag_ids": [6809640357354012685, 6809640407484334093], "visible_level": 0, "link_url": "", "cover_image": "https://p6-juejin.byteimg.com/tos-cn-i-k3u1fbpfcp/036f98ed2f944f228c91c87dd2fc93bb~tplv-k3u1fbpfcp-watermark.image", "is_gfw": 0, "title": "如何在React + TypeScript 声明带有children的Props", "brief_content": "声明带有children的Props,有很多种方式。 ReactNode 直接在prop上手动添加children属性 假如children是可选的,可以添加一个可选标记? 
不过,不建议这种方式,因为", "is_english": 0, "is_original": 1, "user_index": 4.589179940530471, "original_type": 0, "original_author": "", "content": "", "ctime": "1630659514", "mtime": "1630905847", "rtime": "1630905847", "draft_id": "7003627359286853669", "view_count": 112, "collect_count": 1, "digg_count": 1, "comment_count": 0, "hot_index": 6, "is_hot": 0, "rank_index": 0.00984534, "status": 2, "verify_status": 1, "audit_status": 2, "mark_content": ""}, "author_user_info": {"user_id": "2612095356253341", "user_name": "太凉", "company": "饮马江湖,仗剑天涯", "job_title": "前端开发工程师", "avatar_large": "https://sf1-ttcdn-tos.pstatp.com/img/user-avatar/af3e17ed46d3e8427fac2d6173639987~300x300.image", "level": 1, "description": "勤思辨,守匠心", "followee_count": 9, "follower_count": 10, "post_article_count": 15, "digg_article_count": 31, "got_digg_count": 62, "got_view_count": 3625, "post_shortmsg_count": 1, "digg_shortmsg_count": 4, "isfollowed": false, "favorable_author": 0, "power": 98, "study_point": 0, "university": {"university_id": "0", "name": "", "logo": ""}, "major": {"major_id": "0", "parent_id": "0", "name": ""}, "student_status": 0, "select_event_count": 0, "select_online_course_count": 0, "identity": 0, "is_select_annual": false, "select_annual_rank": 0, "annual_list_type": 0, "extraMap": {}, "is_logout": 0}, "category": {"category_id": "6809637767543259144", "category_name": "前端", "category_url": "frontend", "rank": 2, "back_ground": "https://lc-mhke0kuv.cn-n1.lcfile.com/8c95587526f346c0.png", "icon": "https://lc-mhke0kuv.cn-n1.lcfile.com/1c40f5eaba561e32.png", "ctime": 1457483942, "mtime": 1432503190, "show_type": 3, "item_type": 2, "promote_tag_cap": 4, "promote_priority": 2}, "tags": [{"id": 2546490, "tag_id": "6809640357354012685", "tag_name": "React.js", "color": "#61DAFB", "icon": "https://p1-jj.byteimg.com/tos-cn-i-t2oaga2asx/leancloud-assets/f655215074250f10f8d4.png~tplv-t2oaga2asx-image.image", "back_ground": "", "show_navi": 0, "ctime": 1432234367, "mtime": 1631692935, 
"id_type": 9, "tag_alias": "", "post_article_count": 16999, "concern_user_count": 226420}, {"id": 2546526, "tag_id": "6809640407484334093", "tag_name": "前端", "color": "#60ADFF", "icon": "https://p1-jj.byteimg.com/tos-cn-i-t2oaga2asx/leancloud-assets/bac28828a49181c34110.png~tplv-t2oaga2asx-image.image", "back_ground": "", "show_navi": 1, "ctime": 1435971546, "mtime": 1631692835, "id_type": 9, "tag_alias": "", "post_article_count": 88827, "concern_user_count": 527704}], "user_interact": {"id": 7003628658862604302, "omitempty": 2, "user_id": 0, "is_digg": false, "is_follow": false, "is_collect": false}, "org": {"org_info": null, "org_user": null, "is_followed": false}, "req_id": "202109151602320102121621585D0053A3"}, {"article_id": "6939766434159394830", "article_info": {"article_id": "6939766434159394830", "user_id": "2330620350708823", "category_id": "6809637767543259144", "tag_ids": [6809640357354012685, 6809640398105870343], "visible_level": 0, "link_url": "", "cover_image": "https://p6-juejin.byteimg.com/tos-cn-i-k3u1fbpfcp/3a373c2e7ed04f87b254995f5d3fa166~tplv-k3u1fbpfcp-watermark.image", "is_gfw": 0, "title": "我打破了 React Hook 必须按顺序、不能在条件语句中调用的枷锁", "brief_content": "这个限制在开发中也确实会时常影响到我们的开发体验,比如函数组件中出现 if 语句提前 return 了,后面又出现 Hook 调用的话,React 官方推的 eslint 规则也会给出警告。 其实是个挺常见的用法,很多时候满足某个条件了我们就不希望组件继续渲染下去。但由于这个限…", "is_english": 0, "is_original": 1, "user_index": 15.528163819455008, "original_type": 0, "original_author": "", "content": "", "ctime": "1615790369", "mtime": "1615790374", "rtime": "1615790374", "draft_id": "6939766104533237767", "view_count": 6135, "collect_count": 97, "digg_count": 174, "comment_count": 41, "hot_index": 521, "is_hot": 0, "rank_index": 0.0097905, "status": 2, "verify_status": 1, "audit_status": 2, "mark_content": ""}, "author_user_info": {"user_id": "2330620350708823", "user_name": "ssh_晨曦时梦见兮", "company": "微信 sshsunlight,交个朋友", "job_title": "前端", "avatar_large": 
"https://sf3-ttcdn-tos.pstatp.com/img/user-avatar/090d6608420cd5864d7564939c8f72ab~300x300.image", "level": 6, "description": "前端从进阶到入院@公众号", "followee_count": 45, "follower_count": 17722, "post_article_count": 67, "digg_article_count": 297, "got_digg_count": 27253, "got_view_count": 1296157, "post_shortmsg_count": 26, "digg_shortmsg_count": 33, "isfollowed": false, "favorable_author": 1, "power": 40277, "study_point": 0, "university": {"university_id": "0", "name": "", "logo": ""}, "major": {"major_id": "0", "parent_id": "0", "name": ""}, "student_status": 0, "select_event_count": 0, "select_online_course_count": 0, "identity": 0, "is_select_annual": false, "select_annual_rank": 2, "annual_list_type": 0, "extraMap": {}, "is_logout": 0}, "category": {"category_id": "6809637767543259144", "category_name": "前端", "category_url": "frontend", "rank": 2, "back_ground": "https://lc-mhke0kuv.cn-n1.lcfile.com/8c95587526f346c0.png", "icon": "https://lc-mhke0kuv.cn-n1.lcfile.com/1c40f5eaba561e32.png", "ctime": 1457483942, "mtime": 1432503190, "show_type": 3, "item_type": 2, "promote_tag_cap": 4, "promote_priority": 2}, "tags": [{"id": 2546490, "tag_id": "6809640357354012685", "tag_name": "React.js", "color": "#61DAFB", "icon": "https://p1-jj.byteimg.com/tos-cn-i-t2oaga2asx/leancloud-assets/f655215074250f10f8d4.png~tplv-t2oaga2asx-image.image", "back_ground": "", "show_navi": 0, "ctime": 1432234367, "mtime": 1631692935, "id_type": 9, "tag_alias": "", "post_article_count": 16999, "concern_user_count": 226420}, {"id": 2546519, "tag_id": "6809640398105870343", "tag_name": "JavaScript", "color": "#616161", "icon": "https://p1-jj.byteimg.com/tos-cn-i-t2oaga2asx/leancloud-assets/5d70fd6af940df373834.png~tplv-t2oaga2asx-image.image", "back_ground": "", "show_navi": 0, "ctime": 1435884803, "mtime": 1631692583, "id_type": 9, "tag_alias": "", "post_article_count": 67405, "concern_user_count": 398956}], "user_interact": {"id": 6939766434159394830, "omitempty": 2, "user_id": 0, "is_digg": 
false, "is_follow": false, "is_collect": false}, "org": {"org_info": {"org_type": 1, "org_id": "6930945479362478092", "online_version_id": 6956107828814118926, "latest_version_id": 6956107828814118926, "power": 23997, "ctime": 1613802274, "mtime": 1631692819, "audit_status": 2, "status": 0, "org_version": {"version_id": "6956107828814118926", "icon": "https://p6-juejin.byteimg.com/tos-cn-i-k3u1fbpfcp/498f2ad5f4cf43dd9093b71c5648f50f~tplv-k3u1fbpfcp-watermark.image", "background": "https://p9-juejin.byteimg.com/tos-cn-i-k3u1fbpfcp/71802e497d5142c89eb7f11b0ac01025~tplv-k3u1fbpfcp-watermark.image", "name": "ByteDance Web Infra", "introduction": "\n", "weibo_link": "", "github_link": "", "homepage_link": "https://webinfra.org/", "ctime": 1620471518, "mtime": 1620471518, "org_id": "6930945479362478092", "brief_introduction": "字节跳动 Web Infra 团队 | The best way to predict the future is to create it", "introduction_preview": "[图片]"}, "follower_count": 1581, "article_view_count": 740408, "article_digg_count": 16593}, "org_user": null, "is_followed": false}, "req_id": "202109151602320102121621585D0053A3"}, {"article_id": "6844903632488366088", "article_info": {"article_id": "6844903632488366088", "user_id": "3984285868490807", "category_id": "6809637767543259144", "tag_ids": [6809640357354012685, 6809640361531539470, 6809640369764958215, 6809640398105870343, 6809640407484334093, 6809640411473117197], "visible_level": 0, "link_url": "https://juejin.im/post/6844903632488366088", "cover_image": "https://p1-jj.byteimg.com/tos-cn-i-t2oaga2asx/gold-user-assets/2018/7/3/1645e0def78d2e49~tplv-t2oaga2asx-image.image", "is_gfw": 0, "title": "2018上半年掘金微信群日报优质文章合集:前端篇", "brief_content": "逐步学习什么是递归?通过使用场景来深入认识递归。 前端性能优化之路——图片篇。 今年的文章量比去年又多了好多,看来大家在掘金越来越活跃啦!可以Mark起来慢慢看~不过呢小饼还是建议大家到微信群里看每日小报,毕竟每天消化一两篇会更加有用,日积月累就会有很大的收益。反而积攒到一起的…", "is_english": 0, "is_original": 1, "user_index": 0, "original_type": 0, "original_author": "", "content": "", "ctime": "1530586430", "mtime": "1599542870", 
"rtime": "1530586430", "draft_id": "6845075560943599624", "view_count": 57673, "collect_count": 2086, "digg_count": 2932, "comment_count": 55, "hot_index": 5870, "is_hot": 0, "rank_index": 0.00967395, "status": 2, "verify_status": 1, "audit_status": 2, "mark_content": ""}, "author_user_info": {"user_id": "3984285868490807", "user_name": "赵小饼", "company": "", "job_title": "掘金最可爱的小姐姐", "avatar_large": "https://p1-jj.byteimg.com/tos-cn-i-t2oaga2asx/mirror-assets/168e087825771e3e5ac~tplv-t2oaga2asx-image.image", "level": 4, "description": "", "followee_count": 21, "follower_count": 1750, "post_article_count": 5, "digg_article_count": 3937, "got_digg_count": 3907, "got_view_count": 80815, "post_shortmsg_count": 7, "digg_shortmsg_count": 11, "isfollowed": false, "favorable_author": 1, "power": 7314, "study_point": 0, "university": {"university_id": "0", "name": "", "logo": ""}, "major": {"major_id": "0", "parent_id": "0", "name": ""}, "student_status": 0, "select_event_count": 0, "select_online_course_count": 0, "identity": 0, "is_select_annual": false, "select_annual_rank": 0, "annual_list_type": 0, "extraMap": {}, "is_logout": 0}, "category": {"category_id": "6809637767543259144", "category_name": "前端", "category_url": "frontend", "rank": 2, "back_ground": "https://lc-mhke0kuv.cn-n1.lcfile.com/8c95587526f346c0.png", "icon": "https://lc-mhke0kuv.cn-n1.lcfile.com/1c40f5eaba561e32.png", "ctime": 1457483942, "mtime": 1432503190, "show_type": 3, "item_type": 2, "promote_tag_cap": 4, "promote_priority": 2}, "tags": [{"id": 2546490, "tag_id": "6809640357354012685", "tag_name": "React.js", "color": "#61DAFB", "icon": "https://p1-jj.byteimg.com/tos-cn-i-t2oaga2asx/leancloud-assets/f655215074250f10f8d4.png~tplv-t2oaga2asx-image.image", "back_ground": "", "show_navi": 0, "ctime": 1432234367, "mtime": 1631692935, "id_type": 9, "tag_alias": "", "post_article_count": 16999, "concern_user_count": 226420}, {"id": 2546492, "tag_id": "6809640361531539470", "tag_name": "Node.js", 
"color": "#e81864", "icon": "https://p1-jj.byteimg.com/tos-cn-i-t2oaga2asx/leancloud-assets/f16f548d25028a1fdd80.png~tplv-t2oaga2asx-image.image", "back_ground": "", "show_navi": 0, "ctime": 1432234488, "mtime": 1631690352, "id_type": 9, "tag_alias": "", "post_article_count": 11514, "concern_user_count": 280711}, {"id": 2546498, "tag_id": "6809640369764958215", "tag_name": "Vue.js", "color": "#41B883", "icon": "https://p1-jj.byteimg.com/tos-cn-i-t2oaga2asx/leancloud-assets/7b5c3eb591b671749fee.png~tplv-t2oaga2asx-image.image", "back_ground": "", "show_navi": 0, "ctime": 1432234520, "mtime": 1631692660, "id_type": 9, "tag_alias": "", "post_article_count": 31256, "concern_user_count": 313520}, {"id": 2546519, "tag_id": "6809640398105870343", "tag_name": "JavaScript", "color": "#616161", "icon": "https://p1-jj.byteimg.com/tos-cn-i-t2oaga2asx/leancloud-assets/5d70fd6af940df373834.png~tplv-t2oaga2asx-image.image", "back_ground": "", "show_navi": 0, "ctime": 1435884803, "mtime": 1631692583, "id_type": 9, "tag_alias": "", "post_article_count": 67405, "concern_user_count": 398956}, {"id": 2546526, "tag_id": "6809640407484334093", "tag_name": "前端", "color": "#60ADFF", "icon": "https://p1-jj.byteimg.com/tos-cn-i-t2oaga2asx/leancloud-assets/bac28828a49181c34110.png~tplv-t2oaga2asx-image.image", "back_ground": "", "show_navi": 1, "ctime": 1435971546, "mtime": 1631692835, "id_type": 9, "tag_alias": "", "post_article_count": 88827, "concern_user_count": 527704}, {"id": 2546529, "tag_id": "6809640411473117197", "tag_name": "ECMAScript 6", "color": "#F46507", "icon": "https://p1-jj.byteimg.com/tos-cn-i-t2oaga2asx/leancloud-assets/e384dbc6d1ab15f046cf.png~tplv-t2oaga2asx-image.image", "back_ground": "", "show_navi": 0, "ctime": 1435971773, "mtime": 1631685869, "id_type": 9, "tag_alias": "", "post_article_count": 3420, "concern_user_count": 176180}], "user_interact": {"id": 6844903632488366088, "omitempty": 2, "user_id": 0, "is_digg": false, "is_follow": false, "is_collect": false}, 
"org": {"org_info": null, "org_user": null, "is_followed": false}, "req_id": "202109151602320102121621585D0053A3"}, {"article_id": "7003606793775480862", "article_info": {"article_id": "7003606793775480862", "user_id": "70822370479901", "category_id": "6809637767543259144", "tag_ids": [6809640357354012685, 6809640407484334093], "visible_level": 0, "link_url": "", "cover_image": "https://p9-juejin.byteimg.com/tos-cn-i-k3u1fbpfcp/80766d5be0d4401da88435e47314b136~tplv-k3u1fbpfcp-watermark.image", "is_gfw": 0, "title": "【组件开发系列】骨架屏", "brief_content": "一期的开发中,我们把基础的常用的组件基本都完成了。二期计划将一些不太常用但是能提升交互体验组件纳入开发计划,比如骨架屏,比如步骤条等。组件开发系列第一篇,让我们一起来实现一个骨架屏组件的开发吧。", "is_english": 0, "is_original": 1, "user_index": 3.41902258270291, "original_type": 0, "original_author": "", "content": "", "ctime": "1630654402", "mtime": "1630905668", "rtime": "1630905668", "draft_id": "7003601517009715237", "view_count": 118, "collect_count": 1, "digg_count": 2, "comment_count": 0, "hot_index": 7, "is_hot": 0, "rank_index": 0.00965079, "status": 2, "verify_status": 1, "audit_status": 2, "mark_content": ""}, "author_user_info": {"user_id": "70822370479901", "user_name": "叶一一", "company": "", "job_title": "前端开发", "avatar_large": "https://sf3-ttcdn-tos.pstatp.com/img/user-avatar/c6c1a335a3b48adc43e011dd21bfdc60~300x300.image", "level": 1, "description": "苍生涂涂,天下缭燎,诸子百家,唯我纵横。", "followee_count": 5, "follower_count": 2, "post_article_count": 6, "digg_article_count": 8, "got_digg_count": 18, "got_view_count": 773, "post_shortmsg_count": 0, "digg_shortmsg_count": 0, "isfollowed": false, "favorable_author": 0, "power": 25, "study_point": 0, "university": {"university_id": "0", "name": "", "logo": ""}, "major": {"major_id": "0", "parent_id": "0", "name": ""}, "student_status": 0, "select_event_count": 0, "select_online_course_count": 0, "identity": 0, "is_select_annual": false, "select_annual_rank": 0, "annual_list_type": 0, "extraMap": {}, "is_logout": 0}, "category": {"category_id": "6809637767543259144", 
"category_name": "前端", "category_url": "frontend", "rank": 2, "back_ground": "https://lc-mhke0kuv.cn-n1.lcfile.com/8c95587526f346c0.png", "icon": "https://lc-mhke0kuv.cn-n1.lcfile.com/1c40f5eaba561e32.png", "ctime": 1457483942, "mtime": 1432503190, "show_type": 3, "item_type": 2, "promote_tag_cap": 4, "promote_priority": 2}, "tags": [{"id": 2546490, "tag_id": "6809640357354012685", "tag_name": "React.js", "color": "#61DAFB", "icon": "https://p1-jj.byteimg.com/tos-cn-i-t2oaga2asx/leancloud-assets/f655215074250f10f8d4.png~tplv-t2oaga2asx-image.image", "back_ground": "", "show_navi": 0, "ctime": 1432234367, "mtime": 1631692935, "id_type": 9, "tag_alias": "", "post_article_count": 16999, "concern_user_count": 226420}, {"id": 2546526, "tag_id": "6809640407484334093", "tag_name": "前端", "color": "#60ADFF", "icon": "https://p1-jj.byteimg.com/tos-cn-i-t2oaga2asx/leancloud-assets/bac28828a49181c34110.png~tplv-t2oaga2asx-image.image", "back_ground": "", "show_navi": 1, "ctime": 1435971546, "mtime": 1631692835, "id_type": 9, "tag_alias": "", "post_article_count": 88827, "concern_user_count": 527704}], "user_interact": {"id": 7003606793775480862, "omitempty": 2, "user_id": 0, "is_digg": false, "is_follow": false, "is_collect": false}, "org": {"org_info": null, "org_user": null, "is_followed": false}, "req_id": "202109151602320102121621585D0053A3"}], "cursor": "eyJ2IjoiNzAwNzA0ODMwNjQzODE3Njc5OSIsImkiOjI0MH0=", "count": 6769, "has_more": true} | [
"[email protected]"
] | |
76bc65a6a3966eaf28d60e029208b8b96b2010f2 | 90a2d0bed5d9eeb6b56c7ac96cc5fbee79dc4c5e | /.history/string_format_20220425174809.py | 485acedc7e0afa5c104cd58b4822e854bba33fb0 | [] | no_license | KustomApe/dev | 2d495e22363707b15a22860a773dac6c463903ee | a936f5c3b0928eaa2efaf28c6be8cacc17c3ecb3 | refs/heads/master | 2023-04-28T11:20:03.056953 | 2023-04-07T17:43:40 | 2023-04-07T17:43:40 | 138,429,111 | 1 | 0 | null | 2023-04-25T19:26:09 | 2018-06-23T19:47:23 | Python | UTF-8 | Python | false | false | 254 | py | age = 20
# Demonstrates str.format(): {} placeholders auto-number, while {0}/{1}/{2}
# index the arguments explicitly.  Uses the module-level `age` defined above.
year = 2022
next_age = 21
# "My age is {} years."
print('私の年齢は{}歳です。'.format(age))
# "This year is {0} AD. I am {1} years old; next year I will be {2}."
print('今年は西暦{0}年です。私の年齢は{1}歳で、来年は{2}歳になります。'.format(year, age, next_age))
print('egg')
# Same output as the line above, via an explicit positional placeholder.
print('{0}'.format('egg'))
"[email protected]"
] | |
2800bc4bde551bf0062ded594820b2eccc44f3d9 | 5b492bf5a906141be9557e654f7f4e7181f4d8eb | /backend/placeNewOrder/serializers.py | 62a05d50a47a85685e7c7bf3fc9dfda57048e9a8 | [] | no_license | KaziMotiour/fast-courier | ac5dece7e7432e80614623dd3795e82d6e253cfa | 17c3cc26e0aa28467fccafa0cb2019aa0066ff63 | refs/heads/main | 2023-06-05T15:36:39.338321 | 2021-06-29T16:40:22 | 2021-06-29T16:40:22 | 361,289,665 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,122 | py | from rest_framework import serializers
from .models import Marchant, PlaceNewOrder
from django.contrib.auth.models import User
class UserSerializer(serializers.ModelSerializer):
    """Serializer for Django's built-in User model.

    Exposes the related `marchant` field by primary key
    (presumably the reverse relation from Marchant.user — verify
    against the Marchant model).
    """
    class Meta:
        model = User
        fields = ['id', 'username', 'is_superuser', 'marchant']
class MarchentSerializer(serializers.ModelSerializer):
    """Serializer for Marchant profiles with the owning user nested inline.

    NOTE(review): class name is spelled "Marchent" while the model is
    "Marchant" — renaming would break existing importers, so it is kept.
    """
    # Nested serializer; read-only for writes unless create/update are overridden.
    user=UserSerializer()
    class Meta:
        model = Marchant
        fields = ['id', 'user', 'first_name', 'last_name']
class PlaceOrderSerializer(serializers.ModelSerializer):
    """Write-side serializer for PlaceNewOrder; `marchant` stays a plain PK."""
    class Meta:
        model = PlaceNewOrder
        fields = ['marchant', 'percel_name', 'percel_type', 'newInvoiceID', 'weight', 'weight_unit', 'cost', 'cod_charge', 'return_charge', 'total_cost', 'Location', 'return_cost', 'timestamp']
class PlaceOrderListSerializer(serializers.ModelSerializer):
    """Read-side serializer for listing orders with the marchant expanded inline."""
    # Nested representation instead of the bare PK used by PlaceOrderSerializer.
    marchant = MarchentSerializer()
    class Meta:
        model = PlaceNewOrder
        fields = ['marchant', 'percel_name', 'percel_type', 'newInvoiceID', 'weight', 'weight_unit', 'cost', 'cod_charge', 'return_charge', 'total_cost', 'return_cost', 'Location', 'timestamp']
| [
"[email protected]"
] | |
be5831beb31cdbef71082dd0850cefd69de4a4f8 | 7473d931134c444de3dfe61875e5245e9a4ba319 | /anchore_engine/util/users.py | b03e892b462844d4bcd0710ce2e93da2d4f8e4c2 | [
"Apache-2.0"
] | permissive | anniyanvr/anchore-engine | 7eb693a8761e11f9d1f1f40b998d36c7cb76a36d | f5ffac25aea536016dd08734b4a2aa2746be1f1d | refs/heads/master | 2023-03-08T23:46:30.663379 | 2023-01-26T23:58:10 | 2023-01-26T23:58:10 | 193,227,499 | 0 | 0 | Apache-2.0 | 2023-02-26T10:35:37 | 2019-06-22T11:42:50 | Python | UTF-8 | Python | false | false | 736 | py | SYSTEM_USER_ID = "admin" # The system user is always user '0'.
def is_system_user(user_id):
    """Return True when *user_id* names the built-in system account."""
    return SYSTEM_USER_ID == user_id
def user_ids_to_search(obj):
    """
    Build the ordered list of account ids to consult when looking up
    resources related to the given object (typically an image or package).

    The object's own ``user_id`` (when present) comes first; the system
    account is appended as a fallback unless the object already belongs
    to the system account.

    :param obj: any object, optionally carrying a ``user_id`` attribute
    :return: list of user ids, strongest match first
    """
    if not hasattr(obj, "user_id"):
        return [SYSTEM_USER_ID]

    owner = obj.user_id
    if is_system_user(owner):
        # The system account already owns the object; nothing to append.
        return [owner]

    return [owner, SYSTEM_USER_ID]
| [
"[email protected]"
] | |
6d2ad5baa76e2da122729fe58778cce39934a4e1 | b836471d39ea80003d2d952550c9a1f082d77ae0 | /Numpy高级应用/venv/bin/easy_install | 188f045a188ccfdf08a185c76a092ad5899cfca4 | [] | no_license | JH95-ai/Python-for-data-analysis | 112ed0874f71f4ccd8cdd24b852a5751fde070a9 | b33fefce26ba55e8620e5a3598b644451456f29b | refs/heads/master | 2022-10-31T13:44:36.349833 | 2018-08-14T08:31:52 | 2018-08-14T08:31:52 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 446 | #!/root/PycharmProjects/Numpy高级应用/venv/bin/python -x
# EASY-INSTALL-ENTRY-SCRIPT: 'setuptools==39.1.0','console_scripts','easy_install'
# Auto-generated console-script shim (do not edit by hand): resolves the
# 'easy_install' entry point registered by setuptools 39.1.0 and exits
# with whatever the entry point returns.
__requires__ = 'setuptools==39.1.0'
import re
import sys
from pkg_resources import load_entry_point
if __name__ == '__main__':
    # Normalize argv[0]: strip a trailing "-script.py(w)" or ".exe" added by
    # platform launchers so the entry point sees the plain command name.
    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
    sys.exit(
        load_entry_point('setuptools==39.1.0', 'console_scripts', 'easy_install')()
    )
| [
"[email protected]"
] | ||
ed042fa10383d90f3442befe36848687e127f1ec | f445450ac693b466ca20b42f1ac82071d32dd991 | /generated_tempdir_2019_09_15_163300/generated_part009094.py | fb9ecacb3753ce85b5c502758f6a73e94447253c | [] | no_license | Upabjojr/rubi_generated | 76e43cbafe70b4e1516fb761cabd9e5257691374 | cd35e9e51722b04fb159ada3d5811d62a423e429 | refs/heads/master | 2020-07-25T17:26:19.227918 | 2019-09-15T15:41:48 | 2019-09-15T15:41:48 | 208,357,412 | 4 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,304 | py | from sympy.abc import *
from matchpy.matching.many_to_one import CommutativeMatcher
from matchpy import *
from matchpy.utils import VariableWithCount
from collections import deque
from multiset import Multiset
from sympy.integrals.rubi.constraints import *
from sympy.integrals.rubi.utility_function import *
from sympy.integrals.rubi.rules.miscellaneous_integration import *
from sympy import *
class CommutativeMatcher19127(CommutativeMatcher):
    """Auto-generated MatchPy many-to-one matcher for a commutative Mul pattern.

    Produced by the MatchPy code generator (the numeric suffix is the
    generator's state id) for SymPy's Rubi integration rules — do not
    edit by hand.
    """
    _instance = None
    # Pattern table: index -> (state id, fixed-term multiset, variable parts).
    # Both wildcards capture exactly one Mul operand; the second one has
    # an optional default of S(1).
    patterns = {
        0: (0, Multiset({}), [
            (VariableWithCount('i3.1.2.2.2.1.0', 1, 1, None), Mul),
            (VariableWithCount('i3.1.2.2.2.1.0_1', 1, 1, S(1)), Mul)
        ])
    }
    subjects = {}
    subjects_by_id = {}
    bipartite = BipartiteGraph()
    associative = Mul
    max_optional_count = 1
    anonymous_patterns = set()
    def __init__(self):
        self.add_subject(None)
    @staticmethod
    def get():
        # Lazily-created process-wide singleton.
        if CommutativeMatcher19127._instance is None:
            CommutativeMatcher19127._instance = CommutativeMatcher19127()
        return CommutativeMatcher19127._instance
    @staticmethod
    def get_match_iter(subject):
        subjects = deque([subject]) if subject is not None else deque()
        subst0 = Substitution()
        # State 19126
        return
        yield  # unreachable: turns this function into an (empty) generator
from collections import deque | [
"[email protected]"
] | |
739c09cc81197c1da29e787e71d299a75eafd6d3 | f931249f3766bd871eede76a950484701915c32d | /collective_decision/urls.py | d0ae17e7860ea982b2753b39b340e54169a7963a | [] | no_license | cleliofavoccia/Share | aa509c9cfa1aa3789237b411b2b94d952d848322 | cf0b982a6df2b8b4318d12d344ef0827394eedfd | refs/heads/main | 2023-07-11T08:29:59.016252 | 2021-08-09T10:13:53 | 2021-08-09T10:13:53 | 373,621,787 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 860 | py | """Manage collective_decision app urls"""
from django.urls import path
from . import views
# URL names below are reversed through this namespace, e.g.
# 'collective_decision:delete_vote_group'.
app_name = 'collective_decision'
urlpatterns = [
    # Cast a vote in favour of deleting the group.
    path('delete_vote_group/',
         views.GroupMemberDeleteVoteGroup.as_view(),
         name='delete_vote_group'),
    # Cast a vote against deleting the group.
    path('against_delete_vote_group/',
         views.GroupMemberAgainstDeleteVoteGroup.as_view(),
         name='against_delete_vote_group'),
    # Cast a vote in favour of modifying the group.
    path('modify_vote_group/',
         views.GroupMemberModifyVoteGroup.as_view(),
         name='modify_vote_group'),
    # Cast a vote against modifying the group.
    path('against_modify_vote_group/',
         views.GroupMemberAgainstModifyVoteGroup.as_view(),
         name='against_modify_vote_group'),
    # Group vote page, keyed by primary key (note: no trailing slash).
    path('vote/<int:pk>', views.GroupVoteView.as_view(), name='vote'),
    # Cost-estimation page, keyed by primary key.
    path('estimation/<int:pk>/',
         views.CostEstimationView.as_view(),
         name='estimation'
         ),
]
| [
"[email protected]"
] | |
e740f200c941b20745ea18ccdaf3784e917aeaa5 | 197420c1f28ccb98059888dff214c9fd7226e743 | /happy_pythoning_cource/Part_11/11.1.2.List_sums_and_multiply/11.1.2.List_sums_and_multiply.py | 81079913b5e30bc778983e0b1cbae3246c890f63 | [] | no_license | Vovanuch/python-basics-1 | fc10b6f745defff31364b66c65a704a9cf05d076 | a29affec12e8b80a1d3beda3a50cde4867b1dee2 | refs/heads/master | 2023-07-06T17:10:46.341121 | 2021-08-06T05:38:19 | 2021-08-06T05:38:19 | 267,504,364 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 438 | py | '''
Дополните приведенный код, используя операторы конкатенации (+) и умножения списка на число (*), так чтобы он вывел список:
[1, 2, 3, 1, 2, 3, 6, 6, 6, 6, 6, 6, 6, 6, 6, 7, 8, 9, 10, 11, 12, 13].
'''
# Build [1, 2, 3, 1, 2, 3, 6 (x9), 7..13] using list repetition (*) and
# concatenation (+), then print it.
numbers1 = [1, 2, 3]
numbers2 = [6]
numbers3 = [7, 8, 9, 10, 11, 12, 13]
num4 = 2 * numbers1
num4 += 9 * numbers2
num4 += numbers3
print(num4)
| [
"[email protected]"
] | |
4db3cc4e9548add764c3db6f8713931066ec69c5 | 6b201605227f11880c1d32c9cad300f6e29ff4ae | /Python/Buch_Python3_Das_umfassende_Praxisbuch/Kapitel_07_Sequenzen_Mengen_und_Generatoren/05_quicksort_algorithm.py | 6697b4b4650a83138f8341e1252339a9c8352bea | [
"MIT"
] | permissive | Apop85/Scripts | e2e8e6ed0c0da08a4d7c895aa366c9305197137b | 467c34e59f2708f2d2f8bb369c36fd782d365e8b | refs/heads/master | 2022-12-08T08:11:04.566376 | 2022-05-13T13:17:04 | 2022-05-13T13:17:04 | 164,251,836 | 0 | 0 | MIT | 2022-12-08T01:50:22 | 2019-01-05T21:16:45 | Python | UTF-8 | Python | false | false | 1,223 | py | #!/usr/bin/env python3
# -*- coding:utf-8 -*-
###
# File: 05_quicksort_algorithm.py
# Project: Kapitel_07_Sequenzen_Mengen_und_Generatoren
# Created Date: Sunday 03.03.2019, 20:10
# Author: Apop85
# -----
# Last Modified: Monday 04.03.2019, 12:15
# -----
# Copyright (c) 2019 Apop85
# This software is published under the MIT license.
# Check http://www.opensource.org/licenses/MIT for further informations
# -----
# Description: Chapter 7. Page 215. Quicksort-algorithm using recursive function.
###
def quick_sort(item_list):
    """Return a sorted copy of item_list using recursive quicksort.

    The first element is taken as the pivot; the remainder is split into
    the elements <= pivot and the elements > pivot, each sorted recursively.

    Bug fix: the original partitioned with strict '<' and '>' comparisons,
    silently dropping every element equal to the pivot (duplicates were
    lost from the result).  Using '<=' on the left partition keeps them.

    Prints a 'Sorting...' trace line for every non-empty sublist, as the
    original teaching example did.
    """
    if len(item_list) > 0:
        print('Sorting...', item_list)
    if len(item_list) <= 1:
        # Empty or single-element lists are already sorted.
        return item_list
    pivot, rest = item_list[0], item_list[1:]
    smaller = [x for x in rest if x <= pivot]  # '<=' keeps duplicates of the pivot
    larger = [y for y in rest if y > pivot]
    return quick_sort(smaller) + [pivot] + quick_sort(larger)
# Demo: sort a list of single letters and show the result.
unsorted_list=['m','g','w','h','l','z','b','c','y']
sorted_list=quick_sort(unsorted_list)
print(sorted_list)
"[email protected]"
] | |
ea0928d63adaf899e48c7f83a149f3dd146c11b9 | 70cdf0741a22c678401a306229003bf036ffe5a6 | /ocbind/interfaces/interface/subinterfaces/subinterface/ipv6/addresses/address/__init__.py | cf18475f2526447b2b8d7b88db545f998b85e6ed | [] | no_license | zsblevins/nanog81-hackathon | 5001e034339d6b0c6452ae2474f06916bcd715cf | 1b64fd207dd69837f947094fbd6d6c1cea3a1070 | refs/heads/main | 2023-03-03T09:39:28.460000 | 2021-02-15T13:41:38 | 2021-02-15T13:41:38 | 336,698,856 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 12,820 | py | # -*- coding: utf-8 -*-
from operator import attrgetter
from pyangbind.lib.yangtypes import RestrictedPrecisionDecimalType
from pyangbind.lib.yangtypes import RestrictedClassType
from pyangbind.lib.yangtypes import TypedListType
from pyangbind.lib.yangtypes import YANGBool
from pyangbind.lib.yangtypes import YANGListType
from pyangbind.lib.yangtypes import YANGDynClass
from pyangbind.lib.yangtypes import ReferenceType
from pyangbind.lib.base import PybindBase
from collections import OrderedDict
from decimal import Decimal
from bitarray import bitarray
import six
# PY3 support of some PY2 keywords (needs improved)
if six.PY3:
import builtins as __builtin__
long = int
elif six.PY2:
import __builtin__
from . import config
from . import state
from . import vrrp
class address(PybindBase):
"""
This class was auto-generated by the PythonClass plugin for PYANG
from YANG module openconfig-interfaces - based on the path /interfaces/interface/subinterfaces/subinterface/ipv6/addresses/address. Each member element of
the container is represented as a class variable - with a specific
YANG type.
YANG Description: The list of configured IPv6 addresses on the interface.
"""
__slots__ = ('_path_helper', '_extmethods', '__ip','__config','__state','__vrrp',)
_yang_name = 'address'
_pybind_generated_by = 'container'
def __init__(self, *args, **kwargs):
self._path_helper = False
self._extmethods = False
self.__ip = YANGDynClass(base=six.text_type, is_leaf=True, yang_name="ip", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True, namespace='http://openconfig.net/yang/interfaces/ip', defining_module='openconfig-if-ip', yang_type='leafref', is_config=True)
self.__config = YANGDynClass(base=config.config, is_container='container', yang_name="config", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/interfaces/ip', defining_module='openconfig-if-ip', yang_type='container', is_config=True)
self.__state = YANGDynClass(base=state.state, is_container='container', yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/interfaces/ip', defining_module='openconfig-if-ip', yang_type='container', is_config=True)
self.__vrrp = YANGDynClass(base=vrrp.vrrp, is_container='container', yang_name="vrrp", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/interfaces/ip', defining_module='openconfig-if-ip', yang_type='container', is_config=True)
load = kwargs.pop("load", None)
if args:
if len(args) > 1:
raise TypeError("cannot create a YANG container with >1 argument")
all_attr = True
for e in self._pyangbind_elements:
if not hasattr(args[0], e):
all_attr = False
break
if not all_attr:
raise ValueError("Supplied object did not have the correct attributes")
for e in self._pyangbind_elements:
nobj = getattr(args[0], e)
if nobj._changed() is False:
continue
setmethod = getattr(self, "_set_%s" % e)
if load is None:
setmethod(getattr(args[0], e))
else:
setmethod(getattr(args[0], e), load=load)
  def _path(self):
    """Return this node's YANG schema path as a list of path segments.

    Delegates to the parent when attached to a tree; otherwise falls back
    to the static schema path of this generated class.
    """
    if hasattr(self, "_parent"):
      return self._parent._path()+[self._yang_name]
    else:
      return ['interfaces', 'interface', 'subinterfaces', 'subinterface', 'ipv6', 'addresses', 'address']
def _get_ip(self):
"""
Getter method for ip, mapped from YANG variable /interfaces/interface/subinterfaces/subinterface/ipv6/addresses/address/ip (leafref)
YANG Description: References the configured IP address
"""
return self.__ip
def _set_ip(self, v, load=False):
"""
Setter method for ip, mapped from YANG variable /interfaces/interface/subinterfaces/subinterface/ipv6/addresses/address/ip (leafref)
If this variable is read-only (config: false) in the
source YANG file, then _set_ip is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_ip() directly.
YANG Description: References the configured IP address
"""
parent = getattr(self, "_parent", None)
if parent is not None and load is False:
raise AttributeError("Cannot set keys directly when" +
" within an instantiated list")
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=six.text_type, is_leaf=True, yang_name="ip", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True, namespace='http://openconfig.net/yang/interfaces/ip', defining_module='openconfig-if-ip', yang_type='leafref', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """ip must be of a type compatible with leafref""",
'defined-type': "leafref",
'generated-type': """YANGDynClass(base=six.text_type, is_leaf=True, yang_name="ip", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True, namespace='http://openconfig.net/yang/interfaces/ip', defining_module='openconfig-if-ip', yang_type='leafref', is_config=True)""",
})
self.__ip = t
if hasattr(self, '_set'):
self._set()
def _unset_ip(self):
self.__ip = YANGDynClass(base=six.text_type, is_leaf=True, yang_name="ip", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, is_keyval=True, namespace='http://openconfig.net/yang/interfaces/ip', defining_module='openconfig-if-ip', yang_type='leafref', is_config=True)
def _get_config(self):
"""
Getter method for config, mapped from YANG variable /interfaces/interface/subinterfaces/subinterface/ipv6/addresses/address/config (container)
YANG Description: Configuration data for each IPv6 address on
the interface
"""
return self.__config
def _set_config(self, v, load=False):
"""
Setter method for config, mapped from YANG variable /interfaces/interface/subinterfaces/subinterface/ipv6/addresses/address/config (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_config is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_config() directly.
YANG Description: Configuration data for each IPv6 address on
the interface
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=config.config, is_container='container', yang_name="config", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/interfaces/ip', defining_module='openconfig-if-ip', yang_type='container', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """config must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=config.config, is_container='container', yang_name="config", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/interfaces/ip', defining_module='openconfig-if-ip', yang_type='container', is_config=True)""",
})
self.__config = t
if hasattr(self, '_set'):
self._set()
def _unset_config(self):
self.__config = YANGDynClass(base=config.config, is_container='container', yang_name="config", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/interfaces/ip', defining_module='openconfig-if-ip', yang_type='container', is_config=True)
def _get_state(self):
"""
Getter method for state, mapped from YANG variable /interfaces/interface/subinterfaces/subinterface/ipv6/addresses/address/state (container)
YANG Description: State data for each IPv6 address on the
interface
"""
return self.__state
def _set_state(self, v, load=False):
"""
Setter method for state, mapped from YANG variable /interfaces/interface/subinterfaces/subinterface/ipv6/addresses/address/state (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_state is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_state() directly.
YANG Description: State data for each IPv6 address on the
interface
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=state.state, is_container='container', yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/interfaces/ip', defining_module='openconfig-if-ip', yang_type='container', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """state must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=state.state, is_container='container', yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/interfaces/ip', defining_module='openconfig-if-ip', yang_type='container', is_config=True)""",
})
self.__state = t
if hasattr(self, '_set'):
self._set()
def _unset_state(self):
self.__state = YANGDynClass(base=state.state, is_container='container', yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/interfaces/ip', defining_module='openconfig-if-ip', yang_type='container', is_config=True)
def _get_vrrp(self):
"""
Getter method for vrrp, mapped from YANG variable /interfaces/interface/subinterfaces/subinterface/ipv6/addresses/address/vrrp (container)
YANG Description: Enclosing container for VRRP groups handled by this
IP interface
"""
return self.__vrrp
def _set_vrrp(self, v, load=False):
"""
Setter method for vrrp, mapped from YANG variable /interfaces/interface/subinterfaces/subinterface/ipv6/addresses/address/vrrp (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_vrrp is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_vrrp() directly.
YANG Description: Enclosing container for VRRP groups handled by this
IP interface
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=vrrp.vrrp, is_container='container', yang_name="vrrp", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/interfaces/ip', defining_module='openconfig-if-ip', yang_type='container', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """vrrp must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=vrrp.vrrp, is_container='container', yang_name="vrrp", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/interfaces/ip', defining_module='openconfig-if-ip', yang_type='container', is_config=True)""",
})
self.__vrrp = t
if hasattr(self, '_set'):
self._set()
def _unset_vrrp(self):
self.__vrrp = YANGDynClass(base=vrrp.vrrp, is_container='container', yang_name="vrrp", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/interfaces/ip', defining_module='openconfig-if-ip', yang_type='container', is_config=True)
ip = __builtin__.property(_get_ip, _set_ip)
config = __builtin__.property(_get_config, _set_config)
state = __builtin__.property(_get_state, _set_state)
vrrp = __builtin__.property(_get_vrrp, _set_vrrp)
_pyangbind_elements = OrderedDict([('ip', ip), ('config', config), ('state', state), ('vrrp', vrrp), ])
| [
"[email protected]"
] | |
9d1fb7b4ce24f9fcb9f66e6f268e7d001aeb308e | 2d9cedf0ed36dadca1ca2f696290c8261ef7851f | /000010/DataJoint/DJ-NWB-Li-2015b/scripts/ingest.py | 4975eba64deab64af06445f9c39c8092f1871b39 | [
"Apache-2.0"
] | permissive | dandi/example-notebooks | 4365285697d41fd383110b5af5c30860d72fad22 | be3a8b345dfa9c0145692a30087647bc47f865e8 | refs/heads/master | 2023-08-30T20:41:41.323355 | 2023-08-16T21:21:12 | 2023-08-16T21:21:12 | 231,629,025 | 5 | 8 | Apache-2.0 | 2023-09-12T19:53:10 | 2020-01-03T16:55:02 | Jupyter Notebook | UTF-8 | Python | false | false | 220 | py | from pipeline.ingest import ingest_lookup, ingest_meta_data, ingest_data
from pipeline import imaging
# One-shot ingestion driver: load metadata, then raw data, then populate the
# ROI-analysis tables (errors suppressed so a single bad key does not abort
# the whole population run; progress is displayed).
ingest_meta_data.main()
ingest_data.main()
imaging.RoiAnalyses.populate(suppress_errors=True, display_progress=True)
| [
"[email protected]"
] | |
0d511d8f1acf5ab27e7f677565d0a4424a91fcd7 | 15f321878face2af9317363c5f6de1e5ddd9b749 | /solutions_python/Problem_209/287.py | dce3bd9df0ba24f4960a08f60546dcce19c18657 | [] | no_license | dr-dos-ok/Code_Jam_Webscraper | c06fd59870842664cd79c41eb460a09553e1c80a | 26a35bf114a3aa30fc4c677ef069d95f41665cc0 | refs/heads/master | 2020-04-06T08:17:40.938460 | 2018-10-14T10:12:47 | 2018-10-14T10:12:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,198 | py | import sys
import math
def pancake_value(top, side, max_top):
    """Marginal surface area gained by adding this pancake to the stack.

    The side area always counts; the top contributes only the portion that
    sticks out beyond the largest top area already exposed (max_top).
    """
    overhang = top - max_top
    if overhang <= 0:
        return side
    return side + overhang
# Google Code Jam driver: for each test case, read N pancakes (radius,
# height) and greedily pick K of them to maximize the exposed surface area
# of the stack (side areas always count; tops count only where they
# overhang the largest top chosen so far).
for i in range(0, int(sys.stdin.readline())):
    inputs = sys.stdin.readline()[:-1].split(" ")
    n = int(inputs[0])
    k = int(inputs[1])
    pancakes = []
    r = []
    h = []
    for j in range(0, n):
        inputs = sys.stdin.readline()[:-1].split(" ")
        r.append(int(inputs[0]))
        h.append(int(inputs[1]))
        pancakes.append( (r[j], h[j]) )
    # Largest radius first, so index order matches bottom-to-top stacking.
    pancakes.sort(reverse=True)
    # Precompute lateral (cylinder side) and top (disc) areas per pancake.
    sa_side = []
    sa_top = []
    for p in pancakes:
        sa_side.append(2 * math.pi * p[0] * p[1])
        sa_top.append(math.pi * p[0] ** 2)
    sa = 0
    used = []
    max_top_used = 0
    # Greedy: repeatedly take the unused pancake adding the most area given
    # the current largest exposed top.  NOTE(review): this is a heuristic;
    # confirm it matches the intended optimal selection for this problem.
    while len(used) < k:
        best_value = -1
        for j in range(0, n):
            if j in used:
                continue
            if best_value == -1:
                best_value = j
                continue
            if pancake_value(sa_top[j], sa_side[j], max_top_used) > pancake_value(sa_top[best_value], sa_side[best_value], max_top_used):
                best_value = j
        sa += pancake_value(sa_top[best_value], sa_side[best_value], max_top_used)
        if sa_top[best_value] > max_top_used:
            max_top_used = sa_top[best_value]
        used.append(best_value)
    print("Case #" + str(i + 1) + ": " + "{0:.15f}".format(sa))
| [
"[email protected]"
] | |
253dbc39b3361b2a7b7f30d0957a2a6ba6a396d3 | c97cac88118ebd0814dec123e164dc74fef5773e | /omero_python_libs/omero_model_SessionI.py | 0d0fa544752b970fd9d97de78702fe36ca88adaf | [
"Apache-2.0"
] | permissive | nseyedtalebi/django-uwsgi-nginx | 285a7ed2c66b0ca2f25dd4fc79018f8deac1472d | 3163e9c7a88ed1298312d6a69a0d9eaf2f007e97 | refs/heads/master | 2020-07-08T22:01:42.082634 | 2019-08-22T16:37:18 | 2019-08-22T16:37:18 | 203,791,310 | 0 | 0 | null | 2019-08-22T12:33:28 | 2019-08-22T12:33:28 | null | UTF-8 | Python | false | false | 25,691 | py | """
/*
** Generated by blitz/resources/templates/combined.vm
**
** Copyright 2007, 2008 Glencoe Software, Inc. All rights reserved.
** Use is subject to license terms supplied in LICENSE.txt
**
*/
"""
import Ice
import IceImport
import omero
IceImport.load("omero_model_DetailsI")
IceImport.load("omero_model_Session_ice")
from omero.rtypes import rlong
from collections import namedtuple
_omero = Ice.openModule("omero")
_omero_model = Ice.openModule("omero.model")
__name__ = "omero.model"
class SessionI(_omero_model.Session):
# Property Metadata
_field_info_data = namedtuple("FieldData", ["wrapper", "nullable"])
_field_info_type = namedtuple("FieldInfo", [
"node",
"uuid",
"owner",
"sudoer",
"timeToIdle",
"timeToLive",
"started",
"closed",
"message",
"defaultEventType",
"userAgent",
"userIP",
"events",
"annotationLinks",
"details",
])
_field_info = _field_info_type(
node=_field_info_data(wrapper=omero.proxy_to_instance, nullable=False),
uuid=_field_info_data(wrapper=omero.rtypes.rstring, nullable=False),
owner=_field_info_data(wrapper=omero.proxy_to_instance, nullable=False),
sudoer=_field_info_data(wrapper=omero.proxy_to_instance, nullable=True),
timeToIdle=_field_info_data(wrapper=omero.rtypes.rlong, nullable=False),
timeToLive=_field_info_data(wrapper=omero.rtypes.rlong, nullable=False),
started=_field_info_data(wrapper=omero.rtypes.rtime, nullable=False),
closed=_field_info_data(wrapper=omero.rtypes.rtime, nullable=True),
message=_field_info_data(wrapper=omero.rtypes.rstring, nullable=True),
defaultEventType=_field_info_data(wrapper=omero.rtypes.rstring, nullable=False),
userAgent=_field_info_data(wrapper=omero.rtypes.rstring, nullable=True),
userIP=_field_info_data(wrapper=omero.rtypes.rstring, nullable=True),
events=_field_info_data(wrapper=omero.proxy_to_instance, nullable=False),
annotationLinks=_field_info_data(wrapper=omero.proxy_to_instance, nullable=True),
details=_field_info_data(wrapper=omero.proxy_to_instance, nullable=True),
) # end _field_info
NODE = "ome.model.meta.Session_node"
UUID = "ome.model.meta.Session_uuid"
OWNER = "ome.model.meta.Session_owner"
SUDOER = "ome.model.meta.Session_sudoer"
TIMETOIDLE = "ome.model.meta.Session_timeToIdle"
TIMETOLIVE = "ome.model.meta.Session_timeToLive"
STARTED = "ome.model.meta.Session_started"
CLOSED = "ome.model.meta.Session_closed"
MESSAGE = "ome.model.meta.Session_message"
DEFAULTEVENTTYPE = "ome.model.meta.Session_defaultEventType"
USERAGENT = "ome.model.meta.Session_userAgent"
USERIP = "ome.model.meta.Session_userIP"
EVENTS = "ome.model.meta.Session_events"
ANNOTATIONLINKS = "ome.model.meta.Session_annotationLinks"
DETAILS = "ome.model.meta.Session_details"
  def errorIfUnloaded(self):
    """Raise UnloadedEntityException if this proxy has been unloaded."""
    if not self._loaded:
      raise _omero.UnloadedEntityException("Object unloaded:"+str(self))
  def throwNullCollectionException(self,propertyName):
      """Raise a descriptive error for writes to an unloaded (null) collection."""
      raise _omero.UnloadedEntityException(""+
      "Error updating collection:" + propertyName +"\n"+
      "Collection is currently null. This can be seen\n" +
      "by testing \""+ propertyName +"Loaded\". This implies\n"+
      "that this collection was unloaded. Please refresh this object\n"+
      "in order to update this collection.\n")
    def _toggleCollectionsLoaded(self, load):
        # Reset every collection-valued field to an empty sequence and record
        # whether it should count as loaded.  Called from __init__ (True) and
        # unloadCollections (False).
        # NOTE(review): the unload branch leaves the sequence as [] whereas
        # unloadEvents()/unloadAnnotationLinks() set it to None -- generated
        # upstream this way; confirm before relying on either sentinel.
        if load:
            self._eventsSeq = []
            self._eventsLoaded = True;
        else:
            self._eventsSeq = []
            self._eventsLoaded = False;
        if load:
            self._annotationLinksSeq = []
            self._annotationLinksLoaded = True;
        else:
            self._annotationLinksSeq = []
            self._annotationLinksLoaded = False;
        pass
    def __init__(self, id=None, loaded=None):
        """Create a SessionI.

        ``id`` may be a raw id value or a proxy string "ClassName:id".
        ``loaded`` defaults to False when a proxy string was given and
        True otherwise; loaded instances get fresh Details and empty
        (loaded) collections.
        """
        super(SessionI, self).__init__()
        # NOTE(review): `unicode` exists only on Python 2 -- this isinstance
        # check raises NameError under Python 3; confirm target interpreter.
        if id is not None and isinstance(id, (str, unicode)) and ":" in id:
            parts = id.split(":")
            if len(parts) != 2:
                raise Exception("Invalid proxy string: %s", id)
            if parts[0] != self.__class__.__name__ and \
               parts[0]+"I" != self.__class__.__name__:
                raise Exception("Proxy class mismatch: %s<>%s" %
                                (self.__class__.__name__, parts[0]))
            self._id = rlong(parts[1])
            if loaded is None:
                # If no loadedness was requested with
                # a proxy string, then assume False.
                loaded = False
        else:
            # Relying on omero.rtypes.rlong's error-handling
            self._id = rlong(id)
            if loaded is None:
                loaded = True  # Assume true as previously
        self._loaded = loaded
        if self._loaded:
            self._details = _omero_model.DetailsI()
            self._toggleCollectionsLoaded(True)
    def unload(self, current = None):
        # Drop every field value and mark the object unloaded; subsequent
        # accessor calls raise UnloadedEntityException (see errorIfUnloaded).
        self._loaded = False
        self.unloadNode( )
        self.unloadUuid( )
        self.unloadOwner( )
        self.unloadSudoer( )
        self.unloadTimeToIdle( )
        self.unloadTimeToLive( )
        self.unloadStarted( )
        self.unloadClosed( )
        self.unloadMessage( )
        self.unloadDefaultEventType( )
        self.unloadUserAgent( )
        self.unloadUserIP( )
        self.unloadEvents( )
        self.unloadAnnotationLinks( )
        self.unloadDetails( )
    def isLoaded(self, current = None):
        return self._loaded
    def unloadCollections(self, current = None):
        # Unload only the collection-valued fields, keeping scalar state.
        self._toggleCollectionsLoaded( False )
    def isGlobal(self, current = None):
        # Sessions are global (not group-scoped) entities.
        return True ;
    def isMutable(self, current = None):
        return True ;
    def isAnnotated(self, current = None):
        return True ;
    def isLink(self, current = None):
        return False ;
    def shallowCopy(self, current = None):
        # NOTE(review): intentionally unimplemented -- copies id/version and
        # then always raises ClientError("NYI"); do not rely on this method.
        if not self._loaded: return self.proxy()
        copy = SessionI()
        copy._id = self._id;
        copy._version = self._version;
        copy._details = None # Unloading for the moment.
        raise omero.ClientError("NYI")
    def proxy(self, current = None):
        # Return an unloaded stand-in carrying only this object's id.
        if self._id is None: raise omero.ClientError("Proxies require an id")
        return SessionI( self._id.getValue(), False )
    def getDetails(self, current = None):
        self.errorIfUnloaded()
        return self._details
    def unloadDetails(self, current = None):
        self._details = None
    def getId(self, current = None):
        # The id stays readable even when unloaded, so proxies keep working.
        return self._id
    def setId(self, _id, current = None):
        self._id = _id
def checkUnloadedProperty(self, value, loadedField):
if value == None:
self.__dict__[loadedField] = False
else:
self.__dict__[loadedField] = True
    def getVersion(self, current = None):
        self.errorIfUnloaded()
        return self._version
    def setVersion(self, version, current = None):
        self.errorIfUnloaded()
        self._version = version
    def unloadNode(self, ):
        self._nodeLoaded = False
        self._node = None;
    def getNode(self, current = None):
        self.errorIfUnloaded()
        return self._node
    def setNode(self, _node, current = None, wrap=False):
        # wrap=True converts the raw value through the field's wrapper from
        # _field_info (e.g. proxy_to_instance); by default store as given.
        self.errorIfUnloaded()
        if wrap and self._field_info.node.wrapper is not None:
            if _node is not None:
                _node = self._field_info.node.wrapper(_node)
        self._node = _node
        pass
    # Scalar-field accessor family (unload/get/set); the same generated
    # pattern repeats for every scalar field of this class.
    def unloadUuid(self, ):
        self._uuidLoaded = False
        self._uuid = None;
    def getUuid(self, current = None):
        self.errorIfUnloaded()
        return self._uuid
    def setUuid(self, _uuid, current = None, wrap=False):
        # wrap=True converts through _field_info.uuid.wrapper (rstring).
        self.errorIfUnloaded()
        if wrap and self._field_info.uuid.wrapper is not None:
            if _uuid is not None:
                _uuid = self._field_info.uuid.wrapper(_uuid)
        self._uuid = _uuid
        pass
def unloadOwner(self, ):
self._ownerLoaded = False
self._owner = None;
def getOwner(self, current = None):
self.errorIfUnloaded()
return self._owner
def setOwner(self, _owner, current = None, wrap=False):
self.errorIfUnloaded()
if wrap and self._field_info.owner.wrapper is not None:
if _owner is not None:
_owner = self._field_info.owner.wrapper(_owner)
self._owner = _owner
pass
def unloadSudoer(self, ):
self._sudoerLoaded = False
self._sudoer = None;
def getSudoer(self, current = None):
self.errorIfUnloaded()
return self._sudoer
def setSudoer(self, _sudoer, current = None, wrap=False):
self.errorIfUnloaded()
if wrap and self._field_info.sudoer.wrapper is not None:
if _sudoer is not None:
_sudoer = self._field_info.sudoer.wrapper(_sudoer)
self._sudoer = _sudoer
pass
def unloadTimeToIdle(self, ):
self._timeToIdleLoaded = False
self._timeToIdle = None;
def getTimeToIdle(self, current = None):
self.errorIfUnloaded()
return self._timeToIdle
def setTimeToIdle(self, _timeToIdle, current = None, wrap=False):
self.errorIfUnloaded()
if wrap and self._field_info.timeToIdle.wrapper is not None:
if _timeToIdle is not None:
_timeToIdle = self._field_info.timeToIdle.wrapper(_timeToIdle)
self._timeToIdle = _timeToIdle
pass
def unloadTimeToLive(self, ):
self._timeToLiveLoaded = False
self._timeToLive = None;
def getTimeToLive(self, current = None):
self.errorIfUnloaded()
return self._timeToLive
def setTimeToLive(self, _timeToLive, current = None, wrap=False):
self.errorIfUnloaded()
if wrap and self._field_info.timeToLive.wrapper is not None:
if _timeToLive is not None:
_timeToLive = self._field_info.timeToLive.wrapper(_timeToLive)
self._timeToLive = _timeToLive
pass
def unloadStarted(self, ):
self._startedLoaded = False
self._started = None;
def getStarted(self, current = None):
self.errorIfUnloaded()
return self._started
def setStarted(self, _started, current = None, wrap=False):
self.errorIfUnloaded()
if wrap and self._field_info.started.wrapper is not None:
if _started is not None:
_started = self._field_info.started.wrapper(_started)
self._started = _started
pass
def unloadClosed(self, ):
self._closedLoaded = False
self._closed = None;
def getClosed(self, current = None):
self.errorIfUnloaded()
return self._closed
def setClosed(self, _closed, current = None, wrap=False):
self.errorIfUnloaded()
if wrap and self._field_info.closed.wrapper is not None:
if _closed is not None:
_closed = self._field_info.closed.wrapper(_closed)
self._closed = _closed
pass
def unloadMessage(self, ):
self._messageLoaded = False
self._message = None;
def getMessage(self, current = None):
self.errorIfUnloaded()
return self._message
def setMessage(self, _message, current = None, wrap=False):
self.errorIfUnloaded()
if wrap and self._field_info.message.wrapper is not None:
if _message is not None:
_message = self._field_info.message.wrapper(_message)
self._message = _message
pass
def unloadDefaultEventType(self, ):
self._defaultEventTypeLoaded = False
self._defaultEventType = None;
def getDefaultEventType(self, current = None):
self.errorIfUnloaded()
return self._defaultEventType
def setDefaultEventType(self, _defaultEventType, current = None, wrap=False):
self.errorIfUnloaded()
if wrap and self._field_info.defaultEventType.wrapper is not None:
if _defaultEventType is not None:
_defaultEventType = self._field_info.defaultEventType.wrapper(_defaultEventType)
self._defaultEventType = _defaultEventType
pass
def unloadUserAgent(self, ):
self._userAgentLoaded = False
self._userAgent = None;
def getUserAgent(self, current = None):
self.errorIfUnloaded()
return self._userAgent
def setUserAgent(self, _userAgent, current = None, wrap=False):
self.errorIfUnloaded()
if wrap and self._field_info.userAgent.wrapper is not None:
if _userAgent is not None:
_userAgent = self._field_info.userAgent.wrapper(_userAgent)
self._userAgent = _userAgent
pass
def unloadUserIP(self, ):
self._userIPLoaded = False
self._userIP = None;
def getUserIP(self, current = None):
self.errorIfUnloaded()
return self._userIP
def setUserIP(self, _userIP, current = None, wrap=False):
self.errorIfUnloaded()
if wrap and self._field_info.userIP.wrapper is not None:
if _userIP is not None:
_userIP = self._field_info.userIP.wrapper(_userIP)
self._userIP = _userIP
pass
def unloadEvents(self, current = None):
self._eventsLoaded = False
self._eventsSeq = None;
def _getEvents(self, current = None):
self.errorIfUnloaded()
return self._eventsSeq
def _setEvents(self, _events, current = None, wrap=False):
self.errorIfUnloaded()
if wrap and self._field_info.eventsSeq.wrapper is not None:
if _events is not None:
_events = self._field_info.eventsSeq.wrapper(_events)
self._eventsSeq = _events
self.checkUnloadedProperty(_events,'eventsLoaded')
def isEventsLoaded(self):
return self._eventsLoaded
def sizeOfEvents(self, current = None):
self.errorIfUnloaded()
if not self._eventsLoaded: return -1
return len(self._eventsSeq)
def copyEvents(self, current = None):
self.errorIfUnloaded()
if not self._eventsLoaded: self.throwNullCollectionException("eventsSeq")
return list(self._eventsSeq)
def iterateEvents(self):
self.errorIfUnloaded()
if not self._eventsLoaded: self.throwNullCollectionException("eventsSeq")
return iter(self._eventsSeq)
def addEvent(self, target, current = None):
self.errorIfUnloaded()
if not self._eventsLoaded: self.throwNullCollectionException("eventsSeq")
self._eventsSeq.append( target );
target.setSession( self )
def addAllEventSet(self, targets, current = None):
self.errorIfUnloaded()
if not self._eventsLoaded: self.throwNullCollectionException("eventsSeq")
self._eventsSeq.extend( targets )
for target in targets:
target.setSession( self )
def removeEvent(self, target, current = None):
self.errorIfUnloaded()
if not self._eventsLoaded: self.throwNullCollectionException("eventsSeq")
self._eventsSeq.remove( target )
target.setSession( None )
def removeAllEventSet(self, targets, current = None):
self.errorIfUnloaded()
if not self._eventsLoaded: self.throwNullCollectionException("eventsSeq")
for elt in targets:
elt.setSession( None )
self._eventsSeq.remove( elt )
def clearEvents(self, current = None):
self.errorIfUnloaded()
if not self._eventsLoaded: self.throwNullCollectionException("eventsSeq")
for elt in self._eventsSeq:
elt.setSession( None )
self._eventsSeq = list()
    def reloadEvents(self, toCopy, current = None):
        """Adopt the events collection from *toCopy*, a newer copy of this
        same entity: re-parent each element onto self, then unload the
        source's collection so only one owner holds it."""
        self.errorIfUnloaded()
        if self._eventsLoaded:
            raise omero.ClientError("Cannot reload active collection: eventsSeq")
        if not toCopy:
            raise omero.ClientError("Argument cannot be null")
        if toCopy.getId().getValue() != self.getId().getValue():
            raise omero.ClientError("Argument must have the same id as this instance")
        if toCopy.getDetails().getUpdateEvent().getId().getValue() < self.getDetails().getUpdateEvent().getId().getValue():
            raise omero.ClientError("Argument may not be older than this instance")
        copy = toCopy.copyEvents() # May also throw
        for elt in copy:
            elt.setSession( self )
        self._eventsSeq = copy
        toCopy.unloadEvents()
        self._eventsLoaded = True
def unloadAnnotationLinks(self, current = None):
self._annotationLinksLoaded = False
self._annotationLinksSeq = None;
def _getAnnotationLinks(self, current = None):
self.errorIfUnloaded()
return self._annotationLinksSeq
def _setAnnotationLinks(self, _annotationLinks, current = None, wrap=False):
self.errorIfUnloaded()
if wrap and self._field_info.annotationLinksSeq.wrapper is not None:
if _annotationLinks is not None:
_annotationLinks = self._field_info.annotationLinksSeq.wrapper(_annotationLinks)
self._annotationLinksSeq = _annotationLinks
self.checkUnloadedProperty(_annotationLinks,'annotationLinksLoaded')
def isAnnotationLinksLoaded(self):
return self._annotationLinksLoaded
def sizeOfAnnotationLinks(self, current = None):
self.errorIfUnloaded()
if not self._annotationLinksLoaded: return -1
return len(self._annotationLinksSeq)
def copyAnnotationLinks(self, current = None):
self.errorIfUnloaded()
if not self._annotationLinksLoaded: self.throwNullCollectionException("annotationLinksSeq")
return list(self._annotationLinksSeq)
def iterateAnnotationLinks(self):
self.errorIfUnloaded()
if not self._annotationLinksLoaded: self.throwNullCollectionException("annotationLinksSeq")
return iter(self._annotationLinksSeq)
def addSessionAnnotationLink(self, target, current = None):
self.errorIfUnloaded()
if not self._annotationLinksLoaded: self.throwNullCollectionException("annotationLinksSeq")
self._annotationLinksSeq.append( target );
target.setParent( self )
def addAllSessionAnnotationLinkSet(self, targets, current = None):
self.errorIfUnloaded()
if not self._annotationLinksLoaded: self.throwNullCollectionException("annotationLinksSeq")
self._annotationLinksSeq.extend( targets )
for target in targets:
target.setParent( self )
def removeSessionAnnotationLink(self, target, current = None):
self.errorIfUnloaded()
if not self._annotationLinksLoaded: self.throwNullCollectionException("annotationLinksSeq")
self._annotationLinksSeq.remove( target )
target.setParent( None )
def removeAllSessionAnnotationLinkSet(self, targets, current = None):
self.errorIfUnloaded()
if not self._annotationLinksLoaded: self.throwNullCollectionException("annotationLinksSeq")
for elt in targets:
elt.setParent( None )
self._annotationLinksSeq.remove( elt )
def clearAnnotationLinks(self, current = None):
self.errorIfUnloaded()
if not self._annotationLinksLoaded: self.throwNullCollectionException("annotationLinksSeq")
for elt in self._annotationLinksSeq:
elt.setParent( None )
self._annotationLinksSeq = list()
def reloadAnnotationLinks(self, toCopy, current = None):
self.errorIfUnloaded()
if self._annotationLinksLoaded:
raise omero.ClientError("Cannot reload active collection: annotationLinksSeq")
if not toCopy:
raise omero.ClientError("Argument cannot be null")
if toCopy.getId().getValue() != self.getId().getValue():
raise omero.ClientError("Argument must have the same id as this instance")
if toCopy.getDetails().getUpdateEvent().getId().getValue() < self.getDetails().getUpdateEvent().getId().getValue():
raise omero.ClientError("Argument may not be older than this instance")
copy = toCopy.copyAnnotationLinks() # May also throw
for elt in copy:
elt.setParent( self )
self._annotationLinksSeq = copy
toCopy.unloadAnnotationLinks()
self._annotationLinksLoaded = True
def getAnnotationLinksCountPerOwner(self, current = None):
return self._annotationLinksCountPerOwner
def linkAnnotation(self, addition, current = None):
self.errorIfUnloaded()
if not self._annotationLinksLoaded: self.throwNullCollectionException("annotationLinksSeq")
link = _omero_model.SessionAnnotationLinkI()
link.link( self, addition );
self.addSessionAnnotationLinkToBoth( link, True )
return link
def addSessionAnnotationLinkToBoth(self, link, bothSides):
self.errorIfUnloaded()
if not self._annotationLinksLoaded: self.throwNullCollectionException("annotationLinksSeq")
self._annotationLinksSeq.append( link )
def findSessionAnnotationLink(self, removal, current = None):
self.errorIfUnloaded()
if not self._annotationLinksLoaded: self.throwNullCollectionException("annotationLinksSeq")
result = list()
for link in self._annotationLinksSeq:
if link.getChild() == removal: result.append(link)
return result
def unlinkAnnotation(self, removal, current = None):
self.errorIfUnloaded()
if not self._annotationLinksLoaded: self.throwNullCollectionException("annotationLinksSeq")
toRemove = self.findSessionAnnotationLink(removal)
for next in toRemove:
self.removeSessionAnnotationLinkFromBoth( next, True )
def removeSessionAnnotationLinkFromBoth(self, link, bothSides, current = None):
self.errorIfUnloaded()
if not self._annotationLinksLoaded: self.throwNullCollectionException("annotationLinksSeq")
self._annotationLinksSeq.remove( link )
def linkedAnnotationList(self, current = None):
self.errorIfUnloaded()
if not self.annotationLinksLoaded: self.throwNullCollectionException("AnnotationLinks")
linked = []
for link in self._annotationLinksSeq:
linked.append( link.getChild() )
return linked
    def ice_postUnmarshal(self):
        """
        Provides additional initialization once all data loaded
        """
        pass # Currently unused
    def ice_preMarshal(self):
        """
        Provides additional validation before data is sent
        """
        pass # Currently unused
def __getattr__(self, name):
import __builtin__
"""
Reroutes all access to object.field through object.getField() or object.isField()
"""
if "_" in name: # Ice disallows underscores, so these should be treated normally.
return object.__getattribute__(self, name)
field = "_" + name
capitalized = name[0].capitalize() + name[1:]
getter = "get" + capitalized
questn = "is" + capitalized
try:
self.__dict__[field]
if hasattr(self, getter):
method = getattr(self, getter)
return method()
elif hasattr(self, questn):
method = getattr(self, questn)
return method()
except:
pass
raise AttributeError("'%s' object has no attribute '%s' or '%s'" % (self.__class__.__name__, getter, questn))
    def __setattr__(self, name, value):
        """
        Reroutes all access to object.field through object.getField(), with the caveat
        that all sets on variables starting with "_" are permitted directly.
        """
        if name.startswith("_"):
            # Underscored names are the real storage slots; write directly.
            self.__dict__[name] = value
            return
        else:
            field = "_" + name
            setter = "set" + name[0].capitalize() + name[1:]
            # Only reroute when both the backing field and a setter exist.
            if hasattr(self, field) and hasattr(self, setter):
                method = getattr(self, setter)
                return method(value)
        raise AttributeError("'%s' object has no attribute '%s'" % (self.__class__.__name__, setter))
_omero_model.SessionI = SessionI
| [
"[email protected]"
] | |
b60f7628b7edda141c54a4da61c18b9878fbee0a | b4b140bb107baebc50b310f1d79fdbe2a0382708 | /proj/lib/python3.7/site-packages/sqlalchemy/testing/plugin/plugin_base.py | 6581195dff252ee2cd8047c1abbf30e020fe88ea | [
"MIT"
] | permissive | shahedex/horizon_backend_flask | f642b99bf019050ff72896e455a85bd3f483cf39 | 7dce74fce0afdfa1cb6481e1d765e01a8ad3c5c4 | refs/heads/master | 2022-10-08T17:27:10.299450 | 2019-10-06T16:57:42 | 2019-10-06T16:57:42 | 207,140,462 | 0 | 1 | MIT | 2022-09-16T18:09:06 | 2019-09-08T16:26:35 | Python | UTF-8 | Python | false | false | 17,514 | py | # plugin/plugin_base.py
# Copyright (C) 2005-2016 the SQLAlchemy authors and contributors
# <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
"""Testing extensions.
this module is designed to work as a testing-framework-agnostic library,
so that we can continue to support nose and also begin adding new
functionality via py.test.
"""
from __future__ import absolute_import
import sys
import re
py3k = sys.version_info >= (3, 0)
if py3k:
import configparser
else:
import ConfigParser as configparser
# late imports
fixtures = None
engines = None
exclusions = None
warnings = None
profiling = None
assertions = None
requirements = None
config = None
testing = None
util = None
file_config = None
logging = None
include_tags = set()
exclude_tags = set()
options = None
def setup_options(make_option):
make_option("--log-info", action="callback", type="string", callback=_log,
help="turn on info logging for <LOG> (multiple OK)")
make_option("--log-debug", action="callback",
type="string", callback=_log,
help="turn on debug logging for <LOG> (multiple OK)")
make_option("--db", action="append", type="string", dest="db",
help="Use prefab database uri. Multiple OK, "
"first one is run by default.")
make_option('--dbs', action='callback', callback=_list_dbs,
help="List available prefab dbs")
make_option("--dburi", action="append", type="string", dest="dburi",
help="Database uri. Multiple OK, "
"first one is run by default.")
make_option("--dropfirst", action="store_true", dest="dropfirst",
help="Drop all tables in the target database first")
make_option("--backend-only", action="store_true", dest="backend_only",
help="Run only tests marked with __backend__")
make_option("--low-connections", action="store_true",
dest="low_connections",
help="Use a low number of distinct connections - "
"i.e. for Oracle TNS")
make_option("--write-idents", type="string", dest="write_idents",
help="write out generated follower idents to <file>, "
"when -n<num> is used")
make_option("--reversetop", action="store_true",
dest="reversetop", default=False,
help="Use a random-ordering set implementation in the ORM "
"(helps reveal dependency issues)")
make_option("--requirements", action="callback", type="string",
callback=_requirements_opt,
help="requirements class for testing, overrides setup.cfg")
make_option("--with-cdecimal", action="store_true",
dest="cdecimal", default=False,
help="Monkeypatch the cdecimal library into Python 'decimal' "
"for all tests")
make_option("--include-tag", action="callback", callback=_include_tag,
type="string",
help="Include tests with tag <tag>")
make_option("--exclude-tag", action="callback", callback=_exclude_tag,
type="string",
help="Exclude tests with tag <tag>")
make_option("--write-profiles", action="store_true",
dest="write_profiles", default=False,
help="Write/update failing profiling data.")
make_option("--force-write-profiles", action="store_true",
dest="force_write_profiles", default=False,
help="Unconditionally write/update profiling data.")
def configure_follower(follower_ident):
"""Configure required state for a follower.
This invokes in the parent process and typically includes
database creation.
"""
from sqlalchemy.testing import provision
provision.FOLLOWER_IDENT = follower_ident
def memoize_important_follower_config(dict_):
"""Store important configuration we will need to send to a follower.
This invokes in the parent process after normal config is set up.
This is necessary as py.test seems to not be using forking, so we
start with nothing in memory, *but* it isn't running our argparse
callables, so we have to just copy all of that over.
"""
dict_['memoized_config'] = {
'include_tags': include_tags,
'exclude_tags': exclude_tags
}
def restore_important_follower_config(dict_):
"""Restore important configuration needed by a follower.
This invokes in the follower process.
"""
global include_tags, exclude_tags
include_tags.update(dict_['memoized_config']['include_tags'])
exclude_tags.update(dict_['memoized_config']['exclude_tags'])
def read_config():
    # Load test configuration into the module-level parser; later files in
    # the list override values from earlier ones.
    global file_config
    file_config = configparser.ConfigParser()
    file_config.read(['setup.cfg', 'test.cfg'])
def pre_begin(opt):
    """things to set up early, before coverage might be setup."""
    global options
    options = opt
    # Run hooks registered via the @pre decorator (option post-processing,
    # monkeypatches) before any heavier imports happen.
    for fn in pre_configure:
        fn(options, file_config)
def set_coverage_flag(value):
options.has_coverage = value
_skip_test_exception = None
def set_skip_test(exc):
global _skip_test_exception
_skip_test_exception = exc
def post_begin():
    """things to set up later, once we know coverage is running."""
    # Lazy setup of other options (post coverage)
    for fn in post_configure:
        fn(options, file_config)

    # late imports, has to happen after config as well
    # as nose plugins like coverage
    global util, fixtures, engines, exclusions, \
        assertions, warnings, profiling,\
        config, testing
    from sqlalchemy import testing  # noqa
    from sqlalchemy.testing import fixtures, engines, exclusions  # noqa
    from sqlalchemy.testing import assertions, warnings, profiling  # noqa
    from sqlalchemy.testing import config  # noqa
    from sqlalchemy import util  # noqa
    warnings.setup_filters()
def _log(opt_str, value, parser):
global logging
if not logging:
import logging
logging.basicConfig()
if opt_str.endswith('-info'):
logging.getLogger(value).setLevel(logging.INFO)
elif opt_str.endswith('-debug'):
logging.getLogger(value).setLevel(logging.DEBUG)
def _list_dbs(*args):
print("Available --db options (use --dburi to override)")
for macro in sorted(file_config.options('db')):
print("%20s\t%s" % (macro, file_config.get('db', macro)))
sys.exit(0)
def _requirements_opt(opt_str, value, parser):
_setup_requirements(value)
def _exclude_tag(opt_str, value, parser):
exclude_tags.add(value.replace('-', '_'))
def _include_tag(opt_str, value, parser):
include_tags.add(value.replace('-', '_'))
pre_configure = []
post_configure = []
def pre(fn):
pre_configure.append(fn)
return fn
def post(fn):
post_configure.append(fn)
return fn
@pre
def _setup_options(opt, file_config):
global options
options = opt
@pre
def _monkeypatch_cdecimal(options, file_config):
if options.cdecimal:
import cdecimal
sys.modules['decimal'] = cdecimal
@post
def _init_skiptest(options, file_config):
from sqlalchemy.testing import config
config._skip_test_exception = _skip_test_exception
@post
def _engine_uri(options, file_config):
from sqlalchemy.testing import config
from sqlalchemy import testing
from sqlalchemy.testing import provision
if options.dburi:
db_urls = list(options.dburi)
else:
db_urls = []
if options.db:
for db_token in options.db:
for db in re.split(r'[,\s]+', db_token):
if db not in file_config.options('db'):
raise RuntimeError(
"Unknown URI specifier '%s'. "
"Specify --dbs for known uris."
% db)
else:
db_urls.append(file_config.get('db', db))
if not db_urls:
db_urls.append(file_config.get('db', 'default'))
config._current = None
for db_url in db_urls:
cfg = provision.setup_config(
db_url, options, file_config, provision.FOLLOWER_IDENT)
if not config._current:
cfg.set_as_current(cfg, testing)
@post
def _requirements(options, file_config):
requirement_cls = file_config.get('sqla_testing', "requirement_cls")
_setup_requirements(requirement_cls)
def _setup_requirements(argument):
    """Import and install the requirements class given as "module:Class".

    No-op if requirements were already configured (e.g. via --requirements).
    """
    from sqlalchemy.testing import config
    from sqlalchemy import testing

    if config.requirements is not None:
        return

    modname, clsname = argument.split(":")

    # importlib.import_module() only introduced in 2.7, a little
    # late
    mod = __import__(modname)
    # __import__ returns the top-level package; walk down to the submodule.
    for component in modname.split(".")[1:]:
        mod = getattr(mod, component)
    req_cls = getattr(mod, clsname)

    config.requirements = testing.requires = req_cls()
@post
def _prep_testing_database(options, file_config):
from sqlalchemy.testing import config, util
from sqlalchemy.testing.exclusions import against
from sqlalchemy import schema, inspect
if options.dropfirst:
for cfg in config.Config.all_configs():
e = cfg.db
inspector = inspect(e)
try:
view_names = inspector.get_view_names()
except NotImplementedError:
pass
else:
for vname in view_names:
e.execute(schema._DropView(
schema.Table(vname, schema.MetaData())
))
if config.requirements.schemas.enabled_for_config(cfg):
try:
view_names = inspector.get_view_names(
schema="test_schema")
except NotImplementedError:
pass
else:
for vname in view_names:
e.execute(schema._DropView(
schema.Table(vname, schema.MetaData(),
schema="test_schema")
))
util.drop_all_tables(e, inspector)
if config.requirements.schemas.enabled_for_config(cfg):
util.drop_all_tables(e, inspector, schema=cfg.test_schema)
if against(cfg, "postgresql"):
from sqlalchemy.dialects import postgresql
for enum in inspector.get_enums("*"):
e.execute(postgresql.DropEnumType(
postgresql.ENUM(
name=enum['name'],
schema=enum['schema'])))
@post
def _reverse_topological(options, file_config):
if options.reversetop:
from sqlalchemy.orm.util import randomize_unitofwork
randomize_unitofwork()
@post
def _post_setup_options(opt, file_config):
from sqlalchemy.testing import config
config.options = options
config.file_config = file_config
@post
def _setup_profiling(options, file_config):
from sqlalchemy.testing import profiling
profiling._profile_stats = profiling.ProfileStatsFile(
file_config.get('sqla_testing', 'profile_file'))
def want_class(cls):
    """Return True if *cls* is a runnable test class under current options.

    Rejects non-TestBase classes, underscore-prefixed (private) classes,
    and -- when --backend-only is set -- classes without __backend__.
    """
    if not issubclass(cls, fixtures.TestBase):
        return False
    if cls.__name__.startswith('_'):
        return False
    if config.options.backend_only and not getattr(
            cls, '__backend__', False):
        return False
    return True
def want_method(cls, fn):
    """Return True if test function *fn* of class *cls* passes the name
    filter and the --include-tag / --exclude-tag filters."""
    if not fn.__name__.startswith("test_"):
        return False
    elif fn.__module__ is None:
        return False
    elif include_tags:
        # With include tags, a test runs only if its class tags or its own
        # per-function exclusion extension match the include/exclude sets.
        return (
            hasattr(cls, '__tags__') and
            exclusions.tags(cls.__tags__).include_test(
                include_tags, exclude_tags)
        ) or (
            hasattr(fn, '_sa_exclusion_extend') and
            fn._sa_exclusion_extend.include_test(
                include_tags, exclude_tags)
        )
    elif exclude_tags and hasattr(cls, '__tags__'):
        return exclusions.tags(cls.__tags__).include_test(
            include_tags, exclude_tags)
    elif exclude_tags and hasattr(fn, '_sa_exclusion_extend'):
        return fn._sa_exclusion_extend.include_test(include_tags, exclude_tags)
    else:
        return True
def generate_sub_tests(cls, module):
if getattr(cls, '__backend__', False):
for cfg in _possible_configs_for_cls(cls):
name = "%s_%s_%s" % (cls.__name__, cfg.db.name, cfg.db.driver)
subcls = type(
name,
(cls, ),
{
"__only_on__": ("%s+%s" % (cfg.db.name, cfg.db.driver)),
}
)
setattr(module, name, subcls)
yield subcls
else:
yield cls
def start_test_class(cls):
_do_skips(cls)
_setup_engine(cls)
def stop_test_class(cls):
    """Tear down per-test-class state.

    Stops the engine reaper's test context, runs the global cleanup
    assertions (skipped with --low-connections) and always restores the
    default engine configuration.
    """
    engines.testing_reaper._stop_test_ctx()
    try:
        if not options.low_connections:
            assertions.global_cleanup_assertions()
    finally:
        _restore_engine()
def _restore_engine():
config._current.reset(testing)
def final_process_cleanup():
engines.testing_reaper._stop_test_ctx_aggressive()
assertions.global_cleanup_assertions()
_restore_engine()
def _setup_engine(cls):
if getattr(cls, '__engine_options__', None):
eng = engines.testing_engine(options=cls.__engine_options__)
config._current.push_engine(eng, testing)
def before_test(test, test_module_name, test_class, test_name):
# like a nose id, e.g.:
# "test.aaa_profiling.test_compiler.CompileTest.test_update_whereclause"
name = test_class.__name__
suffix = "_%s_%s" % (config.db.name, config.db.driver)
if name.endswith(suffix):
name = name[0:-(len(suffix))]
id_ = "%s.%s.%s" % (test_module_name, name, test_name)
profiling._current_test = id_
def after_test(test):
engines.testing_reaper._after_test_ctx()
def _possible_configs_for_cls(cls, reasons=None):
all_configs = set(config.Config.all_configs())
if cls.__unsupported_on__:
spec = exclusions.db_spec(*cls.__unsupported_on__)
for config_obj in list(all_configs):
if spec(config_obj):
all_configs.remove(config_obj)
if getattr(cls, '__only_on__', None):
spec = exclusions.db_spec(*util.to_list(cls.__only_on__))
for config_obj in list(all_configs):
if not spec(config_obj):
all_configs.remove(config_obj)
if hasattr(cls, '__requires__'):
requirements = config.requirements
for config_obj in list(all_configs):
for requirement in cls.__requires__:
check = getattr(requirements, requirement)
skip_reasons = check.matching_config_reasons(config_obj)
if skip_reasons:
all_configs.remove(config_obj)
if reasons is not None:
reasons.extend(skip_reasons)
break
if hasattr(cls, '__prefer_requires__'):
non_preferred = set()
requirements = config.requirements
for config_obj in list(all_configs):
for requirement in cls.__prefer_requires__:
check = getattr(requirements, requirement)
if not check.enabled_for_config(config_obj):
non_preferred.add(config_obj)
if all_configs.difference(non_preferred):
all_configs.difference_update(non_preferred)
return all_configs
def _do_skips(cls):
reasons = []
all_configs = _possible_configs_for_cls(cls, reasons)
if getattr(cls, '__skip_if__', False):
for c in getattr(cls, '__skip_if__'):
if c():
config.skip_test("'%s' skipped by %s" % (
cls.__name__, c.__name__)
)
if not all_configs:
if getattr(cls, '__backend__', False):
msg = "'%s' unsupported for implementation '%s'" % (
cls.__name__, cls.__only_on__)
else:
msg = "'%s' unsupported on any DB implementation %s%s" % (
cls.__name__,
", ".join(
"'%s(%s)+%s'" % (
config_obj.db.name,
".".join(
str(dig) for dig in
config_obj.db.dialect.server_version_info),
config_obj.db.driver
)
for config_obj in config.Config.all_configs()
),
", ".join(reasons)
)
config.skip_test(msg)
elif hasattr(cls, '__prefer_backends__'):
non_preferred = set()
spec = exclusions.db_spec(*util.to_list(cls.__prefer_backends__))
for config_obj in all_configs:
if not spec(config_obj):
non_preferred.add(config_obj)
if all_configs.difference(non_preferred):
all_configs.difference_update(non_preferred)
if config._current not in all_configs:
_setup_config(all_configs.pop(), cls)
def _setup_config(config_obj, ctx):
config._current.push(config_obj, testing)
| [
"[email protected]"
] | |
bd1a740bf8f775851890759a7db88b5fd0cf0bba | 958d87cc3b77bb3308d0aa04b92fdef5f97d63ae | /AdvancedPythonObjectsAndDataStructures/AdvancedNumbers.py | 8ef05979c4197fe19a9dff75665c1dc35639738e | [] | no_license | parihar08/PythonJosePortilla | 6dec83519af78451c46e323928aedf19dbd908f1 | 6f47291908ad05daf5a505ba0e13687c46651bc2 | refs/heads/master | 2022-12-19T07:30:39.603468 | 2020-09-19T14:56:36 | 2020-09-19T14:56:36 | 292,650,778 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 568 | py | print('********Hexadecimal*************','\n')
#Hexadecimal
print(hex(246)) #0xf6
print(hex(512)) #0x200
print('********Binary*************','\n')
#Binary
print(bin(128)) #0b10000000
print(bin(512)) #0b1000000000
print('*********Power************','\n')
#Power
print(pow(2,4)) #16
print(pow(2,4,3)) #1 (2**4)%3
print('*********Absolute************','\n')
#Absolute
print(abs(2)) #2
print('*********Round************','\n')
#Round
print(round(3.1)) #3.0
print(round(3.9)) #4.0
print(round(3.141592,2)) #3.14
| [
"[email protected]"
] | |
0765dba6257528349620629e6fe1ae5ff5925d69 | deb8d06ec0f6fd6350512c36d3b79b934219b4f5 | /mimic3/utils.py | a3398cdb84ed120f9492ddf9c12e93bfd8a9c3b2 | [] | no_license | futianfan/preprocess | db1ef63367624275b9d184c5cda508391df4cc2b | 15aac957a737eaae441bb7eb4940e2ad4e9abb5a | refs/heads/master | 2020-04-15T01:18:26.178428 | 2019-01-26T21:04:13 | 2019-01-26T21:04:13 | 164,271,387 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,320 | py | from datetime import datetime
minimum_admission_to_throw = 2
today = datetime.strptime('2125-01-01', '%Y-%m-%d')
separate_symbol_in_visit = ' '
separate_symbol_between_visit = ','
separate_symbol = '\t'
def convert_to_icd9(dx_str):
if dx_str.startswith('E'):
if len(dx_str) > 4: return dx_str[:4] + '.' + dx_str[4:]
else: return dx_str
else:
if len(dx_str) > 3: return dx_str[:3] + '.' + dx_str[3:]
else: return dx_str
def convert_to_3digit_icd9(dx_str):
    """Truncate a raw ICD-9 code string to its category prefix.

    'E' codes keep four leading characters, everything else keeps three;
    shorter codes are returned unchanged (slicing past the end is a no-op).
    """
    keep = 4 if dx_str.startswith('E') else 3
    return dx_str[:keep]
'''
if __name__ == "__main__":
lst = ['E9352', '40391', '5781', 'V290', 'E915']
out_lst = list(map(convert_to_icd9, lst))
assert out_lst == ['E935.2', '403.91', '578.1', 'V29.0', 'E915']
out_lst2 = list(map(convert_to_3digit_icd9, lst))
assert out_lst2 == ['E935', '403', '578', 'V29', 'E915']
'''
### patient_id map to mortality label
def patientid_map_label(patient_files):
    """Map SUBJECT_ID -> mortality label from a MIMIC-III PATIENTS.csv file.

    A patient is labelled 1 when the date-of-death column (index 5) is
    non-empty, otherwise 0.  The first line is assumed to be a header.

    Args:
        patient_files: path to the PATIENTS.csv file.

    Returns:
        dict mapping int SUBJECT_ID -> 0/1 mortality label.
    """
    # Use a context manager so the file handle is closed deterministically
    # (the original leaked it), and split each line only once.
    with open(patient_files, 'r') as handle:
        rows = handle.readlines()[1:]
    patient_id_2_label = {}
    for row in rows:
        tokens = row.strip().split(',')
        patient_id_2_label[int(tokens[1])] = 1 if tokens[5] else 0
    return patient_id_2_label
### key: SUBJECT_ID
### value 1 / 0
def patientid_map_admissionid_and_time(admission_file):
    """Parse a MIMIC-III ADMISSIONS.csv file into two lookup tables.

    Args:
        admission_file: path to ADMISSIONS.csv (header in the first line;
            SUBJECT_ID at index 1, HADM_ID at index 2, ADMITTIME at index 3).

    Returns:
        (patient_id_2_admission, admission_id_2_time) where the first maps
        SUBJECT_ID -> list of HADM_IDs (file order) and the second maps
        HADM_ID -> admission datetime.
    """
    from collections import defaultdict
    patient_id_2_admission = defaultdict(lambda: [])
    admission_id_2_time = {}
    # Context manager fixes the file-handle leak in the original version.
    with open(admission_file, 'r') as handle:
        lines = handle.readlines()[1:]
    for line in lines:
        tokens = line.strip().split(',')
        patient_id = int(tokens[1])
        admission_id = int(tokens[2])
        admission_time = datetime.strptime(tokens[3], '%Y-%m-%d %H:%M:%S')
        admission_id_2_time[admission_id] = admission_time
        patient_id_2_admission[patient_id] += [admission_id]
    return patient_id_2_admission, admission_id_2_time
def admissionid_map_icdcode(diagnosis_file):
    """Parse DIAGNOSES_ICD.csv into HADM_ID -> ICD-9 code-list mappings.

    Args:
        diagnosis_file: path to DIAGNOSES_ICD.csv (header first; HADM_ID at
            index 2, quoted ICD9_CODE at index 4).

    Returns:
        (admission_id_2_icd, admission_id_2_icd_3digit): parallel mappings
        with full 'D_'-prefixed ICD-9 codes and their 3-digit variants.
    """
    from collections import defaultdict
    admission_id_2_icd = defaultdict(lambda: [])
    admission_id_2_icd_3digit = defaultdict(lambda: [])
    # Context manager fixes the file-handle leak in the original version.
    with open(diagnosis_file, 'r') as handle:
        lines = handle.readlines()[1:]
    for line in lines:
        tokens = line.strip().split(',')
        admission_id = int(tokens[2])
        # tokens[4] is quoted in the CSV; [1:-1] strips the quotes.
        icd9_code = 'D_' + convert_to_icd9(tokens[4][1:-1])
        icd9_3_digits_code = 'D_' + convert_to_3digit_icd9(tokens[4][1:-1])
        admission_id_2_icd[admission_id] += [icd9_code]
        admission_id_2_icd_3digit[admission_id] += [icd9_3_digits_code]
    return admission_id_2_icd, admission_id_2_icd_3digit
def admissionid_map_ccs(diagnosis_file):
    """Parse DIAGNOSES_ICD.csv into HADM_ID -> CCS code lists.

    Rows with an empty ICD-9 code are skipped.  Codes are translated via
    the project-local ``ccs.icdcode2idx`` table.

    Args:
        diagnosis_file: path to DIAGNOSES_ICD.csv.

    Returns:
        defaultdict mapping HADM_ID -> list of CCS code indices.
    """
    from collections import defaultdict
    from ccs import icdcode2idx
    admission_id_2_ccscode = defaultdict(lambda:[])
    # Context manager fixes the file-handle leak in the original version.
    with open(diagnosis_file, 'r') as handle:
        lines = handle.readlines()[1:]
    for line in lines:
        tokens = line.strip().split(',')
        admission_id = int(tokens[2])
        # tokens[4] is quoted in the CSV; skip rows with an empty code.
        if tokens[4][1:-1] == '':
            continue
        ccs_code = icdcode2idx[tokens[4][1:-1]]
        admission_id_2_ccscode[admission_id] += [ccs_code]
    return admission_id_2_ccscode
def patientid_map_icdcode_and_time(
        patient_id_2_admission,
        admission_id_2_time,
        admission_id_2_icd,
        admission_id_2_icd_3digit):
    """Build per-patient visit sequences ordered by admission time.

    Patients with fewer than ``minimum_admission_to_throw`` admissions are
    dropped.  Each value is a chronologically sorted list of
    (admission_time, code_list) pairs; one mapping carries full ICD-9
    codes, the other the 3-digit variants.
    """
    from collections import defaultdict
    patient_id_2_icd_and_time = defaultdict(lambda: [])
    patient_id_2_icd3digit_and_time = defaultdict(lambda: [])
    for patient_id, admissions in patient_id_2_admission.items():
        if len(admissions) < minimum_admission_to_throw:
            continue
        timed_full = [(admission_id_2_time[aid], admission_id_2_icd[aid])
                      for aid in admissions]
        timed_3digit = [(admission_id_2_time[aid], admission_id_2_icd_3digit[aid])
                        for aid in admissions]
        patient_id_2_icd_and_time[patient_id] = sorted(timed_full)
        patient_id_2_icd3digit_and_time[patient_id] = sorted(timed_3digit)
    return patient_id_2_icd_and_time, patient_id_2_icd3digit_and_time
def patientid_map_ccscode_and_time(
        patient_id_2_admission,
        admission_id_2_time,
        admission_id_2_ccscode):
    """Build per-patient CCS visit sequences ordered by admission time.

    Patients with fewer than ``minimum_admission_to_throw`` admissions are
    dropped.  Each value is a chronologically sorted list of
    (admission_time, ccs_code_list) pairs.
    """
    from collections import defaultdict
    patient_id_2_ccs_and_time = defaultdict(lambda: [])
    for patient_id, admissions in patient_id_2_admission.items():
        if len(admissions) < minimum_admission_to_throw:
            continue
        timed = [(admission_id_2_time[aid], admission_id_2_ccscode[aid])
                 for aid in admissions]
        patient_id_2_ccs_and_time[patient_id] = sorted(timed)
    return patient_id_2_ccs_and_time
def generate_whole_list(patient_id_2_icd_and_time, patient_id_2_icd3digit_and_time, patient_id_2_label):
    """Flatten the per-patient mappings into parallel lists.

    Returns (patient_ids, visit_times, code_seqs, code3digit_seqs, labels),
    aligned by the iteration order of the input mappings (both mappings are
    assumed to share that order).
    """
    ids = []
    times_all = []
    code_seqs = []
    labels = []
    for pid, visits in patient_id_2_icd_and_time.items():
        ids.append(pid)
        labels.append(patient_id_2_label[pid])
        times_all.append([when for when, _ in visits])
        code_seqs.append([codes for _, codes in visits])
    code3digit_seqs = [[codes for _, codes in visits]
                       for visits in patient_id_2_icd3digit_and_time.values()]
    return ids, times_all, code_seqs, code3digit_seqs, labels
def generate_whole_list_ccs(patient_id_2_ccs_and_time, patient_id_2_label):
    """Flatten the CCS mapping into parallel (ids, times, seqs, labels) lists."""
    ids = []
    times_all = []
    code_seqs = []
    labels = []
    for pid, visits in patient_id_2_ccs_and_time.items():
        ids.append(pid)
        labels.append(patient_id_2_label[pid])
        times_all.append([when for when, _ in visits])
        code_seqs.append([codes for _, codes in visits])
    return ids, times_all, code_seqs, labels
def update_seq(seqs):
    """
    Re-encode ICD-9 code sequences as dense integer indices.

    Each distinct code gets an index in first-seen order (0, 1, 2, ...).
    The index -> code table is also written to result/code_map.txt, one
    code per line, and the vocabulary size is printed.
    """
    from collections import defaultdict
    icdcode2idx = defaultdict(lambda: len(icdcode2idx))
    new_seqs = [[[icdcode2idx[code] for code in visit] for visit in patient]
                for patient in seqs]
    print('number of code is {}'.format(len(icdcode2idx)))
    idx2icdcode = {idx: code for code, idx in icdcode2idx.items()}
    with open('result/code_map.txt', 'w') as fout:
        for idx in range(len(icdcode2idx)):
            fout.write(idx2icdcode[idx] + '\n')
    return new_seqs
def date_to_time(time_list):
    # Convert nested admission datetimes into integer day offsets measured
    # back from the module-level anchor date ``today`` (2125-01-01).
    return [[(today - date).days for date in j] for j in time_list]
def lst_to_string(seq_idx_lst):
    """Serialize per-patient visit sequences into strings.

    Codes within a visit are joined with ``separate_symbol_in_visit`` and
    visits with ``separate_symbol_between_visit``, e.g.
    [[1,2,3], [2,3,4]] -> '1 2 3,2 3 4'.
    """
    # f1: join one visit's codes, e.g. [1,2,3] -> '1 2 3'
    f1 = lambda x: separate_symbol_in_visit.join(list(map(lambda y:str(y), x)))
    """
    f1: [1,2,3] => '1 2 3'
    """
    # f2: join a patient's visits, e.g. [[1,2,3], [2,3,4]] -> '1 2 3,2 3 4'
    f2 = lambda x: separate_symbol_between_visit.join(list(map(f1, x)))
    """
    [[1,2,3], [2,3,4]] => '1 2 3,2 3 4'
    """
    return list(map(f2, seq_idx_lst))
'''
if __name__ == "__main__":
a = [[[1,2,3], [2,3,4]], [[1,2,3], [2,3,4]]]
print(lst_to_string(a))
'''
'''
print('Making additional modifications to the data')
#Compute time to today as to_event column
today = datetime.strptime('2025-01-01', '%Y-%m-%d')
to_event = [[(today-date).days for date in patient] for patient in dates]
#Compute time of the day when the person was admitted as the numeric column of size 1
numerics = [[[date.hour * 60 + date.minute - 720] for date in patient] for patient in dates]
#Add this feature to dictionary but leave 1 index empty for PADDING
types['Time of visit'] = len(types)+1
types_3digit['Time of visit'] = len(types_3digit)+1
#Compute sorting indicies
sort_indicies = np.argsort(list(map(len, to_event)))
#Create the dataframes of data and sort them according to number of visits per patient
all_data = pd.DataFrame(data={'codes': new_seqs,
'to_event': to_event,
'numerics': numerics}
,columns=['codes', 'to_event', 'numerics'])\
.iloc[sort_indicies].reset_index()
all_data_3digit = pd.DataFrame(data={'codes': new_seqs_3digit,
'to_event': to_event,
'numerics': numerics}
,columns=['codes', 'to_event', 'numerics'])\
.iloc[sort_indicies].reset_index()
all_targets = pd.DataFrame(data={'target': morts}
,columns=['target'])\
.iloc[sort_indicies].reset_index()
'''
| [
"[email protected]"
] | |
cbfd75a923ee6ced4ac597da44b7dce3d0a8c350 | a4c04117685c3d28dd60bdfc45654cb2c935f746 | /read_gedi_l2b.py | c5897d51ba3ac9e0fc2d517a732a03472d101b74 | [] | no_license | DKnapp64/General_Python_Codes | 1ca40779bb381d526d61c5d5fedcc76ae797c590 | 8d4669c82c17455640a0a3123f92760cd65cc26a | refs/heads/main | 2023-02-28T05:55:46.018482 | 2021-02-01T21:55:16 | 2021-02-01T21:55:16 | 335,077,354 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,098 | py | #!/usr/bin/env python3
import h5py
import pandas as pd
import numpy as np
from shapely.geometry import Point
import geopandas as gpd
import os, sys
fname = sys.argv[1]
f = h5py.File(fname, 'r')
## haydi_bre(f)
## print(list(f.keys()))
groups = [t for t in f.keys()]
print("File: %s" % (fname))
## read in GeoJSON of Peru, Ecuador, Colombia envelope
pec_env = gpd.read_file('pec_env2.geojson')
for group in groups:
if (group == 'METADATA' or group == 'BEAM0000' or group == 'BEAM0001' \
or group == 'BEAM0010' or group == 'BEAM0011'):
continue
df = pd.DataFrame()
## quality flags
algorun = f[group]['algorithmrun_flag']
l2aqual = f[group]['l2a_quality_flag']
l2bqual = f[group]['l2b_quality_flag']
degrade = f[group]['geolocation']['degrade_flag']
surface_flag = f[group]['surface_flag']
## other data
elev_high = f[group]['geolocation']['elev_highestreturn']
elev_low = f[group]['geolocation']['elev_lowestmode']
elev_bin0 = f[group]['geolocation']['elevation_bin0']
elev_bin0_error = f[group]['geolocation']['elevation_bin0_error']
elev_last = f[group]['geolocation']['elevation_lastbin']
elev_last_error = f[group]['geolocation']['elevation_lastbin_error']
hgt_bin0 = f[group]['geolocation']['height_bin0']
hgt_lastbin = f[group]['geolocation']['height_lastbin']
pgap_theta = f[group]['pgap_theta']
pgap_theta_error = f[group]['pgap_theta_error']
rh100 = f[group]['rh100']
beam = f[group]['beam']
lons = f[group]['geolocation']['longitude_bin0']
lats = f[group]['geolocation']['latitude_bin0']
shotnum = f[group]['geolocation']['shot_number']
sensi = f[group]['sensitivity']
solarelev = f[group]['geolocation']['solar_elevation']
cover = f[group]['cover']
landsattreecov = f[group]['land_cover_data']['landsat_treecover']
modistreecov = f[group]['land_cover_data']['modis_treecover']
pai = f[group]['pai']
inside = np.zeros((pai.shape[0]), dtype=np.bool)
## good = np.all(goodstack, axis=0)
for j in range(pai.shape[0]):
if (pec_env.geometry[0].contains(Point(lons[j], lats[j])) \
and (algorun[j] > 0) and (l2aqual[j] > 0) and (l2bqual[j] > 0) \
and (solarelev[j] < 0.0)):
inside[j] = True
if (np.sum(inside) == 0):
print("Shapefile for %s with %d surviving shots" % (group, np.sum(inside)))
continue
algorun = algorun[inside]
l2aqual = l2aqual[inside]
l2bqual = l2bqual[inside]
degrade = degrade[inside]
surface_flag = surface_flag[inside]
elev_high = elev_high[inside]
elev_low = elev_low[inside]
elev_last = elev_last[inside]
elev_last_error = elev_last_error[inside]
elev_bin0 = elev_bin0[inside]
elev_bin0_error = elev_bin0_error[inside]
hgt_bin0 = hgt_bin0[inside]
hgt_lastbin = hgt_lastbin[inside]
beam = beam[inside]
lons = lons[inside]
lats = lats[inside]
shotnum = shotnum[inside]
cover = cover[inside]
landsattreecov = landsattreecov[inside]
modistreecov = modistreecov[inside]
sensi = sensi[inside]
solarelev = solarelev[inside]
pai = pai[inside]
pgap_theta = pgap_theta[inside]
pgap_theta_error = pgap_theta_error[inside]
rh100 = rh100[inside]
df['beam'] = beam
## put them in the data frame
df['shot_number'] = shotnum
df['elev_high'] = elev_high
df['elev_low'] = elev_low
df['height'] = elev_high - elev_low
df['elev_last'] = elev_last
df['elev_last_error'] = elev_last_error
df['elev_bin0'] = elev_bin0
df['elev_bin0_error'] = elev_bin0_error
df['height2'] = elev_bin0 - elev_last
df['hgt_bin0'] = hgt_bin0
df['hgt_lastbin'] = hgt_lastbin
df['height3'] = hgt_bin0 - hgt_lastbin
df['cover'] = cover
df['pai'] = pai
df['pgap_theta'] = pgap_theta
df['pgap_theta_error'] = pgap_theta_error
df['rh100'] = rh100/100.0
df['lstreecov'] = landsattreecov
df['modtreecov'] = modistreecov
df['l2a_qual'] = l2aqual
df['l2b_qual'] = l2bqual
df['degrade'] = degrade
df['sensi'] = sensi
df['solarelev'] = solarelev
df['algorun'] = algorun
df.astype({'beam':'int32', 'shot_number':'uint64', 'elev_high':'float32', 'elev_low':'float32', \
'height':'float32', 'elev_last':'float32', 'elev_last_error':'float32', 'elev_bin0':'float32',\
'elev_bin0_error':'float32', 'height2':'float32', 'hgt_bin0':'float32', \
'hgt_lastbin':'float32', 'height3':'float32', 'cover':'float32', 'pai':'float32', \
'pgap_theta':'float32', 'pgap_theta_error':'float32', \
'rh100':'float32', 'lstreecov':'float32', \
'modtreecov':'float32', 'l2a_qual':'uint8', 'l2b_qual':'uint8', 'degrade':'uint8', \
'sensi':'float32', 'solarelev':'float32', 'algorun':'uint8'})
geometries = gpd.points_from_xy(lons, lats)
gdf = gpd.GeoDataFrame(df, geometry=geometries)
gdf.crs = '+init=epsg:4326' # WGS84
gdf.to_file(os.path.splitext(fname)[0]+'_PEC_'+group+'.shp')
print("Finished Shapefile for %s with %d shots" % (group, np.sum(inside)))
f.close()
| [
"[email protected]"
] | |
090048756a8aeb5e5d027527c1844d1ed1266ff8 | a3d6556180e74af7b555f8d47d3fea55b94bcbda | /testing/run_pytype.py | fdb0b0cebeda89bac4c61d94946153c6f53a4443 | [
"BSD-3-Clause"
] | permissive | chromium/chromium | aaa9eda10115b50b0616d2f1aed5ef35d1d779d6 | a401d6cf4f7bf0e2d2e964c512ebb923c3d8832c | refs/heads/main | 2023-08-24T00:35:12.585945 | 2023-08-23T22:01:11 | 2023-08-23T22:01:11 | 120,360,765 | 17,408 | 7,102 | BSD-3-Clause | 2023-09-10T23:44:27 | 2018-02-05T20:55:32 | null | UTF-8 | Python | false | false | 1,165 | py | #!/usr/bin/env vpython3
# Copyright 2022 The Chromium Authors
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Simple helper script to run pytype on //testing code."""
import os
import sys
from pytype_common import pytype_runner
# Directory containing this script, and the Chromium src/ root above it.
TESTING_DIR = os.path.abspath(os.path.dirname(__file__))
CHROMIUM_SRC_DIR = os.path.realpath(os.path.join(TESTING_DIR, '..'))
# Extra import roots pytype needs (typ lives inside catapult), plus
# //testing itself.
EXTRA_PATHS_COMPONENTS = [
    ('third_party', 'catapult', 'third_party', 'typ'),
]
EXTRA_PATHS = [
    os.path.join(CHROMIUM_SRC_DIR, *p) for p in EXTRA_PATHS_COMPONENTS
]
EXTRA_PATHS.append(TESTING_DIR)
# Targets to type-check, first expressed relative to //testing and then
# expanded to absolute paths.
FILES_AND_DIRECTORIES_TO_CHECK = [
    'unexpected_passes_common',
    'flake_suppressor_common',
]
FILES_AND_DIRECTORIES_TO_CHECK = [
    os.path.join(TESTING_DIR, f) for f in FILES_AND_DIRECTORIES_TO_CHECK
]
TEST_NAME = 'testing_pytype'
TEST_LOCATION = "//testing/run_pytype.py"
def main() -> int:
  """Run pytype over the //testing targets; returns the exit code."""
  return pytype_runner.run_pytype(TEST_NAME, TEST_LOCATION,
                                  FILES_AND_DIRECTORIES_TO_CHECK,
                                  EXTRA_PATHS, TESTING_DIR)
if __name__ == '__main__':
  sys.exit(main())
"[email protected]"
] | |
0445697b424a8cf920dd80be998d7abdf21f8014 | 7f1a316ad1b19481e378953d7ffdd27fa435b1a4 | /00-lecture/python2/week6/day1/dojodesk/apps/comments/migrations/0001_initial.py | 829b4a81631d6fc3cca20f93d50036ba2c3c1743 | [] | no_license | Python-November-2018/wes_harper | 03eaed6d840038473339102ab9b8b85e31084555 | c5bcef58bb322d5487a7595f3f4f4fa7ae498a69 | refs/heads/master | 2020-04-03T18:12:44.212347 | 2018-12-20T03:38:38 | 2018-12-20T03:38:38 | 155,474,954 | 1 | 4 | null | null | null | null | UTF-8 | Python | false | false | 1,056 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.10 on 2018-12-04 01:50
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Auto-generated initial migration for the comments app: creates the
    # Comment table with cascading foreign keys to tickets.Ticket and
    # users.User.  Do not edit by hand beyond comments.
    initial = True
    dependencies = [
        ('tickets', '0001_initial'),
        ('users', '0001_initial'),
    ]
    operations = [
        migrations.CreateModel(
            name='Comment',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('content', models.TextField()),
                ('created_at', models.DateTimeField(auto_now_add=True)),
                ('updated_at', models.DateTimeField(auto_now=True)),
                ('ticket', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='comments', to='tickets.Ticket')),
                ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='comments', to='users.User')),
            ],
        ),
    ]
| [
"[email protected]"
] | |
2b75e38eaab3622916e1d4a48df9fc6747581892 | ff6248be9573caec94bea0fa2b1e4b6bf0aa682b | /log-20190927/132.230.102.123-10.21.11.38/1569575015.py | 103d484cea235b66a481b6ba8b712737d192faa4 | [] | no_license | LennartElbe/codeEvo | 0e41b1a7705204e934ef71a5a28c047366c10f71 | e89b329bc9edd37d5d9986f07ca8a63d50686882 | refs/heads/master | 2020-12-21T17:28:25.150352 | 2020-03-26T10:22:35 | 2020-03-26T10:22:35 | 236,498,032 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,635 | py | import functools
import typing
import string
import random
import pytest
def leap(n: int) -> bool:
    """Check whether a year is a leap year under the Gregorian rules.

    Args:
        n: the year number.

    Returns:
        True if n is a leap year, False otherwise.  A year is a leap year
        when it is divisible by 4, except century years, which must also
        be divisible by 400 (e.g. 2000 is, 1900 is not).
    """
    # The original returned None for years not divisible by 4 and True for
    # every multiple of 4, misclassifying century years such as 1900.
    if n % 4 != 0:
        return False
    if n % 100 != 0:
        return True
    return n % 400 == 0
######################################################################
## hidden code
def mk_coverage():
    """Build a branch-coverage tracking decorator for a leap-year function.

    The returned callable wraps a leap-year function and records which of
    the four divisibility branches (not %4; %4 only; %100 but not %400;
    %400) have been exercised by its arguments.  Passing the strings
    "achieved", "required" or "count" instead of a function queries the
    collected statistics.
    """
    covered = set()
    target = set(range(4))
    count = 0
    def coverage(func):
        nonlocal covered, target, count
        def wrapper(year):
            nonlocal covered, count
            # Classify the argument into one of the four leap-year branches.
            if year % 4 != 0:
                branch = 0
            elif year % 100 != 0:
                branch = 1
            elif year % 400 != 0:
                branch = 2
            else:
                branch = 3
            covered.add(branch)
            result = func(year)
            count += 1
            return result
        # Query mode: the "decorator" doubles as a statistics accessor.
        if func == "achieved":
            return len(covered)
        if func == "required":
            return len(target)
        if func == "count":
            return count
        functools.update_wrapper(wrapper, func)
        return wrapper
    return coverage
# Wrap the student's leap() with the coverage tracker.  Keep this
# best-effort so a missing or broken definition does not abort grading,
# but do not swallow SystemExit/KeyboardInterrupt with a bare except.
coverage = mk_coverage ()
try:
    leap = coverage(leap)
except Exception:
    pass
## Solution part 2 (tests)
def test_leap():
    """Exercise leap() on all four Gregorian branches.

    Adds the century-year edge cases (1900, 2000) the original missed;
    note the original implementation wrongly returns True for 1900.
    """
    assert leap(2000) == True
    assert leap(2001) == False
    assert leap(2004) == True
    assert leap(1900) == False
######################################################################
## hidden tests
pytest.main (["-v", "--assert=plain", "-p", "no:cacheprovider"])
from inspect import getfullargspec
class TestNames:
    # Grading checks on the expected API surface of the submission.
    def test_leap (self):
        assert leap
        # The exercise requires the parameter to be named 'year'.
        assert 'year' in getfullargspec(leap).args
class TestGrades:
    # Grading checks: documentation, typing, branch coverage, correctness.
    def test_docstring_present(self):
        assert leap.__doc__ is not None
    def test_typing_present(self):
        # NOTE(review): functions expose ``__annotations__``; ``__hints__``
        # is not a standard attribute -- confirm this check can ever pass.
        assert leap.__hints__ == typing.get_type_hints(self.leap_oracle)
    def test_coverage(self):
        assert coverage("achieved") == coverage("required")
    def leap_oracle(self, year :int) -> bool:
        # Reference implementation of the Gregorian leap-year rules.
        if year % 4 != 0:
            return False
        elif year % 100 != 0:
            return True
        elif year % 400 == 0:
            return True
        else:
            return False
    def check_leap (self, year):
        # Compare the submission against the oracle for one year.
        assert leap (year) == self.leap_oracle (year)
    def test_correctness(self):
        # Random years, plus century years to exercise the 100/400 branches.
        for i in range (100):
            year = random.randrange (1582,2500)
            self.check_leap (year)
        for i in range (100):
            year = random.randrange (1600,3000, 100)
            self.check_leap (year)
"[email protected]"
] | |
d593bba8d01a2062a54ce89cd99e5c1ad0533d5a | 006b7c62cc6682bfb0c69bede62145f4c5cd0eb1 | /art_bms/bacnet/api_urls.py | 338cd242284c628708606eb4cc2218fae34e254d | [] | no_license | ArtInfrastructure/art-bms | eb2961207c827b5fa1c0af8cdde08dcf5bc7fed2 | 2b384f371160feb0005b39af8cbaca9855c5c235 | refs/heads/master | 2016-09-06T02:35:27.920647 | 2010-03-03T21:51:16 | 2010-03-03T21:51:16 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 873 | py | # Copyright 2009 GORBET + BANERJEE (http://www.gorbetbanerjee.com/) Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with the License. You may obtain a copy of the License at http://www.apache.org/licenses/LICENSE-2.0 Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the specific language governing permissions and limitations under the License.
from django.conf.urls.defaults import *
from django.conf import settings
from models import *
# BACnet API routes: device detail and a single property of a device,
# both keyed by numeric ids (old-style string view references).
urlpatterns = patterns('',
    (r'^device/(?P<device_id>[\d]+)/$', 'bacnet.api_views.device'),
    (r'^device/(?P<device_id>[\d]+)/(?P<property_id>[\d]+)/$', 'bacnet.api_views.device_property'),
)
| [
"[email protected]"
] | |
d1d613fc7dc70dec5d314468c4b3a8102ab32086 | d3006a069f12a9c7a3cb49e412a9e679930bc94a | /backend/apps/shop/migrations/0006_product_image.py | e03a19444a740073bae96a37811c92f765aebdd3 | [] | no_license | alexmon1989/keiko | 906e60328bc86f58cae9d9a0f9266869ed29f99a | 45cc0d770ddd93129b8c51727a683cd15d0afad4 | refs/heads/master | 2022-12-12T08:00:31.843565 | 2019-08-13T11:50:17 | 2019-08-13T11:50:17 | 168,360,214 | 0 | 0 | null | 2022-12-08T11:58:54 | 2019-01-30T14:53:11 | CSS | UTF-8 | Python | false | false | 484 | py | # Generated by Django 2.1.5 on 2019-02-11 14:42
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated migration: adds an optional image field to Product.
    # The help_text/verbose_name strings are user-facing (Russian) and are
    # deliberately left untouched.
    dependencies = [
        ('shop', '0005_auto_20190209_2110'),
    ]
    operations = [
        migrations.AddField(
            model_name='product',
            name='image',
            field=models.ImageField(blank=True, help_text='Размер: 450px * 450px', null=True, upload_to='', verbose_name='Изображение'),
        ),
    ]
| [
"[email protected]"
] | |
9428b181597b43198e6c6924a54a87aec98ab83e | 12ebcf4d3bdc074c12aceac3818365585a325094 | /citest/base/journal_logger.py | 83d529da248a32dbe841561791b6ff264154a71f | [
"Apache-2.0"
] | permissive | VimalME/citest | 0bcb07053be2395856c0ac5c0f97d98ab14dedd4 | 44a101e0051016e1baacfb8cfa8c870a8f8c4e7a | refs/heads/master | 2020-08-29T02:14:14.118493 | 2017-06-14T21:22:23 | 2017-06-14T21:22:23 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,803 | py | # Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Specialized logging.Logger and logging.LogHandler to write into journals."""
import json as json_module
import logging
from .global_journal import (get_global_journal, new_global_journal_with_path)
def _to_json_if_possible(value):
  """Render value as a pretty-printed JSON string if possible.

  Strings are first decoded as JSON (so embedded JSON gets re-indented);
  other values are encoded directly.  Anything that fails to encode is
  rendered with str().

  Note: this module targets Python 2 -- ``basestring`` and the JSON
  ``encoding`` keyword argument do not exist on Python 3.

  Args:
    value: [any] The value to render into a string.

  Returns:
    formatted string
  """
  try:
    if isinstance(value, basestring):
      tmp = json_module.JSONDecoder(encoding='utf-8').decode(value)
      return json_module.JSONEncoder(indent=2,
                                     encoding='utf-8',
                                     separators=(',', ': ')).encode(tmp)
    else:
      return json_module.JSONEncoder(indent=2,
                                     encoding='utf-8',
                                     separators=(',', ': ')).encode(value)
  except (ValueError, UnicodeEncodeError):
    return str(value)
class JournalLogger(logging.Logger):
  """Journal-aware convenience functions layered over standard logging."""
  @staticmethod
  def delegate(method, *positional_args, **kwargs):
    """Call the method in the underlying journal, if there is one.

    This has no effect if we do not have a journal.

    Args:
      method: The method name to call in the logging journal.
      positional_args: The positional args to pass to the method.
      kwargs: The keyword args to pass to the method.
    """
    journal = get_global_journal()
    if not journal:
      return
    getattr(journal, method)(*positional_args, **kwargs)
  @staticmethod
  def store_or_log(_obj, levelno=logging.INFO,
                   _module=None, _alwayslog=False, **kwargs):
    """Store the object into the underlying journal, or log it if no journal.

    Args:
      _obj: The JsonSnapshotable object to store.
      levelno: [int] The logging debug level.
      _module: [string] The logging module name, or none for this.
      _alwayslog: [bool] If True then always log.
         Otherwise only journal, and log only if there is no journal.
      kwargs: Additional metadata to pass through to the journal.
    """
    journal = get_global_journal()
    if journal is not None:
      journal.store(_obj)
    # Mirror to the standard logger when requested or when no journal exists.
    if _alwayslog or journal is None:
      logging.getLogger(_module or __name__).log(
          levelno, repr(_obj), extra={'citest_journal': kwargs})
  @staticmethod
  def journal_or_log(_msg, levelno=logging.DEBUG,
                     _module=None, _alwayslog=False, **kwargs):
    """Writes a log message into the journal (if there is one) or logging API.

    This API is an alternative to logger.log that permits Journal metadata
    to be added. If there is no journal, then the message will be written into
    the normal logger API without the additional metadata.

    Args:
      _msg: [string] The log message to write.
      levelno: [int] The logging debug level.
      _module: [string] The logging module name, or none for this.
      _alwayslog: [bool] If True then always log.
         Otherwise only journal, and log only if there is no journal.
      kwargs: Additional metadata to pass through to the journal.
    """
    if 'format' not in kwargs:
      # If a format was not specified, then default to 'pre'.
      # Copy kwargs first so the caller's dict is not mutated.
      kwargs = dict(kwargs)
      kwargs['format'] = 'pre'
    JournalLogger._helper(
        _msg, levelno=levelno, _alwayslog=_alwayslog, _module=_module,
        metadata=kwargs)
  @staticmethod
  def _helper(_msg, levelno, _alwayslog, _module, metadata):
    """Route a message to the journal or the standard logging API."""
    journal = get_global_journal()
    if _alwayslog or journal is None:
      logging.getLogger(_module or __name__).log(
          levelno, _msg, extra={'citest_journal': metadata})
    else:
      journal.write_message(_msg, _level=levelno, **metadata)
  @staticmethod
  def journal_or_log_detail(_msg, _detail, levelno=logging.DEBUG,
                            _module=None, _alwayslog=False, **kwargs):
    """Log a message and detail.

    The detail is JSON-rendered when possible and appended to the message
    on a new line.

    Args:
      _msg: [string] The log message to write.
      _detail: [any] The data detail to log.
      levelno: [int] The logging debug level.
      _module: [string] The logging module name, or none for this.
      _alwayslog: [bool] If True then always log.
         Otherwise only journal, and log only if there is no journal.
      kwargs: Additional metadata to pass through to the journal.
    """
    json_text = _to_json_if_possible(_detail)
    JournalLogger.journal_or_log(
        _msg='{0}\n{1}'.format(_msg, json_text), levelno=levelno,
        _module=_module, _alwayslog=_alwayslog, **kwargs)
  @staticmethod
  def begin_context(_title, **kwargs):
    """
    Mark the beginning of a context in the journal.

    Future entries will be associated with this context until end_context()
    is called. Contexts can be nested.

    Args:
      _title: [string] The title of the context.
    """
    # 'nojournal' keeps the mirror log line out of the journal handler.
    logging.getLogger(__name__).debug(
        '+context %s', _title, extra={'citest_journal':{'nojournal':True}})
    journal = get_global_journal()
    if journal is not None:
      journal.begin_context(_title, **kwargs)
  @staticmethod
  def end_context(**kwargs):
    """Mark the ending of the current context within the journal."""
    logging.getLogger(__name__).debug(
        '-context',
        extra={'citest_journal':{'nojournal':True}})
    journal = get_global_journal()
    if journal is not None:
      journal.end_context(**kwargs)
class JournalLogHandler(logging.StreamHandler):
  """A standard log handler that will write journal entries.

  This handler is intended to be plugged into the normal python logging
  framework to mirror normal logging messages into the journal.

  Note that log messages are unstructured text, but the journal prefers
  structured data so that rendering can be more intelligent. Sometimes a
  call site may wish to log structured messages into the journal and
  unstructured to other loggers. Sometimes it may wish to log only to
  the journal (e.g. highly detailed data) and sometimes only to other loggers
  (e.g. because the journal was already given highly detailed data).

  The handler recognizes LogRecord attribute 'citest_journal', which is
  a dictionary that can be used for passing metadata to the journal and other
  parameters to this handler. The [optional] parameters stripped by the handler
  are:
     nojournal [bool]: If True do not log the message into the journal.
     _journal_message [string]: Journal this instead of the LogRecord message.
  """
  def __init__(self, path):
    """Construct a handler using the global journal.

    Ideally we'd like to inject a journal in here.
    But the logging config takes a string specification so we'd need
    a complicated way to say we want to use the global journal. So lets just
    do that.

    Args:
      path: [string] Specifies the path for the global journal, if it does not
         already exist.
    """
    super(JournalLogHandler, self).__init__()
    self.__journal = get_global_journal()
    if self.__journal is None:
      self.__journal = new_global_journal_with_path(path)
  def emit(self, record):
    """Emit the record to the journal."""
    journal_extra = getattr(record, 'citest_journal', {})
    if journal_extra.get('nojournal', False):
      # See class description
      return
    # Strip handler-directed keys before forwarding metadata to the journal.
    journal_extra.pop('nojournal', None)
    journal_extra.setdefault('format', 'pre')
    message = record.getMessage()
    message = journal_extra.pop('_journal_message', message)
    self.__journal.write_message(message,
                                 _level=record.levelno,
                                 _thread=record.thread,
                                 **journal_extra)
  def flush(self):
    """Implements the LogHandler interface."""
    # The journal always flushes. Since we are using the global journal,
    # which is accessible outside this logger, it needs to already be flushed
    # to allow interleaving writers to preserve ordering.
    pass
| [
"[email protected]"
] | |
b1b1772431492fcab55c92ae1d4fd03704fa9afe | 2442d073434d463cede4a79ae8f9fd31c62174f8 | /procedural-programming/io/without-readlines.py | d9a356fabba9a323ba50dd514ed32d2a93cd8438 | [] | no_license | grbalmeida/hello-python | 3630d75cfdde15223dc1c3a714fd562f6cda0505 | 4d9ddf2f7d104fdbc3aed2c88e50af19a39c1b63 | refs/heads/master | 2020-07-10T10:04:38.982256 | 2020-02-26T00:37:36 | 2020-02-26T00:37:36 | 204,237,527 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 136 | py | from FILE import FILE
file = open(FILE, 'r')
for index, line in enumerate(file):
print(f'{index + 1} {line}', end='')
file.close()
| [
"[email protected]"
] | |
9fd8d20f8d484b4a863e15f52bf93ca314cbe5c9 | c0bf1f7ca6d9d7562f72b4a668e97a2d5ffe7c88 | /tests/extension/types_/axi_/write_lite/test_types_axi_write_lite.py | 1c98a783d3f415821c00d1bca7f8f49b3bfd4486 | [
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | 00mjk/veriloggen | cee0da16182c3c9bd95340a966d6a3febc0e7ad1 | 9d0af9638470b3b85cbf9cb53f16b853932571c8 | refs/heads/master | 2023-06-23T07:10:20.645734 | 2021-07-18T14:53:13 | 2021-07-18T14:53:13 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 488 | py | from __future__ import absolute_import
from __future__ import print_function
import os
import veriloggen
import types_axi_write_lite
def test(request):
    """Pytest entry: simulate the AXI-lite write example and check the result.

    The simulator backend is selected with the custom --sim option; the
    run's last output line must be the verifier's PASSED marker.
    """
    # Reset veriloggen's global module state between tests.
    veriloggen.reset()
    simtype = request.config.getoption('--sim')
    rslt = types_axi_write_lite.run(filename=None, simtype=simtype,
                                    outputfile=os.path.splitext(os.path.basename(__file__))[0] + '.out')
    verify_rslt = rslt.splitlines()[-1]
    assert(verify_rslt == '# verify: PASSED')
| [
"[email protected]"
] | |
67b77c60ffcd0ca9797a34eb6b56830ac5b0b355 | ef4e046b3521c97345b1b1bcf58a1f16a2eaf603 | /mysql/demo.py | ccd8cbce22b045f34eb4b706a7049ffdb8a6794b | [
"MIT"
] | permissive | wasit7/tutorials | 0e23b6ffc55519df80fa47473f13baf55e1573ef | 83499821266c8debac05cb5d6d5f6da0f0abd68f | refs/heads/master | 2020-12-11T01:06:35.532592 | 2017-04-29T15:13:01 | 2017-04-29T15:13:01 | 37,713,995 | 4 | 0 | null | null | null | null | UTF-8 | Python | false | false | 986 | py | # -*- coding: utf-8 -*-
"""
Created on Wed Dec 30 22:09:44 2015
@author: Nun
edited by Wasit
"""
from datetime import datetime
import pytz
import random
#if using additinal connector
#import mysql.connector
#conn = mysql.connector.Connect(host='146.148.37.209 ',user='root',password='',database='testdb')
#if using >>conda install mysql-python
import MySQLdb
#db = MySQLdb.connect(host= "146.148.37.209",user="root",passwd="",db="weather")
db = MySQLdb.connect(host= "173.194.246.163",user="root",passwd="",db="weather")
c = db.cursor()
#c.execute("""drop table if exists weather""")
#conn.commit()
c.execute("""create table if not exists weather (
time DATETIME NOT NULL PRIMARY KEY,
temp real NOT NULL,
humi real NOT NULL,
israin BOOLEAN NOT NULL)""")
cmd="insert into weather values ('%s', %.1f, %.1f, 0)"%(
datetime.now(pytz.timezone('Asia/Bangkok')).isoformat(),
random.randint(30,40),
random.randint(70,100))
print cmd
c.execute(cmd)
db.commit()
| [
"[email protected]"
] | |
8bb82ebd7fdb6277c645454dc99a826e43499c6d | 444a9480bce2035565332d4d4654244c0b5cd47b | /research/cv/DDM/utils/serialization.py | 4ac849f4896f01da60667af490919a156e345991 | [
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference",
"LicenseRef-scancode-proprietary-license"
] | permissive | mindspore-ai/models | 7ede9c6454e77e995e674628204e1c6e76bd7b27 | eab643f51336dbf7d711f02d27e6516e5affee59 | refs/heads/master | 2023-07-20T01:49:34.614616 | 2023-07-17T11:43:18 | 2023-07-17T11:43:18 | 417,393,380 | 301 | 92 | Apache-2.0 | 2023-05-17T11:22:28 | 2021-10-15T06:38:37 | Python | UTF-8 | Python | false | false | 894 | py | # Copyright 2021 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""load info."""
import json
import yaml
def json_load(file_path):
    """Parse the JSON document at ``file_path`` and return the result."""
    with open(file_path, 'r') as json_file:
        parsed = json.load(json_file)
    return parsed
def yaml_load(file_path):
    """Parse the YAML document at ``file_path`` and return the result.

    Uses ``yaml.safe_load``: calling ``yaml.load`` without an explicit
    ``Loader`` allows arbitrary Python object construction from the file
    and has raised ``TypeError`` since PyYAML 6.0, where the ``Loader``
    argument became mandatory.  ``safe_load`` handles plain data documents
    (mappings, lists, scalars), which is what config files like these are.
    """
    with open(file_path, 'r') as f:
        return yaml.safe_load(f)
| [
"[email protected]"
] | |
70439002a7dca17027ae86e3744e998ceda32a0b | a479a5773fd5607f96c3b84fed57733fe39c3dbb | /napalm_yang/models/openconfig/network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/mt_isn/neighbors/neighbor/subTLVs/subTLVs_/utilized_bandwidth/__init__.py | 7ab2641b1214d0b1a89c518837da459fd503b9b3 | [
"Apache-2.0"
] | permissive | napalm-automation/napalm-yang | 839c711e9294745534f5fbbe115e0100b645dbca | 9148e015b086ebe311c07deb92e168ea36fd7771 | refs/heads/develop | 2021-01-11T07:17:20.226734 | 2019-05-15T08:43:03 | 2019-05-15T08:43:03 | 69,226,025 | 65 | 64 | Apache-2.0 | 2019-05-15T08:43:24 | 2016-09-26T07:48:42 | Python | UTF-8 | Python | false | false | 12,450 | py | # -*- coding: utf-8 -*-
from operator import attrgetter
from pyangbind.lib.yangtypes import RestrictedPrecisionDecimalType
from pyangbind.lib.yangtypes import RestrictedClassType
from pyangbind.lib.yangtypes import TypedListType
from pyangbind.lib.yangtypes import YANGBool
from pyangbind.lib.yangtypes import YANGListType
from pyangbind.lib.yangtypes import YANGDynClass
from pyangbind.lib.yangtypes import ReferenceType
from pyangbind.lib.base import PybindBase
from collections import OrderedDict
from decimal import Decimal
from bitarray import bitarray
import six
# PY3 support of some PY2 keywords (needs improved)
if six.PY3:
import builtins as __builtin__
long = int
elif six.PY2:
import __builtin__
from . import state
class utilized_bandwidth(PybindBase):
    """
    This class was auto-generated by the PythonClass plugin for PYANG
    from YANG module openconfig-network-instance - based on the path /network-instances/network-instance/protocols/protocol/isis/levels/level/link-state-database/lsp/tlvs/tlv/mt-isn/neighbors/neighbor/subTLVs/subTLVs/utilized-bandwidth. Each member element of
    the container is represented as a class variable - with a specific
    YANG type.

    YANG Description: This container defines unidirectional utilized bandwidth.

    NOTE(review): generated code — regenerate from the YANG model rather than
    hand-editing. The only child element is the read-only ``state`` container.
    """

    # Generated classes avoid a per-instance __dict__; "__state" holds the
    # single YANG child container (name-mangled to _utilized_bandwidth__state).
    __slots__ = ("_path_helper", "_extmethods", "__state")

    _yang_name = "utilized-bandwidth"

    _pybind_generated_by = "container"

    def __init__(self, *args, **kwargs):
        # Initialize the child "state" container; an optional single
        # positional argument is treated as an object to copy element
        # values from (it must expose every element in _pyangbind_elements).

        self._path_helper = False

        self._extmethods = False

        self.__state = YANGDynClass(
            base=state.state,
            is_container="container",
            yang_name="state",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            extensions=None,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="container",
            is_config=False,
        )

        load = kwargs.pop("load", None)
        if args:
            if len(args) > 1:
                raise TypeError("cannot create a YANG container with >1 argument")
            all_attr = True
            for e in self._pyangbind_elements:
                if not hasattr(args[0], e):
                    all_attr = False
                    break
            if not all_attr:
                raise ValueError("Supplied object did not have the correct attributes")
            for e in self._pyangbind_elements:
                nobj = getattr(args[0], e)
                if nobj._changed() is False:
                    continue
                setmethod = getattr(self, "_set_%s" % e)
                if load is None:
                    setmethod(getattr(args[0], e))
                else:
                    setmethod(getattr(args[0], e), load=load)

    def _path(self):
        # Return the full YANG path of this container as a list of node
        # names, delegating to the parent when one is attached.
        if hasattr(self, "_parent"):
            return self._parent._path() + [self._yang_name]
        else:
            return [
                "network-instances",
                "network-instance",
                "protocols",
                "protocol",
                "isis",
                "levels",
                "level",
                "link-state-database",
                "lsp",
                "tlvs",
                "tlv",
                "mt-isn",
                "neighbors",
                "neighbor",
                "subTLVs",
                "subTLVs",
                "utilized-bandwidth",
            ]

    def _get_state(self):
        """
        Getter method for state, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/mt_isn/neighbors/neighbor/subTLVs/subTLVs/utilized_bandwidth/state (container)

        YANG Description: State parameters of IS Extended Reachability sub-TLV 39.
        """
        return self.__state

    def _set_state(self, v, load=False):
        """
        Setter method for state, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/mt_isn/neighbors/neighbor/subTLVs/subTLVs/utilized_bandwidth/state (container)
        If this variable is read-only (config: false) in the
        source YANG file, then _set_state is considered as a private
        method. Backends looking to populate this variable should
        do so via calling thisObj._set_state() directly.

        YANG Description: State parameters of IS Extended Reachability sub-TLV 39.
        """
        if hasattr(v, "_utype"):
            v = v._utype(v)
        try:
            # Wrap the supplied value in the YANG-typed dynamic class; any
            # incompatible value surfaces as TypeError/ValueError below.
            t = YANGDynClass(
                v,
                base=state.state,
                is_container="container",
                yang_name="state",
                parent=self,
                path_helper=self._path_helper,
                extmethods=self._extmethods,
                register_paths=True,
                extensions=None,
                namespace="http://openconfig.net/yang/network-instance",
                defining_module="openconfig-network-instance",
                yang_type="container",
                is_config=False,
            )
        except (TypeError, ValueError):
            raise ValueError(
                {
                    "error-string": """state must be of a type compatible with container""",
                    "defined-type": "container",
                    "generated-type": """YANGDynClass(base=state.state, is_container='container', yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=False)""",
                }
            )

        self.__state = t
        if hasattr(self, "_set"):
            self._set()

    def _unset_state(self):
        # Reset the child container back to a fresh default instance.
        self.__state = YANGDynClass(
            base=state.state,
            is_container="container",
            yang_name="state",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            extensions=None,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="container",
            is_config=False,
        )

    # Read-only property (no setter): "state" is config false in the model.
    state = __builtin__.property(_get_state)

    _pyangbind_elements = OrderedDict([("state", state)])
from . import state
class utilized_bandwidth(PybindBase):
    """
    This class was auto-generated by the PythonClass plugin for PYANG
    from YANG module openconfig-network-instance-l2 - based on the path /network-instances/network-instance/protocols/protocol/isis/levels/level/link-state-database/lsp/tlvs/tlv/mt-isn/neighbors/neighbor/subTLVs/subTLVs/utilized-bandwidth. Each member element of
    the container is represented as a class variable - with a specific
    YANG type.

    YANG Description: This container defines unidirectional utilized bandwidth.

    NOTE(review): generated code — this second definition comes from the
    openconfig-network-instance-l2 module and deliberately shadows the
    identical class defined above for openconfig-network-instance; this is
    how pyangbind emits augmenting modules. Regenerate rather than hand-edit.
    """

    # Slots avoid a per-instance __dict__; "__state" is name-mangled to
    # _utilized_bandwidth__state and holds the single YANG child container.
    __slots__ = ("_path_helper", "_extmethods", "__state")

    _yang_name = "utilized-bandwidth"

    _pybind_generated_by = "container"

    def __init__(self, *args, **kwargs):
        # Initialize the child "state" container; an optional single
        # positional argument is treated as an object to copy element
        # values from (it must expose every element in _pyangbind_elements).

        self._path_helper = False

        self._extmethods = False

        self.__state = YANGDynClass(
            base=state.state,
            is_container="container",
            yang_name="state",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            extensions=None,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="container",
            is_config=False,
        )

        load = kwargs.pop("load", None)
        if args:
            if len(args) > 1:
                raise TypeError("cannot create a YANG container with >1 argument")
            all_attr = True
            for e in self._pyangbind_elements:
                if not hasattr(args[0], e):
                    all_attr = False
                    break
            if not all_attr:
                raise ValueError("Supplied object did not have the correct attributes")
            for e in self._pyangbind_elements:
                nobj = getattr(args[0], e)
                if nobj._changed() is False:
                    continue
                setmethod = getattr(self, "_set_%s" % e)
                if load is None:
                    setmethod(getattr(args[0], e))
                else:
                    setmethod(getattr(args[0], e), load=load)

    def _path(self):
        # Return the full YANG path of this container as a list of node
        # names, delegating to the parent when one is attached.
        if hasattr(self, "_parent"):
            return self._parent._path() + [self._yang_name]
        else:
            return [
                "network-instances",
                "network-instance",
                "protocols",
                "protocol",
                "isis",
                "levels",
                "level",
                "link-state-database",
                "lsp",
                "tlvs",
                "tlv",
                "mt-isn",
                "neighbors",
                "neighbor",
                "subTLVs",
                "subTLVs",
                "utilized-bandwidth",
            ]

    def _get_state(self):
        """
        Getter method for state, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/mt_isn/neighbors/neighbor/subTLVs/subTLVs/utilized_bandwidth/state (container)

        YANG Description: State parameters of IS Extended Reachability sub-TLV 39.
        """
        return self.__state

    def _set_state(self, v, load=False):
        """
        Setter method for state, mapped from YANG variable /network_instances/network_instance/protocols/protocol/isis/levels/level/link_state_database/lsp/tlvs/tlv/mt_isn/neighbors/neighbor/subTLVs/subTLVs/utilized_bandwidth/state (container)
        If this variable is read-only (config: false) in the
        source YANG file, then _set_state is considered as a private
        method. Backends looking to populate this variable should
        do so via calling thisObj._set_state() directly.

        YANG Description: State parameters of IS Extended Reachability sub-TLV 39.
        """
        if hasattr(v, "_utype"):
            v = v._utype(v)
        try:
            # Wrap the supplied value in the YANG-typed dynamic class; any
            # incompatible value surfaces as TypeError/ValueError below.
            t = YANGDynClass(
                v,
                base=state.state,
                is_container="container",
                yang_name="state",
                parent=self,
                path_helper=self._path_helper,
                extmethods=self._extmethods,
                register_paths=True,
                extensions=None,
                namespace="http://openconfig.net/yang/network-instance",
                defining_module="openconfig-network-instance",
                yang_type="container",
                is_config=False,
            )
        except (TypeError, ValueError):
            raise ValueError(
                {
                    "error-string": """state must be of a type compatible with container""",
                    "defined-type": "container",
                    "generated-type": """YANGDynClass(base=state.state, is_container='container', yang_name="state", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions=None, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='container', is_config=False)""",
                }
            )

        self.__state = t
        if hasattr(self, "_set"):
            self._set()

    def _unset_state(self):
        # Reset the child container back to a fresh default instance.
        self.__state = YANGDynClass(
            base=state.state,
            is_container="container",
            yang_name="state",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            extensions=None,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="container",
            is_config=False,
        )

    # Read-only property (no setter): "state" is config false in the model.
    state = __builtin__.property(_get_state)

    _pyangbind_elements = OrderedDict([("state", state)])
| [
"[email protected]"
] | |
1f2f6f6b6e2ce1d00611571c0f7bb6e894ea97ef | 53fab060fa262e5d5026e0807d93c75fb81e67b9 | /backup/user_078/ch20_2020_04_11_20_17_50_548987.py | a96fc9803b658c8c63e6829c6f9b06960b007e43 | [] | no_license | gabriellaec/desoft-analise-exercicios | b77c6999424c5ce7e44086a12589a0ad43d6adca | 01940ab0897aa6005764fc220b900e4d6161d36b | refs/heads/main | 2023-01-31T17:19:42.050628 | 2020-12-16T05:21:31 | 2020-12-16T05:21:31 | 306,735,108 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 251 | py | distancia=float(input('Qual distância deseja percorrer? '))
# Fare rule: R$0.50 per km; trips longer than 200 km add R$0.45 per extra km.
# Bug fix: the original used Brazilian decimal commas ("0,50", "0,45"), which
# Python parsed as TWO .format() arguments — the short branch always printed
# "0.0R$" and the long branch raised TypeError (tuple + int).
if distancia <= 200:
    print('O preço da passagem é {0}R$'.format(distancia * 0.50))
else:
    # Restores the arithmetic the commas garbled: full distance at 0.50 plus
    # a 0.45 surcharge per km beyond 200 — TODO confirm intent with exercise.
    print('O preço da passagem é {0}R$'.format(distancia * 0.50 + (distancia - 200) * 0.45))
| [
"[email protected]"
] | |
6ac8b762b137bfd7afa381c029c405f96348233c | 3679daa10ea95e90889e07e96e6c98c98f3751ea | /ipu/ipu/celery.py | e0092f1239df3677d4f604a596110f5cdb4e2224 | [] | no_license | rmn5124/ggsipu-placement-cell-portal | 0a8fef69c75ea444588046fcc7b38d7cf5c8e8e5 | 11876c2171bb07308719b205a69cd8330eb08052 | refs/heads/master | 2023-09-01T12:01:47.475984 | 2019-09-02T21:49:01 | 2019-09-02T21:49:01 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 553 | py | from __future__ import absolute_import
# Celery application bootstrap for the "ipu" Django project.
import os
from celery import Celery
# set the default Django settings module for the 'celery' program.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'ipu.settings')
app = Celery('ipu')
# Imported after DJANGO_SETTINGS_MODULE is set so Django resolves the
# intended settings module.
from django.conf import settings
# Using a string here means the worker will not have to
# pickle the object when using Windows.
app.config_from_object('django.conf:settings')
# Discover tasks in every installed Django app; the lambda defers reading
# INSTALLED_APPS until Django is configured.
app.autodiscover_tasks(lambda: settings.INSTALLED_APPS)
@app.task(bind=True)
def debug_task(self):
    # bind=True exposes the task instance as `self`, giving access to
    # self.request (task id, args, delivery info) for debugging.
    print('Request: {0!r}'.format(self.request))
| [
"[email protected]"
] | |
68e09f36ad8392a64d4ae77f98aa3cda11a6d3ec | 32dda10669e459cf37c31f426fa709001d2c75b0 | /leetcode_cn/solved/pg_760.py | 95eb11fb22234cfa2dd1887f120267b93b1fdfbb | [] | no_license | fastso/learning-python | 3300f50d06871245d0bfcbe9d201224580f70852 | d21dbd1b9f31017cdb1ed9b9ffd1e53ffe326572 | refs/heads/master | 2023-02-10T14:43:53.726247 | 2023-01-26T10:14:59 | 2023-01-26T10:14:59 | 193,454,718 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 145 | py | from typing import List
class Solution:
    def anagramMappings(self, A: List[int], B: List[int]) -> List[int]:
        """Return a mapping P where P[i] is an index j with B[j] == A[i].

        Bug fix: the original returned a lazy ``map`` object, violating the
        declared ``List[int]`` return type.  Also replaces the O(n^2)
        repeated ``B.index`` scans with a value->index table built once.
        """
        # Duplicates collapse to the last index seen, which is still a
        # valid anagram mapping (any index of an equal value is accepted).
        index_of = {value: i for i, value in enumerate(B)}
        return [index_of[a] for a in A]
| [
"[email protected]"
] | |
5f1d4560358d08cd78ed0b84edddcf92bef5e718 | d7016f69993570a1c55974582cda899ff70907ec | /sdk/compute/azure-mgmt-compute/azure/mgmt/compute/v2021_10_01/aio/operations/_galleries_operations.py | 83664872416614ce1489ca5f493e0ae712665525 | [
"LicenseRef-scancode-generic-cla",
"MIT",
"LGPL-2.1-or-later"
] | permissive | kurtzeborn/azure-sdk-for-python | 51ca636ad26ca51bc0c9e6865332781787e6f882 | b23e71b289c71f179b9cf9b8c75b1922833a542a | refs/heads/main | 2023-03-21T14:19:50.299852 | 2023-02-15T13:30:47 | 2023-02-15T13:30:47 | 157,927,277 | 0 | 0 | MIT | 2022-07-19T08:05:23 | 2018-11-16T22:15:30 | Python | UTF-8 | Python | false | false | 40,271 | py | # pylint: disable=too-many-lines
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
import sys
from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, cast, overload
import urllib.parse
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import (
ClientAuthenticationError,
HttpResponseError,
ResourceExistsError,
ResourceNotFoundError,
ResourceNotModifiedError,
map_error,
)
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse
from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
from azure.core.rest import HttpRequest
from azure.core.tracing.decorator import distributed_trace
from azure.core.tracing.decorator_async import distributed_trace_async
from azure.core.utils import case_insensitive_dict
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
from ... import models as _models
from ..._vendor import _convert_request
from ...operations._galleries_operations import (
build_create_or_update_request,
build_delete_request,
build_get_request,
build_list_by_resource_group_request,
build_list_request,
build_update_request,
)
if sys.version_info >= (3, 8):
from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports
else:
from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class GalleriesOperations:
"""
.. warning::
**DO NOT** instantiate this class directly.
Instead, you should access the following operations through
:class:`~azure.mgmt.compute.v2021_10_01.aio.ComputeManagementClient`'s
:attr:`galleries` attribute.
"""
models = _models
    def __init__(self, *args, **kwargs) -> None:
        # Generated operations-group plumbing: the service client hands over,
        # either positionally (client, config, serializer, deserializer) or by
        # keyword, the shared pipeline client, its configuration, and the
        # (de)serializers every operation below uses.
        input_args = list(args)
        self._client = input_args.pop(0) if input_args else kwargs.pop("client")
        self._config = input_args.pop(0) if input_args else kwargs.pop("config")
        self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer")
        self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer")
    async def _create_or_update_initial(
        self, resource_group_name: str, gallery_name: str, gallery: Union[_models.Gallery, IO], **kwargs: Any
    ) -> _models.Gallery:
        """Send the initial PUT of the create-or-update long-running operation.

        Serializes ``gallery`` (model or raw IO/bytes body), runs the request
        through the pipeline and deserializes a 200/201/202 response into a
        ``Gallery``.  Internal helper used by :meth:`begin_create_or_update`.
        """
        # Default HTTP-status -> exception mapping; callers may extend or
        # override it via the ``error_map`` kwarg.
        error_map = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})
        _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
        _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
        api_version: Literal["2021-10-01"] = kwargs.pop("api_version", _params.pop("api-version", "2021-10-01"))
        content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
        cls: ClsType[_models.Gallery] = kwargs.pop("cls", None)
        content_type = content_type or "application/json"
        _json = None
        _content = None
        # Raw bytes/stream bodies are sent as-is; models are serialized to JSON.
        if isinstance(gallery, (IO, bytes)):
            _content = gallery
        else:
            _json = self._serialize.body(gallery, "Gallery")
        request = build_create_or_update_request(
            resource_group_name=resource_group_name,
            gallery_name=gallery_name,
            subscription_id=self._config.subscription_id,
            api_version=api_version,
            content_type=content_type,
            json=_json,
            content=_content,
            template_url=self._create_or_update_initial.metadata["url"],
            headers=_headers,
            params=_params,
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)
        pipeline_response: PipelineResponse = await self._client._pipeline.run(  # pylint: disable=protected-access
            request, stream=False, **kwargs
        )
        response = pipeline_response.http_response
        # 200 = updated, 201 = created, 202 = accepted (LRO still running).
        if response.status_code not in [200, 201, 202]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
        if response.status_code == 200:
            deserialized = self._deserialize("Gallery", pipeline_response)
        if response.status_code == 201:
            deserialized = self._deserialize("Gallery", pipeline_response)
        if response.status_code == 202:
            deserialized = self._deserialize("Gallery", pipeline_response)
        # ``cls`` lets advanced callers receive the raw pipeline response too.
        if cls:
            return cls(pipeline_response, deserialized, {})  # type: ignore
        return deserialized  # type: ignore

    # ARM resource URL template used when building the request above.
    _create_or_update_initial.metadata = {
        "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/galleries/{galleryName}"
    }
    @overload
    async def begin_create_or_update(
        self,
        resource_group_name: str,
        gallery_name: str,
        gallery: _models.Gallery,
        *,
        content_type: str = "application/json",
        **kwargs: Any
    ) -> AsyncLROPoller[_models.Gallery]:
        """Create or update a Shared Image Gallery.

        Overload for a model-typed request body (serialized to JSON).

        :param resource_group_name: The name of the resource group. Required.
        :type resource_group_name: str
        :param gallery_name: The name of the Shared Image Gallery. The allowed characters are alphabets
         and numbers with dots and periods allowed in the middle. The maximum length is 80 characters.
         Required.
        :type gallery_name: str
        :param gallery: Parameters supplied to the create or update Shared Image Gallery operation.
         Required.
        :type gallery: ~azure.mgmt.compute.v2021_10_01.models.Gallery
        :keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
         Default value is "application/json".
        :paramtype content_type: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
         this operation to not poll, or pass in your own initialized polling object for a personal
         polling strategy.
        :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
         Retry-After header is present.
        :return: An instance of AsyncLROPoller that returns either Gallery or the result of
         cls(response)
        :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.compute.v2021_10_01.models.Gallery]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
    @overload
    async def begin_create_or_update(
        self,
        resource_group_name: str,
        gallery_name: str,
        gallery: IO,
        *,
        content_type: str = "application/json",
        **kwargs: Any
    ) -> AsyncLROPoller[_models.Gallery]:
        """Create or update a Shared Image Gallery.

        Overload for a raw binary/stream request body sent as-is.

        :param resource_group_name: The name of the resource group. Required.
        :type resource_group_name: str
        :param gallery_name: The name of the Shared Image Gallery. The allowed characters are alphabets
         and numbers with dots and periods allowed in the middle. The maximum length is 80 characters.
         Required.
        :type gallery_name: str
        :param gallery: Parameters supplied to the create or update Shared Image Gallery operation.
         Required.
        :type gallery: IO
        :keyword content_type: Body Parameter content-type. Content type parameter for binary body.
         Default value is "application/json".
        :paramtype content_type: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
         this operation to not poll, or pass in your own initialized polling object for a personal
         polling strategy.
        :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
         Retry-After header is present.
        :return: An instance of AsyncLROPoller that returns either Gallery or the result of
         cls(response)
        :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.compute.v2021_10_01.models.Gallery]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
    @distributed_trace_async
    async def begin_create_or_update(
        self, resource_group_name: str, gallery_name: str, gallery: Union[_models.Gallery, IO], **kwargs: Any
    ) -> AsyncLROPoller[_models.Gallery]:
        """Create or update a Shared Image Gallery.

        Dispatcher implementation behind the typed overloads above: sends the
        initial request via :meth:`_create_or_update_initial` (unless resuming
        from a continuation token) and wraps the result in an LRO poller.

        :param resource_group_name: The name of the resource group. Required.
        :type resource_group_name: str
        :param gallery_name: The name of the Shared Image Gallery. The allowed characters are alphabets
         and numbers with dots and periods allowed in the middle. The maximum length is 80 characters.
         Required.
        :type gallery_name: str
        :param gallery: Parameters supplied to the create or update Shared Image Gallery operation. Is
         either a model type or a IO type. Required.
        :type gallery: ~azure.mgmt.compute.v2021_10_01.models.Gallery or IO
        :keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
         Default value is None.
        :paramtype content_type: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
         this operation to not poll, or pass in your own initialized polling object for a personal
         polling strategy.
        :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
         Retry-After header is present.
        :return: An instance of AsyncLROPoller that returns either Gallery or the result of
         cls(response)
        :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.compute.v2021_10_01.models.Gallery]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
        _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
        api_version: Literal["2021-10-01"] = kwargs.pop("api_version", _params.pop("api-version", "2021-10-01"))
        content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
        cls: ClsType[_models.Gallery] = kwargs.pop("cls", None)
        polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True)
        lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
        cont_token: Optional[str] = kwargs.pop("continuation_token", None)
        # Only issue the initial PUT when not resuming from a saved poller state.
        if cont_token is None:
            raw_result = await self._create_or_update_initial(
                resource_group_name=resource_group_name,
                gallery_name=gallery_name,
                gallery=gallery,
                api_version=api_version,
                content_type=content_type,
                cls=lambda x, y, z: x,
                headers=_headers,
                params=_params,
                **kwargs
            )
        kwargs.pop("error_map", None)

        def get_long_running_output(pipeline_response):
            # Final-state callback: deserialize the terminal response.
            deserialized = self._deserialize("Gallery", pipeline_response)
            if cls:
                return cls(pipeline_response, deserialized, {})
            return deserialized

        # polling=True -> ARM polling; False -> no polling; else a custom method.
        if polling is True:
            polling_method: AsyncPollingMethod = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs))
        elif polling is False:
            polling_method = cast(AsyncPollingMethod, AsyncNoPolling())
        else:
            polling_method = polling
        if cont_token:
            return AsyncLROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output,
            )
        return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)  # type: ignore

    # ARM resource URL template shared with the initial-request builder.
    begin_create_or_update.metadata = {
        "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/galleries/{galleryName}"
    }
    async def _update_initial(
        self, resource_group_name: str, gallery_name: str, gallery: Union[_models.GalleryUpdate, IO], **kwargs: Any
    ) -> _models.Gallery:
        """Send the initial request of the update long-running operation.

        Serializes ``gallery`` (GalleryUpdate model or raw IO/bytes body) and
        deserializes the 200 response into a ``Gallery``.  Internal helper
        used by :meth:`begin_update`.
        """
        # Default HTTP-status -> exception mapping; callers may extend or
        # override it via the ``error_map`` kwarg.
        error_map = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})
        _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
        _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
        api_version: Literal["2021-10-01"] = kwargs.pop("api_version", _params.pop("api-version", "2021-10-01"))
        content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
        cls: ClsType[_models.Gallery] = kwargs.pop("cls", None)
        content_type = content_type or "application/json"
        _json = None
        _content = None
        # Raw bytes/stream bodies are sent as-is; models are serialized to JSON.
        if isinstance(gallery, (IO, bytes)):
            _content = gallery
        else:
            _json = self._serialize.body(gallery, "GalleryUpdate")
        request = build_update_request(
            resource_group_name=resource_group_name,
            gallery_name=gallery_name,
            subscription_id=self._config.subscription_id,
            api_version=api_version,
            content_type=content_type,
            json=_json,
            content=_content,
            template_url=self._update_initial.metadata["url"],
            headers=_headers,
            params=_params,
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)
        pipeline_response: PipelineResponse = await self._client._pipeline.run(  # pylint: disable=protected-access
            request, stream=False, **kwargs
        )
        response = pipeline_response.http_response
        # Update only succeeds synchronously with 200.
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)
        deserialized = self._deserialize("Gallery", pipeline_response)
        # ``cls`` lets advanced callers receive the raw pipeline response too.
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized

    # ARM resource URL template used when building the request above.
    _update_initial.metadata = {
        "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/galleries/{galleryName}"
    }
    @overload
    async def begin_update(
        self,
        resource_group_name: str,
        gallery_name: str,
        gallery: _models.GalleryUpdate,
        *,
        content_type: str = "application/json",
        **kwargs: Any
    ) -> AsyncLROPoller[_models.Gallery]:
        """Update a Shared Image Gallery.

        Overload for a model-typed request body (serialized to JSON).

        :param resource_group_name: The name of the resource group. Required.
        :type resource_group_name: str
        :param gallery_name: The name of the Shared Image Gallery. The allowed characters are alphabets
         and numbers with dots and periods allowed in the middle. The maximum length is 80 characters.
         Required.
        :type gallery_name: str
        :param gallery: Parameters supplied to the update Shared Image Gallery operation. Required.
        :type gallery: ~azure.mgmt.compute.v2021_10_01.models.GalleryUpdate
        :keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
         Default value is "application/json".
        :paramtype content_type: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
         this operation to not poll, or pass in your own initialized polling object for a personal
         polling strategy.
        :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
         Retry-After header is present.
        :return: An instance of AsyncLROPoller that returns either Gallery or the result of
         cls(response)
        :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.compute.v2021_10_01.models.Gallery]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
    @overload
    async def begin_update(
        self,
        resource_group_name: str,
        gallery_name: str,
        gallery: IO,
        *,
        content_type: str = "application/json",
        **kwargs: Any
    ) -> AsyncLROPoller[_models.Gallery]:
        """Update a Shared Image Gallery.

        Overload for a raw binary/stream request body sent as-is.

        :param resource_group_name: The name of the resource group. Required.
        :type resource_group_name: str
        :param gallery_name: The name of the Shared Image Gallery. The allowed characters are alphabets
         and numbers with dots and periods allowed in the middle. The maximum length is 80 characters.
         Required.
        :type gallery_name: str
        :param gallery: Parameters supplied to the update Shared Image Gallery operation. Required.
        :type gallery: IO
        :keyword content_type: Body Parameter content-type. Content type parameter for binary body.
         Default value is "application/json".
        :paramtype content_type: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
         this operation to not poll, or pass in your own initialized polling object for a personal
         polling strategy.
        :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
         Retry-After header is present.
        :return: An instance of AsyncLROPoller that returns either Gallery or the result of
         cls(response)
        :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.compute.v2021_10_01.models.Gallery]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
    @distributed_trace_async
    async def begin_update(
        self, resource_group_name: str, gallery_name: str, gallery: Union[_models.GalleryUpdate, IO], **kwargs: Any
    ) -> AsyncLROPoller[_models.Gallery]:
        """Update a Shared Image Gallery.

        Starts a long-running update operation and returns an AsyncLROPoller that
        polls it to completion.

        :param resource_group_name: The name of the resource group. Required.
        :type resource_group_name: str
        :param gallery_name: The name of the Shared Image Gallery. The allowed characters are alphabets
         and numbers with dots and periods allowed in the middle. The maximum length is 80 characters.
         Required.
        :type gallery_name: str
        :param gallery: Parameters supplied to the update Shared Image Gallery operation. Is either a
         model type or a IO type. Required.
        :type gallery: ~azure.mgmt.compute.v2021_10_01.models.GalleryUpdate or IO
        :keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
         Default value is None.
        :paramtype content_type: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
         this operation to not poll, or pass in your own initialized polling object for a personal
         polling strategy.
        :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
         Retry-After header is present.
        :return: An instance of AsyncLROPoller that returns either Gallery or the result of
         cls(response)
        :rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.compute.v2021_10_01.models.Gallery]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
        _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

        api_version: Literal["2021-10-01"] = kwargs.pop("api_version", _params.pop("api-version", "2021-10-01"))
        content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
        cls: ClsType[_models.Gallery] = kwargs.pop("cls", None)
        polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True)
        lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
        cont_token: Optional[str] = kwargs.pop("continuation_token", None)
        # Only issue the initial HTTP request when not resuming from a saved
        # continuation token.
        if cont_token is None:
            raw_result = await self._update_initial(
                resource_group_name=resource_group_name,
                gallery_name=gallery_name,
                gallery=gallery,
                api_version=api_version,
                content_type=content_type,
                # Pass the raw pipeline response through so the poller can read it.
                cls=lambda x, y, z: x,
                headers=_headers,
                params=_params,
                **kwargs
            )
        kwargs.pop("error_map", None)

        def get_long_running_output(pipeline_response):
            # Deserialize the terminal LRO response into the Gallery model,
            # honoring a caller-supplied `cls` post-processor if present.
            deserialized = self._deserialize("Gallery", pipeline_response)
            if cls:
                return cls(pipeline_response, deserialized, {})
            return deserialized

        # polling=True -> default ARM polling; False -> no polling; otherwise a
        # caller-provided polling method object.
        if polling is True:
            polling_method: AsyncPollingMethod = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs))
        elif polling is False:
            polling_method = cast(AsyncPollingMethod, AsyncNoPolling())
        else:
            polling_method = polling
        if cont_token:
            # Rehydrate an existing poller instead of starting a new operation.
            return AsyncLROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output,
            )
        return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)  # type: ignore

    begin_update.metadata = {
        "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/galleries/{galleryName}"
    }
    @distributed_trace_async
    async def get(
        self,
        resource_group_name: str,
        gallery_name: str,
        select: Optional[Union[str, _models.SelectPermissions]] = None,
        expand: Optional[Union[str, _models.GalleryExpandParams]] = None,
        **kwargs: Any
    ) -> _models.Gallery:
        """Retrieves information about a Shared Image Gallery.

        :param resource_group_name: The name of the resource group. Required.
        :type resource_group_name: str
        :param gallery_name: The name of the Shared Image Gallery. Required.
        :type gallery_name: str
        :param select: The select expression to apply on the operation. "Permissions" Default value is
         None.
        :type select: str or ~azure.mgmt.compute.v2021_10_01.models.SelectPermissions
        :param expand: The expand query option to apply on the operation. "SharingProfile/Groups"
         Default value is None.
        :type expand: str or ~azure.mgmt.compute.v2021_10_01.models.GalleryExpandParams
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: Gallery or the result of cls(response)
        :rtype: ~azure.mgmt.compute.v2021_10_01.models.Gallery
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        # Map common HTTP status codes to azure-core exception types; callers may
        # extend/override the mapping via the `error_map` kwarg.
        error_map = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})

        _headers = kwargs.pop("headers", {}) or {}
        _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

        api_version: Literal["2021-10-01"] = kwargs.pop("api_version", _params.pop("api-version", "2021-10-01"))
        cls: ClsType[_models.Gallery] = kwargs.pop("cls", None)

        # Build the GET request from the URL template, then normalize it and send
        # it through the client's pipeline.
        request = build_get_request(
            resource_group_name=resource_group_name,
            gallery_name=gallery_name,
            subscription_id=self._config.subscription_id,
            select=select,
            expand=expand,
            api_version=api_version,
            template_url=self.get.metadata["url"],
            headers=_headers,
            params=_params,
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)

        pipeline_response: PipelineResponse = await self._client._pipeline.run(  # pylint: disable=protected-access
            request, stream=False, **kwargs
        )

        response = pipeline_response.http_response

        # Only HTTP 200 is a success for this operation.
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        deserialized = self._deserialize("Gallery", pipeline_response)

        if cls:
            # Caller-supplied post-processor gets the raw response and the model.
            return cls(pipeline_response, deserialized, {})

        return deserialized

    get.metadata = {
        "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/galleries/{galleryName}"
    }
    async def _delete_initial(  # pylint: disable=inconsistent-return-statements
        self, resource_group_name: str, gallery_name: str, **kwargs: Any
    ) -> None:
        """Issue the initial DELETE request of the long-running delete operation.

        Internal helper for ``begin_delete``; returns None (or the result of a
        caller-supplied ``cls`` callback) and raises ``HttpResponseError`` on a
        non-success status.
        """
        # Map common HTTP status codes to azure-core exception types; callers may
        # extend/override the mapping via the `error_map` kwarg.
        error_map = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})

        _headers = kwargs.pop("headers", {}) or {}
        _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

        api_version: Literal["2021-10-01"] = kwargs.pop("api_version", _params.pop("api-version", "2021-10-01"))
        cls: ClsType[None] = kwargs.pop("cls", None)

        # Build, normalize and send the DELETE request through the pipeline.
        request = build_delete_request(
            resource_group_name=resource_group_name,
            gallery_name=gallery_name,
            subscription_id=self._config.subscription_id,
            api_version=api_version,
            template_url=self._delete_initial.metadata["url"],
            headers=_headers,
            params=_params,
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)

        pipeline_response: PipelineResponse = await self._client._pipeline.run(  # pylint: disable=protected-access
            request, stream=False, **kwargs
        )

        response = pipeline_response.http_response

        # 200/202/204 are all acceptable for the initial DELETE of an LRO.
        if response.status_code not in [200, 202, 204]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        if cls:
            return cls(pipeline_response, None, {})

    _delete_initial.metadata = {
        "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/galleries/{galleryName}"
    }
    @distributed_trace_async
    async def begin_delete(self, resource_group_name: str, gallery_name: str, **kwargs: Any) -> AsyncLROPoller[None]:
        """Delete a Shared Image Gallery.

        :param resource_group_name: The name of the resource group. Required.
        :type resource_group_name: str
        :param gallery_name: The name of the Shared Image Gallery to be deleted. Required.
        :type gallery_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: By default, your polling method will be AsyncARMPolling. Pass in False for
         this operation to not poll, or pass in your own initialized polling object for a personal
         polling strategy.
        :paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no
         Retry-After header is present.
        :return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
        :rtype: ~azure.core.polling.AsyncLROPoller[None]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        _headers = kwargs.pop("headers", {}) or {}
        _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

        api_version: Literal["2021-10-01"] = kwargs.pop("api_version", _params.pop("api-version", "2021-10-01"))
        cls: ClsType[None] = kwargs.pop("cls", None)
        polling: Union[bool, AsyncPollingMethod] = kwargs.pop("polling", True)
        lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
        cont_token: Optional[str] = kwargs.pop("continuation_token", None)
        # Only issue the initial HTTP request when not resuming from a saved
        # continuation token.
        if cont_token is None:
            raw_result = await self._delete_initial(  # type: ignore
                resource_group_name=resource_group_name,
                gallery_name=gallery_name,
                api_version=api_version,
                # Pass the raw pipeline response through so the poller can read it.
                cls=lambda x, y, z: x,
                headers=_headers,
                params=_params,
                **kwargs
            )
        kwargs.pop("error_map", None)

        def get_long_running_output(pipeline_response):  # pylint: disable=inconsistent-return-statements
            # Delete produces no body; only invoke a caller-supplied callback.
            if cls:
                return cls(pipeline_response, None, {})

        # polling=True -> default ARM polling; False -> no polling; otherwise a
        # caller-provided polling method object.
        if polling is True:
            polling_method: AsyncPollingMethod = cast(AsyncPollingMethod, AsyncARMPolling(lro_delay, **kwargs))
        elif polling is False:
            polling_method = cast(AsyncPollingMethod, AsyncNoPolling())
        else:
            polling_method = polling
        if cont_token:
            # Rehydrate an existing poller instead of starting a new operation.
            return AsyncLROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output,
            )
        return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)  # type: ignore

    begin_delete.metadata = {
        "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/galleries/{galleryName}"
    }
    @distributed_trace
    def list_by_resource_group(self, resource_group_name: str, **kwargs: Any) -> AsyncIterable["_models.Gallery"]:
        """List galleries under a resource group.

        :param resource_group_name: The name of the resource group. Required.
        :type resource_group_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either Gallery or the result of cls(response)
        :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.compute.v2021_10_01.models.Gallery]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        _headers = kwargs.pop("headers", {}) or {}
        _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

        api_version: Literal["2021-10-01"] = kwargs.pop("api_version", _params.pop("api-version", "2021-10-01"))
        cls: ClsType[_models.GalleryList] = kwargs.pop("cls", None)

        error_map = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})

        def prepare_request(next_link=None):
            # First page: build the request from the URL template.
            if not next_link:
                request = build_list_by_resource_group_request(
                    resource_group_name=resource_group_name,
                    subscription_id=self._config.subscription_id,
                    api_version=api_version,
                    template_url=self.list_by_resource_group.metadata["url"],
                    headers=_headers,
                    params=_params,
                )
                request = _convert_request(request)
                request.url = self._client.format_url(request.url)

            else:
                # make call to next link with the client's api-version
                # (the service's next_link may carry a different/absent one).
                _parsed_next_link = urllib.parse.urlparse(next_link)
                _next_request_params = case_insensitive_dict(
                    {
                        key: [urllib.parse.quote(v) for v in value]
                        for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items()
                    }
                )
                _next_request_params["api-version"] = self._config.api_version
                request = HttpRequest(
                    "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
                )
                request = _convert_request(request)
                request.url = self._client.format_url(request.url)
                request.method = "GET"
            return request

        async def extract_data(pipeline_response):
            # Deserialize one page and return (next_link, items) for the pager.
            deserialized = self._deserialize("GalleryList", pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)  # type: ignore
            return deserialized.next_link or None, AsyncList(list_of_elem)

        async def get_next(next_link=None):
            # Fetch one page, raising on any non-200 status.
            request = prepare_request(next_link)

            pipeline_response: PipelineResponse = await self._client._pipeline.run(  # pylint: disable=protected-access
                request, stream=False, **kwargs
            )
            response = pipeline_response.http_response

            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, error_format=ARMErrorFormat)

            return pipeline_response

        return AsyncItemPaged(get_next, extract_data)

    list_by_resource_group.metadata = {
        "url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Compute/galleries"
    }
    @distributed_trace
    def list(self, **kwargs: Any) -> AsyncIterable["_models.Gallery"]:
        """List galleries under a subscription.

        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either Gallery or the result of cls(response)
        :rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.compute.v2021_10_01.models.Gallery]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        _headers = kwargs.pop("headers", {}) or {}
        _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

        api_version: Literal["2021-10-01"] = kwargs.pop("api_version", _params.pop("api-version", "2021-10-01"))
        cls: ClsType[_models.GalleryList] = kwargs.pop("cls", None)

        error_map = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})

        def prepare_request(next_link=None):
            # First page: build the request from the URL template.
            if not next_link:
                request = build_list_request(
                    subscription_id=self._config.subscription_id,
                    api_version=api_version,
                    template_url=self.list.metadata["url"],
                    headers=_headers,
                    params=_params,
                )
                request = _convert_request(request)
                request.url = self._client.format_url(request.url)

            else:
                # make call to next link with the client's api-version
                # (the service's next_link may carry a different/absent one).
                _parsed_next_link = urllib.parse.urlparse(next_link)
                _next_request_params = case_insensitive_dict(
                    {
                        key: [urllib.parse.quote(v) for v in value]
                        for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items()
                    }
                )
                _next_request_params["api-version"] = self._config.api_version
                request = HttpRequest(
                    "GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
                )
                request = _convert_request(request)
                request.url = self._client.format_url(request.url)
                request.method = "GET"
            return request

        async def extract_data(pipeline_response):
            # Deserialize one page and return (next_link, items) for the pager.
            deserialized = self._deserialize("GalleryList", pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)  # type: ignore
            return deserialized.next_link or None, AsyncList(list_of_elem)

        async def get_next(next_link=None):
            # Fetch one page, raising on any non-200 status.
            request = prepare_request(next_link)

            pipeline_response: PipelineResponse = await self._client._pipeline.run(  # pylint: disable=protected-access
                request, stream=False, **kwargs
            )
            response = pipeline_response.http_response

            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, error_format=ARMErrorFormat)

            return pipeline_response

        return AsyncItemPaged(get_next, extract_data)

    list.metadata = {"url": "/subscriptions/{subscriptionId}/providers/Microsoft.Compute/galleries"}
| [
"[email protected]"
] | |
52e2e83c0b62474742d839853d740707a0b7d12e | 951e433b25a25afeea4d9b45994a57e0a6044144 | /LeetCode/动态规划_416_分割等和子集_01背包.py | 133d61bbb2b69d6c32fa2c0755d0dd8b5988b897 | [] | no_license | EricaEmmm/CodePython | 7c401073e0a9b7cd15f9f4a553f0aa3db1a951a3 | d52aa2a0bf71b5e7934ee7bff70d593a41b7e644 | refs/heads/master | 2020-05-31T14:00:34.266117 | 2019-09-22T09:48:23 | 2019-09-22T09:48:23 | 190,318,878 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,174 | py | '''
给定一个只包含正整数的非空数组。是否可以将这个数组分割成两个子集,使得两个子集的元素和相等。
注意:每个数组中的元素不会超过100,数组的大小不会超过200
示例1:
输入: [1, 5, 11, 5]
输出: true
解释: 数组可以分割成[1, 5, 5]和[11].
示例2:
输入: [1, 2, 3, 5]
输出: false
解释: 数组不能分割成两个元素和相等的子集.
'''
'''
问题转化:给定一个只包含正整数的非空数组,是否可以从这个数组中挑选出一些正整数,
使得这些数的和等于整个数组元素的和的一半。
'''
class Solution(object):
    """LeetCode 416 — Partition Equal Subset Sum.

    Decide whether a non-empty array of positive integers can be split into two
    subsets with equal sums, i.e. whether some subset sums to total // 2
    (a 0/1-knapsack feasibility problem).

    Fix vs. the original: both solvers used to call ``nums.insert(0, 0)``,
    mutating the caller's list as a side effect (and corrupting the input when
    both solvers were run on the same list). The input is now left untouched.
    """

    def canPartition1(self, nums):
        """2-D DP: dp[i][j] is True iff some subset of the first i items sums to j.

        Transition: dp[i][j] = dp[i-1][j] or dp[i-1][j - items[i]].
        Time O(N*C), space O(N*C), where C = sum(nums) // 2.
        """
        total = sum(nums)
        if total % 2 == 1:
            # An odd total can never be split into two equal halves.
            return False
        target = total // 2
        # Prepend a dummy 0 on a copy for 1-based indexing; the caller's list
        # is not modified.
        items = [0] + list(nums)
        dp = [[False] * (target + 1) for _ in range(len(items))]
        for i in range(len(items)):
            dp[i][0] = True  # the empty subset always sums to 0
        for i in range(1, len(items)):
            for j in range(target + 1):
                if j >= items[i]:
                    dp[i][j] = dp[i - 1][j] or dp[i - 1][j - items[i]]
                else:
                    dp[i][j] = dp[i - 1][j]
        return dp[-1][-1]

    def canPartition2(self, nums):
        """1-D DP (rolling array): dp[j] is True iff some subset sums to j.

        Transition: dp[j] = dp[j] or dp[j - num], iterating j downwards so each
        item is used at most once. Time O(N*C), space O(C).
        """
        total = sum(nums)
        if total % 2 == 1:
            return False
        target = total // 2
        dp = [False] * (target + 1)
        dp[0] = True
        for num in nums:
            # Descend so dp[j - num] still refers to the previous item row.
            for j in range(target, num - 1, -1):
                dp[j] = dp[j] or dp[j - num]
        return dp[-1]

    def canPartition(self, nums):
        """Public entry point; delegates to the space-optimized solver."""
        return self.canPartition2(nums)
if __name__ == '__main__':
    # Quick manual check: [1, 5, 11, 5] splits into [1, 5, 5] and [11].
    solver = Solution()
    sample = [1, 5, 11, 5]  # alternative: [1, 2, 3, 5]
    print(solver.canPartition(sample))
| [
"[email protected]"
] | |
d5ec5a773a92e6f187149f90d6b37bbd36997335 | 9743d5fd24822f79c156ad112229e25adb9ed6f6 | /xai/brain/wordbase/otherforms/_physiotherapists.py | 28647da34786753adc621542f6a82cc0400f34bb | [
"MIT"
] | permissive | cash2one/xai | de7adad1758f50dd6786bf0111e71a903f039b64 | e76f12c9f4dcf3ac1c7c08b0cc8844c0b0a104b6 | refs/heads/master | 2021-01-19T12:33:54.964379 | 2017-01-28T02:00:50 | 2017-01-28T02:00:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 258 | py |
#calss header
class _PHYSIOTHERAPISTS():
def __init__(self,):
self.name = "PHYSIOTHERAPISTS"
self.definitions = physiotherapist
self.parents = []
self.childen = []
self.properties = []
self.jsondata = {}
self.basic = ['physiotherapist']
| [
"[email protected]"
] | |
ca9f1453462fb30398cb816eb03aee011bc9590f | 9b20161b91400238b0c6e6ee3282a328d42935e2 | /tensorflow_datasets/text/goemotions.py | ba592ec175c2b47c840ccc53603110ff9efa364a | [
"Apache-2.0"
] | permissive | okyanusoz/datasets | 61c0ced07c420d7e900080e851890def74a37d94 | 8997c4140cd4fc145f0693787b1da78691930459 | refs/heads/master | 2023-05-31T23:19:30.153499 | 2021-05-06T19:56:49 | 2021-05-06T19:58:56 | 365,308,067 | 1 | 1 | Apache-2.0 | 2021-07-04T11:15:13 | 2021-05-07T17:32:53 | null | UTF-8 | Python | false | false | 5,173 | py | # coding=utf-8
# Copyright 2021 The TensorFlow Datasets Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""goemotions dataset."""
import csv
import tensorflow.compat.v2 as tf
import tensorflow_datasets.public_api as tfds
_CITATION = """
@inproceedings{demszky-2020-goemotions,
title = "{G}o{E}motions: A Dataset of Fine-Grained Emotions",
author = "Demszky, Dorottya and
Movshovitz-Attias, Dana and
Ko, Jeongwoo and
Cowen, Alan and
Nemade, Gaurav and
Ravi, Sujith",
booktitle = "Proceedings of the 58th Annual Meeting of the Association for Computational Linguistics",
month = jul,
year = "2020",
address = "Online",
publisher = "Association for Computational Linguistics",
url = "https://www.aclweb.org/anthology/2020.acl-main.372",
pages = "4040--4054",
}
"""
_DESCRIPTION = """
The GoEmotions dataset contains 58k carefully curated Reddit comments labeled
for 27 emotion categories or Neutral. The emotion categories are admiration,
amusement, anger, annoyance, approval, caring, confusion, curiosity, desire,
disappointment, disapproval, disgust, embarrassment, excitement, fear,
gratitude, grief, joy, love, nervousness, optimism, pride, realization, relief,
remorse, sadness, surprise.
"""
# Raw TSV splits published in the google-research GoEmotions repository.
_URL_TRAIN = 'https://github.com/google-research/google-research/raw/master/goemotions/data/train.tsv'
_URL_DEV = 'https://github.com/google-research/google-research/raw/master/goemotions/data/dev.tsv'
_URL_TEST = 'https://github.com/google-research/google-research/raw/master/goemotions/data/test.tsv'

# Feature key holding the raw Reddit comment text.
_TEXT_LABEL = 'comment_text'
# 27 emotion categories plus 'neutral'. Order matters: the TSV's 'emotion_ids'
# column stores comma-separated indices into this list.
_EMOTION_LABELS = [
    'admiration', 'amusement', 'anger', 'annoyance', 'approval', 'caring',
    'confusion', 'curiosity', 'desire', 'disappointment', 'disapproval',
    'disgust', 'embarrassment', 'excitement', 'fear', 'gratitude', 'grief',
    'joy', 'love', 'nervousness', 'optimism', 'pride', 'realization', 'relief',
    'remorse', 'sadness', 'surprise', 'neutral'
]
class Goemotions(tfds.core.GeneratorBasedBuilder):
    """Dataset of Reddit comments with one or more emotion labels.

    Each example pairs a comment text with 28 boolean features, one per entry
    in ``_EMOTION_LABELS`` (27 emotions + neutral), suitable for multi-label
    emotion classification.
    """

    VERSION = tfds.core.Version('0.1.0')

    def _info(self):
        """Returns information on the GoEmotions dataset."""
        features = {_TEXT_LABEL: tfds.features.Text()}
        # One boolean target per emotion label (multi-label setup).
        for label in _EMOTION_LABELS:
            features[label] = tf.bool
        return tfds.core.DatasetInfo(
            builder=self,
            description=_DESCRIPTION,
            features=tfds.features.FeaturesDict(features),
            # Each emotion can be used for single-label classification.
            supervised_keys=None,
            homepage='https://github.com/google-research/google-research/tree/master/goemotions',
            citation=_CITATION,
        )

    def _split_generators(self, dl_manager):
        """Downloads the three TSV files and returns train/validation/test splits."""
        dl_paths = dl_manager.download({
            'train': _URL_TRAIN,
            'test': _URL_TEST,
            'dev': _URL_DEV,
        })

        return [
            tfds.core.SplitGenerator(
                name=tfds.Split.TRAIN,
                gen_kwargs={
                    'filename': dl_paths['train'],
                },
            ),
            tfds.core.SplitGenerator(
                name=tfds.Split.VALIDATION,
                gen_kwargs={
                    'filename': dl_paths['dev'],
                },
            ),
            tfds.core.SplitGenerator(
                name=tfds.Split.TEST,
                gen_kwargs={
                    'filename': dl_paths['test'],
                },
            ),
        ]

    def _parse_row_as_example(self, row):
        """Converts one TSV row into a feature dict; returns {} for malformed rows."""
        example = {}
        if len(row) != 3:
            # Unexpected column count (e.g. extra fields collected by the
            # DictReader): return the empty dict so the caller skips the row.
            return example
        example[_TEXT_LABEL] = row['comment_text']
        # 'emotion_ids' is a comma-separated list of indices into _EMOTION_LABELS.
        for emotion_id in row['emotion_ids'].split(','):
            example[_EMOTION_LABELS[int(emotion_id)]] = True
        # Every label absent from the row is an explicit negative.
        for label in _EMOTION_LABELS:
            example.setdefault(label, False)
        return example

    def _generate_examples(self, filename):
        """Yields examples.

        Each example contains a text input with the relevant emotion labels.

        Args:
          filename: the path of the file to be read for this split.

        Yields:
          A dictionary of features, containing the comment text and, for each
          emotions label, True/False depending on whether it is a label for the
          input.
        """
        fieldnames = ['comment_text', 'emotion_ids', 'comment_id']
        with tf.io.gfile.GFile(filename) as f:
            reader = csv.DictReader(f, fieldnames=fieldnames, delimiter='\t')
            for row in reader:
                example = self._parse_row_as_example(row)
                if example:
                    yield row['comment_id'], example
| [
"[email protected]"
] | |
73d1e09672f5b4289a29a9587a67c4a445a4ee17 | 15f321878face2af9317363c5f6de1e5ddd9b749 | /solutions_python/Problem_206/622.py | 1beb0d17b042c1152df8dc35b3110f891c66ab3e | [] | no_license | dr-dos-ok/Code_Jam_Webscraper | c06fd59870842664cd79c41eb460a09553e1c80a | 26a35bf114a3aa30fc4c677ef069d95f41665cc0 | refs/heads/master | 2020-04-06T08:17:40.938460 | 2018-10-14T10:12:47 | 2018-10-14T10:12:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 326 | py | t = int(input())
for i in range(t):
d,n = map(int,raw_input().strip().split())
ls = list()
for j in range(n):
k,s = map(int,raw_input().strip().split())
t = float(float(d-k)/float(s))
ls.append(t)
spd = float(float(d)/float(max(ls)))
print "Case #%d:"%(i+1),spd
| [
"[email protected]"
] | |
4c119749b12f3c1896f9fdb5035d43f4af714600 | 137832600734c4a3a16966bbaba19d3540378f9a | /msoffcrypto-crack.py | 172628155e208b2e9791c2750708fc6dd59a2421 | [] | no_license | DidierStevens/DidierStevensSuite | e824354c80f5b7aae4dfb6e55f60178eb9ae208c | 8190354314d6f42c9ddc477a795029dc446176c5 | refs/heads/master | 2023-09-01T20:11:55.341694 | 2023-08-29T10:26:39 | 2023-08-29T10:26:39 | 35,275,445 | 1,670 | 554 | null | 2023-06-04T22:54:40 | 2015-05-08T11:21:00 | Python | UTF-8 | Python | false | false | 81,533 | py | #!/usr/bin/env python
from __future__ import print_function
__description__ = 'Crack MS Office document password'
__author__ = 'Didier Stevens'
__version__ = '0.0.5'
__date__ = '2020/03/29'
"""
Source code put in the public domain by Didier Stevens, no Copyright
https://DidierStevens.com
Use at your own risk
History:
2018/12/30: start
2019/01/05: 0.0.2 added option -c and -e; password VelvetSweatshop
2019/01/22: 0.0.3 fixed agile decryption (Crypto version 4.4: Agile Encryption) bug by adding file.decrypt ...
2019/08/31: 0.0.4 added option -r
2020/03/29: 0.0.5 added -p #f
Todo:
"""
import optparse
import gzip
import time
import zipfile
import sys
import os
import textwrap
if sys.version_info[0] >= 3:
from io import BytesIO as DataIO
else:
from cStringIO import StringIO as DataIO
try:
import msoffcrypto
except:
print('This program requires module msoffcrypto.')
print("You can get it from GitHub: https://github.com/nolze/msoffcrypto-tool\n")
exit(-1)
MALWARE_PASSWORD = 'infected'
def PrintManual():
    """Print the embedded user manual to stdout, wrapping each line to the
    default textwrap width (the manual text itself must not be altered)."""
    manual = '''
Manual:
This tool relies completely on Python module msoffcrypto (https://github.com/nolze/msoffcrypto-tool) to decrypt MS Office documents.
It takes one input file (an encrypted MS Office documents): it can be provided as argument or piped via stdin. This input file can also be a password protected ZIP file containing an encrypted MS Office document.
The password for this ZIP file is "infected" (without double quotes) by default, but can be changed with option --password.
Without any further options, the tool will proceed with a dictonary attack to recover the password of the encrypted MS Office document. The passwords for this dictionary are taken from an internal list.
When a matching password is found, it will be printed and the tool will stop the dictionary attack.
To provide your own password list for the dictionary attack, use option -p to provide the filename of a text file with passwords. This text file may be compressed with gzip, and the tool will decompress the file in memory.
To generate a password list based on the filename, use option -p #f. This will generate a dictionary of all possible substrings of the filename.
Another method to provide potential passwords, is using option -e: extractpasswords. You use this option with a text file, and the tool will extract all potential passwords from this text file and use as a dictionary. Potential passwords are space-delimited strings found inside the text file. Potential passwords that are surrounded by quotes (single and double) and/or follow after word "password", are put at the beginning of the list of potential passwords to be tested in the dictionay attack.
One use case for option -e, is an email with password protected attachment: the password is probably mentioned in the message of the email, option -e can be used to generate a dictionary of passwords to try from this message.
Option -r will apply rules to the list of passwords to create derived passwords. The only rule for the moment is swapcase: swap the case of the password.
For example, list [Secret PASSWORD] becomes list [Secret PASSWORD sECRET password] when option -r is used.
When a password has been found, option -c can be used to run the program again with the cracked password, and thus avoid the delay caused by the dictionary attack.
The tool can also decrypt the provided MS Office document if the password is recovered: use option -o to decrypt the document and give the filename for the decrypted document. If you provide - as filename, the decrypted document will be outputed to stdout.
Since this is a Python tool based on a Python library, don't except fast password recovery. This is more a convenience program.
'''
    # Wrap every line individually so long paragraphs fit the terminal width.
    for line in manual.split('\n'):
        print(textwrap.fill(line))
# Convert a str to bytes when running under Python 3.
def C2BIP3(string):
    """Return *string* as a bytes object on Python 3; on Python 2, where str is
    already a byte string, return it unchanged."""
    if sys.version_info[0] <= 2:
        return string
    return bytes(ord(character) for character in string)
def IfWIN32SetBinary(io):
    """Switch the given file object to binary mode on Windows; a no-op on every
    other platform (where no text/binary distinction exists at this level)."""
    if sys.platform != 'win32':
        return
    import msvcrt
    msvcrt.setmode(io.fileno(), os.O_BINARY)
def File2Strings(filename):
    """Read a text file and return its lines with trailing CR/LF stripped.

    Files whose extension is .gz are transparently decompressed. Returns None
    when the file cannot be opened or read (callers treat None as "no list").
    """
    try:
        if os.path.splitext(filename)[1].lower() == '.gz':
            if sys.version_info[0] > 2:
                # Py3 fix: GzipFile opened 'rb' yields bytes, and the str-based
                # rstrip below would raise (silently returning None). Open the
                # archive in text mode instead.
                f = gzip.open(filename, 'rt')
            else:
                f = gzip.GzipFile(filename, 'rb')
        else:
            f = open(filename, 'r')
    except Exception:
        return None
    try:
        # Materialize a list so the result is reusable under Python 3, where
        # map() returns a single-use iterator.
        return list(map(lambda line: line.rstrip('\n\r'), f.readlines()))
    except Exception:
        return None
    finally:
        f.close()
def GetDictionary(passwordfile, filename):
if passwordfile == '#f':
result = []
for iIter1 in range(len(filename)):
password = filename[:iIter1 + 1]
for iIter2 in range(len(password)):
result.append(password[iIter2:])
return result
elif passwordfile != '':
return File2Strings(passwordfile)
else:
# https://github.com/magnumripper/JohnTheRipper/blob/bleeding-jumbo/run/password.lst
return [
'infected',
'P@ssw0rd',
'VelvetSweatshop',
'123456',
'12345',
'password',
'password1',
'123456789',
'12345678',
'1234567890',
'abc123',
'computer',
'tigger',
'1234',
'qwerty',
'money',
'carmen',
'mickey',
'secret',
'summer',
'internet',
'a1b2c3',
'123',
'service',
'canada',
'hello',
'ranger',
'shadow',
'baseball',
'donald',
'harley',
'hockey',
'letmein',
'maggie',
'mike',
'mustang',
'snoopy',
'buster',
'dragon',
'jordan',
'michael',
'michelle',
'mindy',
'patrick',
'123abc',
'andrew',
'bear',
'calvin',
'changeme',
'diamond',
'fuckme',
'fuckyou',
'matthew',
'miller',
'tiger',
'trustno1',
'alex',
'apple',
'avalon',
'brandy',
'chelsea',
'coffee',
'falcon',
'freedom',
'gandalf',
'green',
'helpme',
'linda',
'magic',
'merlin',
'newyork',
'soccer',
'thomas',
'wizard',
'asdfgh',
'bandit',
'batman',
'boris',
'butthead',
'dorothy',
'eeyore',
'fishing',
'football',
'george',
'happy',
'iloveyou',
'jennifer',
'jonathan',
'love',
'marina',
'master',
'missy',
'monday',
'monkey',
'natasha',
'ncc1701',
'pamela',
'pepper',
'piglet',
'poohbear',
'pookie',
'rabbit',
'rachel',
'rocket',
'rose',
'smile',
'sparky',
'spring',
'steven',
'success',
'sunshine',
'victoria',
'whatever',
'zapata',
'8675309',
'amanda',
'andy',
'angel',
'august',
'barney',
'biteme',
'boomer',
'brian',
'casey',
'cowboy',
'delta',
'doctor',
'fisher',
'island',
'john',
'joshua',
'karen',
'marley',
'orange',
'please',
'rascal',
'richard',
'sarah',
'scooter',
'shalom',
'silver',
'skippy',
'stanley',
'taylor',
'welcome',
'zephyr',
'111111',
'aaaaaa',
'access',
'albert',
'alexander',
'andrea',
'anna',
'anthony',
'asdfjkl;',
'ashley',
'basketball',
'beavis',
'black',
'bob',
'booboo',
'bradley',
'brandon',
'buddy',
'caitlin',
'camaro',
'charlie',
'chicken',
'chris',
'cindy',
'cricket',
'dakota',
'dallas',
'daniel',
'david',
'debbie',
'dolphin',
'elephant',
'emily',
'friend',
'fucker',
'ginger',
'goodluck',
'hammer',
'heather',
'iceman',
'jason',
'jessica',
'jesus',
'joseph',
'jupiter',
'justin',
'kevin',
'knight',
'lacrosse',
'lakers',
'lizard',
'madison',
'mary',
'mother',
'muffin',
'murphy',
'nirvana',
'paris',
'pentium',
'phoenix',
'picture',
'rainbow',
'sandy',
'saturn',
'scott',
'shannon',
'shithead',
'skeeter',
'sophie',
'special',
'stephanie',
'stephen',
'steve',
'sweetie',
'teacher',
'tennis',
'test',
'test123',
'tommy',
'topgun',
'tristan',
'wally',
'william',
'wilson',
'1q2w3e',
'654321',
'666666',
'a12345',
'a1b2c3d4',
'alpha',
'amber',
'angela',
'angie',
'archie',
'asdf',
'blazer',
'bond007',
'booger',
'charles',
'christin',
'claire',
'control',
'danny',
'david1',
'dennis',
'digital',
'disney',
'edward',
'elvis',
'felix',
'flipper',
'franklin',
'frodo',
'honda',
'horses',
'hunter',
'indigo',
'james',
'jasper',
'jeremy',
'julian',
'kelsey',
'killer',
'lauren',
'marie',
'maryjane',
'matrix',
'maverick',
'mayday',
'mercury',
'mitchell',
'morgan',
'mountain',
'niners',
'nothing',
'oliver',
'peace',
'peanut',
'pearljam',
'phantom',
'popcorn',
'princess',
'psycho',
'pumpkin',
'purple',
'randy',
'rebecca',
'reddog',
'robert',
'rocky',
'roses',
'salmon',
'samson',
'sharon',
'sierra',
'smokey',
'startrek',
'steelers',
'stimpy',
'sunflower',
'superman',
'support',
'sydney',
'techno',
'walter',
'willie',
'willow',
'winner',
'ziggy',
'zxcvbnm',
'alaska',
'alexis',
'alice',
'animal',
'apples',
'barbara',
'benjamin',
'billy',
'blue',
'bluebird',
'bobby',
'bonnie',
'bubba',
'camera',
'chocolate',
'clark',
'claudia',
'cocacola',
'compton',
'connect',
'cookie',
'cruise',
'douglas',
'dreamer',
'dreams',
'duckie',
'eagles',
'eddie',
'einstein',
'enter',
'explorer',
'faith',
'family',
'ferrari',
'flamingo',
'flower',
'foxtrot',
'francis',
'freddy',
'friday',
'froggy',
'giants',
'gizmo',
'global',
'goofy',
'happy1',
'hendrix',
'henry',
'herman',
'homer',
'honey',
'house',
'houston',
'iguana',
'indiana',
'insane',
'inside',
'irish',
'ironman',
'jake',
'jasmin',
'jeanne',
'jerry',
'joey',
'justice',
'katherine',
'kermit',
'kitty',
'koala',
'larry',
'leslie',
'logan',
'lucky',
'mark',
'martin',
'matt',
'minnie',
'misty',
'mitch',
'mouse',
'nancy',
'nascar',
'nelson',
'pantera',
'parker',
'penguin',
'peter',
'piano',
'pizza',
'prince',
'punkin',
'pyramid',
'raymond',
'robin',
'roger',
'rosebud',
'route66',
'royal',
'running',
'sadie',
'sasha',
'security',
'sheena',
'sheila',
'skiing',
'snapple',
'snowball',
'sparrow',
'spencer',
'spike',
'star',
'stealth',
'student',
'sunny',
'sylvia',
'tamara',
'taurus',
'teresa',
'theresa',
'thunderbird',
'tigers',
'tony',
'toyota',
'travel',
'tuesday',
'victory',
'viper1',
'wesley',
'whisky',
'winnie',
'winter',
'wolves',
'xyz123',
'zorro',
'123123',
'1234567',
'696969',
'888888',
'Anthony',
'Joshua',
'Matthew',
'Tigger',
'aaron',
'abby',
'abcdef',
'adidas',
'adrian',
'alfred',
'arthur',
'athena',
'austin',
'awesome',
'badger',
'bamboo',
'beagle',
'bears',
'beatles',
'beautiful',
'beaver',
'benny',
'bigmac',
'bingo',
'bitch',
'blonde',
'boogie',
'boston',
'brenda',
'bright',
'bubba1',
'bubbles',
'buffy',
'button',
'buttons',
'cactus',
'candy',
'captain',
'carlos',
'caroline',
'carrie',
'casper',
'catch22',
'chance',
'charity',
'charlotte',
'cheese',
'cheryl',
'chloe',
'chris1',
'clancy',
'compaq',
'conrad',
'cooper',
'cooter',
'copper',
'cosmos',
'cougar',
'cracker',
'crawford',
'crystal',
'curtis',
'cyclone',
'dance',
'diablo',
'dollars',
'dookie',
'dumbass',
'dundee',
'elizabeth',
'eric',
'europe',
'farmer',
'firebird',
'fletcher',
'fluffy',
'france',
'freak1',
'friends',
'fuckoff',
'gabriel',
'galaxy',
'gambit',
'garden',
'garfield',
'garnet',
'genesis',
'genius',
'godzilla',
'golfer',
'goober',
'grace',
'greenday',
'groovy',
'grover',
'guitar',
'hacker',
'harry',
'hazel',
'hector',
'herbert',
'horizon',
'hornet',
'howard',
'icecream',
'imagine',
'impala',
'jack',
'janice',
'jasmine',
'jason1',
'jeanette',
'jeffrey',
'jenifer',
'jenni',
'jesus1',
'jewels',
'joker',
'julie',
'julie1',
'junior',
'justin1',
'kathleen',
'keith',
'kelly',
'kelly1',
'kennedy',
'kevin1',
'knicks',
'larry1',
'leonard',
'lestat',
'library',
'lincoln',
'lionking',
'london',
'louise',
'lucky1',
'lucy',
'maddog',
'margaret',
'mariposa',
'marlboro',
'martin1',
'marty',
'master1',
'mensuck',
'mercedes',
'metal',
'midori',
'mikey',
'millie',
'mirage',
'molly',
'monet',
'money1',
'monica',
'monopoly',
'mookie',
'moose',
'moroni',
'music',
'naomi',
'nathan',
'nguyen',
'nicholas',
'nicole',
'nimrod',
'october',
'olive',
'olivia',
'online',
'oscar',
'oxford',
'pacific',
'painter',
'peaches',
'penelope',
'pepsi',
'petunia',
'philip',
'phoenix1',
'photo',
'pickle',
'player',
'poiuyt',
'porsche',
'porter',
'puppy',
'python',
'quality',
'raquel',
'raven',
'remember',
'robbie',
'robert1',
'roman',
'rugby',
'runner',
'russell',
'ryan',
'sailing',
'sailor',
'samantha',
'savage',
'scarlett',
'school',
'sean',
'seven',
'shadow1',
'sheba',
'shelby',
'shit',
'shoes',
'simba',
'simple',
'skipper',
'smiley',
'snake',
'snickers',
'sniper',
'snoopdog',
'snowman',
'sonic',
'spitfire',
'sprite',
'spunky',
'starwars',
'station',
'stella',
'stingray',
'storm',
'stormy',
'stupid',
'sunny1',
'sunrise',
'surfer',
'susan',
'tammy',
'tango',
'tanya',
'teddy1',
'theboss',
'theking',
'thumper',
'tina',
'tintin',
'tomcat',
'trebor',
'trevor',
'tweety',
'unicorn',
'valentine',
'valerie',
'vanilla',
'veronica',
'victor',
'vincent',
'viper',
'warrior',
'warriors',
'weasel',
'wheels',
'wilbur',
'winston',
'wisdom',
'wombat',
'xavier',
'yellow',
'zeppelin',
'1111',
'1212',
'Andrew',
'Family',
'Friends',
'Michael',
'Michelle',
'Snoopy',
'abcd1234',
'abcdefg',
'abigail',
'account',
'adam',
'alex1',
'alice1',
'allison',
'alpine',
'andre1',
'andrea1',
'angel1',
'anita',
'annette',
'antares',
'apache',
'apollo',
'aragorn',
'arizona',
'arnold',
'arsenal',
'asdfasdf',
'asdfg',
'asdfghjk',
'avenger',
'baby',
'babydoll',
'bailey',
'banana',
'barry',
'basket',
'batman1',
'beaner',
'beast',
'beatrice',
'bella',
'bertha',
'bigben',
'bigdog',
'biggles',
'bigman',
'binky',
'biology',
'bishop',
'blondie',
'bluefish',
'bobcat',
'bosco',
'braves',
'brazil',
'bruce',
'bruno',
'brutus',
'buffalo',
'bulldog',
'bullet',
'bullshit',
'bunny',
'business',
'butch',
'butler',
'butter',
'california',
'carebear',
'carol',
'carol1',
'carole',
'cassie',
'castle',
'catalina',
'catherine',
'cccccc',
'celine',
'center',
'champion',
'chanel',
'chaos',
'chelsea1',
'chester1',
'chicago',
'chico',
'christian',
'christy',
'church',
'cinder',
'colleen',
'colorado',
'columbia',
'commander',
'connie',
'cookies',
'cooking',
'corona',
'cowboys',
'coyote',
'craig',
'creative',
'cuddles',
'cuervo',
'cutie',
'daddy',
'daisy',
'daniel1',
'danielle',
'davids',
'death',
'denis',
'derek',
'design',
'destiny',
'diana',
'diane',
'dickhead',
'digger',
'dodger',
'donna',
'dougie',
'dragonfly',
'dylan',
'eagle',
'eclipse',
'electric',
'emerald',
'etoile',
'excalibur',
'express',
'fender',
'fiona',
'fireman',
'flash',
'florida',
'flowers',
'foster',
'francesco',
'francine',
'francois',
'frank',
'french',
'fuckface',
'gemini',
'general',
'gerald',
'germany',
'gilbert',
'goaway',
'golden',
'goldfish',
'goose',
'gordon',
'graham',
'grant',
'gregory',
'gretchen',
'gunner',
'hannah',
'harold',
'harrison',
'harvey',
'hawkeye',
'heaven',
'heidi',
'helen',
'helena',
'hithere',
'hobbit',
'ibanez',
'idontknow',
'integra',
'ireland',
'irene',
'isaac',
'isabel',
'jackass',
'jackie',
'jackson',
'jaguar',
'jamaica',
'japan',
'jenny1',
'jessie',
'johan',
'johnny',
'joker1',
'jordan23',
'judith',
'julia',
'jumanji',
'kangaroo',
'karen1',
'kathy',
'keepout',
'keith1',
'kenneth',
'kimberly',
'kingdom',
'kitkat',
'kramer',
'kristen',
'laura',
'laurie',
'lawrence',
'lawyer',
'legend',
'liberty',
'light',
'lindsay',
'lindsey',
'lisa',
'liverpool',
'lola',
'lonely',
'louis',
'lovely',
'loveme',
'lucas',
'madonna',
'malcolm',
'malibu',
'marathon',
'marcel',
'maria1',
'mariah',
'mariah1',
'marilyn',
'mario',
'marvin',
'maurice',
'maxine',
'maxwell',
'me',
'meggie',
'melanie',
'melissa',
'melody',
'mexico',
'michael1',
'michele',
'midnight',
'mike1',
'miracle',
'misha',
'mishka',
'molly1',
'monique',
'montreal',
'moocow',
'moore',
'morris',
'mouse1',
'mulder',
'nautica',
'nellie',
'newton',
'nick',
'nirvana1',
'nissan',
'norman',
'notebook',
'ocean',
'olivier',
'ollie',
'oranges',
'oregon',
'orion',
'panda',
'pandora',
'panther',
'passion',
'patricia',
'pearl',
'peewee',
'pencil',
'penny',
'people',
'percy',
'person',
'peter1',
'petey',
'picasso',
'pierre',
'pinkfloyd',
'polaris',
'police',
'pookie1',
'poppy',
'power',
'predator',
'preston',
'q1w2e3',
'queen',
'queenie',
'quentin',
'ralph',
'random',
'rangers',
'raptor',
'reality',
'redrum',
'remote',
'reynolds',
'rhonda',
'ricardo',
'ricardo1',
'ricky',
'river',
'roadrunner',
'robinhood',
'rocknroll',
'rocky1',
'ronald',
'roxy',
'ruthie',
'sabrina',
'sakura',
'sally',
'sampson',
'samuel',
'sandra',
'santa',
'sapphire',
'scarlet',
'scorpio',
'scott1',
'scottie',
'scruffy',
'seattle',
'serena',
'shanti',
'shark',
'shogun',
'simon',
'singer',
'skull',
'skywalker',
'slacker',
'smashing',
'smiles',
'snowflake',
'snuffy',
'soccer1',
'soleil',
'sonny',
'spanky',
'speedy',
'spider',
'spooky',
'stacey',
'star69',
'start',
'steven1',
'stinky',
'strawberry',
'stuart',
'sugar',
'sundance',
'superfly',
'suzanne',
'suzuki',
'swimmer',
'swimming',
'system',
'taffy',
'tarzan',
'teddy',
'teddybear',
'terry',
'theatre',
'thunder',
'thursday',
'tinker',
'tootsie',
'tornado',
'tracy',
'tricia',
'trident',
'trojan',
'truman',
'trumpet',
'tucker',
'turtle',
'tyler',
'utopia',
'voyager',
'warcraft',
'warlock',
'warren',
'water',
'wayne',
'wendy',
'williams',
'willy',
'winona',
'woody',
'woofwoof',
'wrangler',
'wright',
'xfiles',
'xxxxxx',
'yankees',
'yvonne',
'zebra',
'zenith',
'zigzag',
'zombie',
'zxc123',
'zxcvb',
'000000',
'007007',
'11111',
'11111111',
'123321',
'171717',
'181818',
'1a2b3c',
'1chris',
'4runner',
'54321',
'55555',
'6969',
'7777777',
'789456',
'88888888',
'Alexis',
'Bailey',
'Charlie',
'Chris',
'Daniel',
'Dragon',
'Elizabeth',
'HARLEY',
'Heather',
'Jennifer',
'Jessica',
'Jordan',
'KILLER',
'Nicholas',
'Password',
'Princess',
'Purple',
'Rebecca',
'Robert',
'Shadow',
'Steven',
'Summer',
'Sunshine',
'Superman',
'Taylor',
'Thomas',
'Victoria',
'abcd123',
'abcde',
'accord',
'active',
'africa',
'airborne',
'alfaro',
'alicia',
'aliens',
'alina',
'aline',
'alison',
'allen',
'aloha',
'alpha1',
'althea',
'altima',
'amanda1',
'amazing',
'america',
'amour',
'anderson',
'andre',
'andrew1',
'andromeda',
'angels',
'angie1',
'annie',
'anything',
'apple1',
'apple2',
'applepie',
'april',
'aquarius',
'ariane',
'ariel',
'arlene',
'artemis',
'asdf1234',
'asdfjkl',
'ashley1',
'ashraf',
'ashton',
'asterix',
'attila',
'autumn',
'avatar',
'babes',
'bambi',
'barbie',
'barney1',
'barrett',
'bball',
'beaches',
'beanie',
'beans',
'beauty',
'becca',
'belize',
'belle',
'belmont',
'benji',
'benson',
'bernardo',
'berry',
'betsy',
'betty',
'bigboss',
'bigred',
'billy1',
'birdie',
'birthday',
'biscuit',
'bitter',
'blackjack',
'blah',
'blanche',
'blood',
'blowjob',
'blowme',
'blueeyes',
'blues',
'bogart',
'bombay',
'boobie',
'boots',
'bootsie',
'boxers',
'brandi',
'brent',
'brewster',
'bridge',
'bronco',
'bronte',
'brooke',
'brother',
'bryan',
'bubble',
'buddha',
'budgie',
'burton',
'butterfly',
'byron',
'calendar',
'calvin1',
'camel',
'camille',
'campbell',
'camping',
'cancer',
'canela',
'cannon',
'carbon',
'carnage',
'carolyn',
'carrot',
'cascade',
'catfish',
'cathy',
'catwoman',
'cecile',
'celica',
'change',
'chantal',
'charger',
'cherry',
'chiara',
'chiefs',
'china',
'chris123',
'christ1',
'christmas',
'christopher',
'chuck',
'cindy1',
'cinema',
'civic',
'claude',
'clueless',
'cobain',
'cobra',
'cody',
'colette',
'college',
'colors',
'colt45',
'confused',
'cool',
'corvette',
'cosmo',
'country',
'crusader',
'cunningham',
'cupcake',
'cynthia',
'dagger',
'dammit',
'dancer',
'daphne',
'darkstar',
'darren',
'darryl',
'darwin',
'deborah',
'december',
'deedee',
'deeznuts',
'delano',
'delete',
'demon',
'denise',
'denny',
'desert',
'deskjet',
'detroit',
'devil',
'devine',
'devon',
'dexter',
'dianne',
'diesel',
'director',
'dixie',
'dodgers',
'doggy',
'dollar',
'dolly',
'dominique',
'domino',
'dontknow',
'doogie',
'doudou',
'downtown',
'dragon1',
'driver',
'dude',
'dudley',
'dutchess',
'dwight',
'eagle1',
'easter',
'eastern',
'edith',
'edmund',
'eight',
'element',
'elissa',
'ellen',
'elliot',
'empire',
'enigma',
'enterprise',
'erin',
'escort',
'estelle',
'eugene',
'evelyn',
'explore',
'family1',
'fatboy',
'felipe',
'ferguson',
'ferret',
'ferris',
'fireball',
'fishes',
'fishie',
'flight',
'florida1',
'flowerpot',
'forward',
'freddie',
'freebird',
'freeman',
'frisco',
'fritz',
'froggie',
'froggies',
'frogs',
'fucku',
'future',
'gabby',
'games',
'garcia',
'gaston',
'gateway',
'george1',
'georgia',
'german',
'germany1',
'getout',
'ghost',
'gibson',
'giselle',
'gmoney',
'goblin',
'goblue',
'gollum',
'grandma',
'gremlin',
'grizzly',
'grumpy',
'guess',
'guitar1',
'gustavo',
'haggis',
'haha',
'hailey',
'halloween',
'hamilton',
'hamlet',
'hanna',
'hanson',
'happy123',
'happyday',
'hardcore',
'harley1',
'harriet',
'harris',
'harvard',
'health',
'heart',
'heather1',
'heather2',
'hedgehog',
'helene',
'hello1',
'hello123',
'hellohello',
'hermes',
'heythere',
'highland',
'hilda',
'hillary',
'history',
'hitler',
'hobbes',
'holiday',
'holly',
'honda1',
'hongkong',
'hootie',
'horse',
'hotrod',
'hudson',
'hummer',
'huskies',
'idiot',
'iforget',
'iloveu',
'impact',
'indonesia',
'irina',
'isabelle',
'israel',
'italia',
'italy',
'jackie1',
'jacob',
'jakey',
'james1',
'jamesbond',
'jamie',
'jamjam',
'jeffrey1',
'jennie',
'jenny',
'jensen',
'jesse',
'jesse1',
'jester',
'jethro',
'jimbob',
'jimmy',
'joanna',
'joelle',
'john316',
'jordie',
'jorge',
'josh',
'journey',
'joyce',
'jubilee',
'jules',
'julien',
'juliet',
'junebug',
'juniper',
'justdoit',
'karin',
'karine',
'karma',
'katerina',
'katie',
'katie1',
'kayla',
'keeper',
'keller',
'kendall',
'kenny',
'ketchup',
'kings',
'kissme',
'kitten',
'kittycat',
'kkkkkk',
'kristi',
'kristine',
'labtec',
'laddie',
'ladybug',
'lance',
'laurel',
'lawson',
'leader',
'leland',
'lemon',
'lester',
'letter',
'letters',
'lexus1',
'libra',
'lights',
'lionel',
'little',
'lizzy',
'lolita',
'lonestar',
'longhorn',
'looney',
'loren',
'lorna',
'loser',
'lovers',
'loveyou',
'lucia',
'lucifer',
'lucky14',
'maddie',
'madmax',
'magic1',
'magnum',
'maiden',
'maine',
'management',
'manson',
'manuel',
'marcus',
'maria',
'marielle',
'marine',
'marino',
'marshall',
'martha',
'maxmax',
'meatloaf',
'medical',
'megan',
'melina',
'memphis',
'mermaid',
'miami',
'michel',
'michigan',
'mickey1',
'microsoft',
'mikael',
'milano',
'miles',
'millenium',
'million',
'miranda',
'miriam',
'mission',
'mmmmmm',
'mobile',
'monkey1',
'monroe',
'montana',
'monty',
'moomoo',
'moonbeam',
'morpheus',
'motorola',
'movies',
'mozart',
'munchkin',
'murray',
'mustang1',
'nadia',
'nadine',
'napoleon',
'nation',
'national',
'nestle',
'newlife',
'newyork1',
'nichole',
'nikita',
'nikki',
'nintendo',
'nokia',
'nomore',
'normal',
'norton',
'noway',
'nugget',
'number9',
'numbers',
'nurse',
'nutmeg',
'ohshit',
'oicu812',
'omega',
'openup',
'orchid',
'oreo',
'orlando',
'packard',
'packers',
'paloma',
'pancake',
'panic',
'parola',
'parrot',
'partner',
'pascal',
'patches',
'patriots',
'paula',
'pauline',
'payton',
'peach',
'peanuts',
'pedro1',
'peggy',
'perfect',
'perry',
'peterpan',
'philips',
'phillips',
'phone',
'pierce',
'pigeon',
'pink',
'pioneer',
'piper1',
'pirate',
'pisces',
'playboy',
'pluto',
'poetry',
'pontiac',
'pookey',
'popeye',
'prayer',
'precious',
'prelude',
'premier',
'puddin',
'pulsar',
'pussy',
'pussy1',
'qwert',
'qwerty12',
'qwertyui',
'rabbit1',
'rachelle',
'racoon',
'rambo',
'randy1',
'ravens',
'redman',
'redskins',
'reggae',
'reggie',
'renee',
'renegade',
'rescue',
'revolution',
'richard1',
'richards',
'richmond',
'riley',
'ripper',
'robby',
'roberts',
'rock',
'rocket1',
'rockie',
'rockon',
'roger1',
'rogers',
'roland',
'rommel',
'rookie',
'rootbeer',
'rosie',
'rufus',
'rusty',
'ruthless',
'sabbath',
'sabina',
'safety',
'saint',
'samiam',
'sammie',
'sammy',
'samsam',
'sandi',
'sanjose',
'saphire',
'sarah1',
'saskia',
'sassy',
'saturday',
'science',
'scooby',
'scoobydoo',
'scooter1',
'scorpion',
'scotty',
'scouts',
'search',
'september',
'server',
'seven7',
'sexy',
'shaggy',
'shanny',
'shaolin',
'shasta',
'shayne',
'shelly',
'sherry',
'shirley',
'shorty',
'shotgun',
'sidney',
'simba1',
'sinatra',
'sirius',
'skate',
'skipper1',
'skyler',
'slayer',
'sleepy',
'slider',
'smile1',
'smitty',
'smoke',
'snakes',
'snapper',
'snoop',
'solomon',
'sophia',
'space',
'sparks',
'spartan',
'spike1',
'sponge',
'spurs',
'squash',
'stargate',
'starlight',
'stars',
'steph1',
'steve1',
'stevens',
'stewart',
'stone',
'stranger',
'stretch',
'strong',
'studio',
'stumpy',
'sucker',
'suckme',
'sultan',
'summit',
'sunfire',
'sunset',
'super',
'superstar',
'surfing',
'susan1',
'sutton',
'sweden',
'sweetpea',
'sweety',
'swordfish',
'tabatha',
'tacobell',
'taiwan',
'tamtam',
'tanner',
'target',
'tasha',
'tattoo',
'tequila',
'terry1',
'texas',
'thankyou',
'theend',
'thompson',
'thrasher',
'tiger2',
'timber',
'timothy',
'tinkerbell',
'topcat',
'topher',
'toshiba',
'tototo',
'travis',
'treasure',
'trees',
'tricky',
'trish',
'triton',
'trombone',
'trouble',
'trucker',
'turbo',
'twins',
'tyler1',
'ultimate',
'unique',
'united',
'ursula',
'vacation',
'valley',
'vampire',
'vanessa',
'venice',
'venus',
'vermont',
'vicki',
'vicky',
'victor1',
'vincent1',
'violet',
'violin',
'virgil',
'virginia',
'vision',
'volley',
'voodoo',
'vortex',
'waiting',
'wanker',
'warner',
'water1',
'wayne1',
'webster',
'weezer',
'wendy1',
'western',
'white',
'whitney',
'whocares',
'wildcat',
'william1',
'wilma',
'window',
'winniethepooh',
'wolfgang',
'wolverine',
'wonder',
'xxxxxxxx',
'yamaha',
'yankee',
'yogibear',
'yolanda',
'yomama',
'yvette',
'zachary',
'zebras',
'zxcvbn',
'00000000',
'121212',
'1234qwer',
'131313',
'13579',
'90210',
'99999999',
'ABC123',
'action',
'amelie',
'anaconda',
'apollo13',
'artist',
'asshole',
'benoit',
'bernard',
'bernie',
'bigbird',
'blizzard',
'bluesky',
'bonjour',
'caesar',
'cardinal',
'carolina',
'cesar',
'chandler',
'chapman',
'charlie1',
'chevy',
'chiquita',
'chocolat',
'coco',
'cougars',
'courtney',
'dolphins',
'dominic',
'donkey',
'dusty',
'eminem',
'energy',
'fearless',
'forest',
'forever',
'glenn',
'guinness',
'hotdog',
'indian',
'jared',
'jimbo',
'johnson',
'jojo',
'josie',
'kristin',
'lloyd',
'lorraine',
'lynn',
'maxime',
'memory',
'mimi',
'mirror',
'nebraska',
'nemesis',
'network',
'nigel',
'oatmeal',
'patton',
'pedro',
'planet',
'players',
'portland',
'praise',
'psalms',
'qwaszx',
'raiders',
'rambo1',
'rancid',
'shawn',
'shelley',
'softball',
'speedo',
'sports',
'ssssss',
'steele',
'steph',
'stephani',
'sunday',
'tiffany',
'tigre',
'toronto',
'trixie',
'undead',
'valentin',
'velvet',
'viking',
'walker',
'watson',
'young',
'babygirl',
'pretty',
'hottie',
'teamo',
'987654321',
'naruto',
'spongebob',
'daniela',
'princesa',
'christ',
'blessed',
'single',
'qazwsx',
'pokemon',
'iloveyou1',
'iloveyou2',
'fuckyou1',
'hahaha',
'poop',
'blessing',
'blahblah',
'blink182',
'123qwe',
'trinity',
'passw0rd',
'google',
'looking',
'spirit',
'iloveyou!',
'qwerty1',
'onelove',
'mylove',
'222222',
'ilovegod',
'football1',
'loving',
'emmanuel',
'1q2w3e4r',
'red123',
'blabla',
'112233',
'hallo',
'spiderman',
'simpsons',
'monster',
'november',
'brooklyn',
'poopoo',
'darkness',
'159753',
'pineapple',
'chester',
'1qaz2wsx',
'drowssap',
'monkey12',
'wordpass',
'q1w2e3r4',
'coolness',
'11235813',
'something',
'alexandra',
'estrella',
'miguel',
'iloveme',
'sayang',
'princess1',
'555555',
'999999',
'alejandro',
'brittany',
'alejandra',
'tequiero',
'antonio',
'987654',
'00000',
'fernando',
'corazon',
'cristina',
'kisses',
'myspace',
'rebelde',
'babygurl',
'alyssa',
'mahalkita',
'gabriela',
'pictures',
'hellokitty',
'babygirl1',
'angelica',
'mahalko',
'mariana',
'eduardo',
'andres',
'ronaldo',
'inuyasha',
'adriana',
'celtic',
'samsung',
'angelo',
'456789',
'sebastian',
'karina',
'hotmail',
'0123456789',
'barcelona',
'cameron',
'slipknot',
'cutiepie',
'50cent',
'bonita',
'maganda',
'babyboy',
'natalie',
'cuteako',
'javier',
'789456123',
'123654',
'bowwow',
'portugal',
'777777',
'volleyball',
'january',
'cristian',
'bianca',
'chrisbrown',
'101010',
'sweet',
'panget',
'benfica',
'love123',
'lollipop',
'camila',
'qwertyuiop',
'harrypotter',
'ihateyou',
'christine',
'lorena',
'andreea',
'charmed',
'rafael',
'brianna',
'aaliyah',
'johncena',
'lovelove',
'gangsta',
'333333',
'hiphop',
'mybaby',
'sergio',
'metallica',
'myspace1',
'babyblue',
'badboy',
'fernanda',
'westlife',
'sasuke',
'steaua',
'roberto',
'slideshow',
'asdfghjkl',
'santiago',
'jayson',
'5201314',
'jerome',
'gandako',
'gatita',
'babyko',
'246810',
'sweetheart',
'chivas',
'alberto',
'valeria',
'nicole1',
'12345678910',
'leonardo',
'jayjay',
'liliana',
'sexygirl',
'232323',
'amores',
'anthony1',
'bitch1',
'fatima',
'miamor',
'lover',
'lalala',
'252525',
'skittles',
'colombia',
'159357',
'manutd',
'123456a',
'britney',
'katrina',
'christina',
'pasaway',
'mahal',
'tatiana',
'cantik',
'0123456',
'teiubesc',
'147258369',
'natalia',
'francisco',
'amorcito',
'paola',
'angelito',
'manchester',
'mommy1',
'147258',
'amigos',
'marlon',
'linkinpark',
'147852',
'diego',
'444444',
'iverson',
'andrei',
'justine',
'frankie',
'pimpin',
'fashion',
'bestfriend',
'england',
'hermosa',
'456123',
'102030',
'sporting',
'hearts',
'potter',
'iloveu2',
'number1',
'212121',
'truelove',
'jayden',
'savannah',
'hottie1',
'ganda',
'scotland',
'ilovehim',
'shakira',
'estrellita',
'brandon1',
'sweets',
'familia',
'love12',
'omarion',
'monkeys',
'loverboy',
'elijah',
'ronnie',
'mamita',
'999999999',
'broken',
'rodrigo',
'westside',
'mauricio',
'amigas',
'preciosa',
'shopping',
'flores',
'isabella',
'martinez',
'elaine',
'friendster',
'cheche',
'gracie',
'connor',
'valentina',
'darling',
'santos',
'joanne',
'fuckyou2',
'pebbles',
'sunshine1',
'gangster',
'gloria',
'darkangel',
'bettyboop',
'jessica1',
'cheyenne',
'dustin',
'iubire',
'a123456',
'purple1',
'bestfriends',
'inlove',
'batista',
'karla',
'chacha',
'marian',
'sexyme',
'pogiako',
'jordan1',
'010203',
'daddy1',
'daddysgirl',
'billabong',
'pinky',
'erika',
'skater',
'nenita',
'tigger1',
'gatito',
'lokita',
'maldita',
'buttercup',
'bambam',
'glitter',
'123789',
'sister',
'zacefron',
'tokiohotel',
'loveya',
'lovebug',
'bubblegum',
'marissa',
'cecilia',
'lollypop',
'nicolas',
'puppies',
'ariana',
'chubby',
'sexybitch',
'roxana',
'mememe',
'susana',
'baller',
'hotstuff',
'carter',
'babylove',
'angelina',
'playgirl',
'sweet16',
'012345',
'bhebhe',
'marcos',
'loveme1',
'milagros',
'lilmama',
'beyonce',
'lovely1',
'catdog',
'armando',
'margarita',
'151515',
'loves',
'202020',
'gerard',
'undertaker',
'amistad',
'capricorn',
'delfin',
'cheerleader',
'password2',
'PASSWORD',
'lizzie',
'matthew1',
'enrique',
'badgirl',
'141414',
'dancing',
'cuteme',
'amelia',
'skyline',
'angeles',
'janine',
'carlitos',
'justme',
'legolas',
'michelle1',
'cinderella',
'jesuschrist',
'ilovejesus',
'tazmania',
'tekiero',
'thebest',
'princesita',
'lucky7',
'jesucristo',
'buddy1',
'regina',
'myself',
'lipgloss',
'jazmin',
'rosita',
'chichi',
'pangit',
'mierda',
'741852963',
'hernandez',
'arturo',
'silvia',
'melvin',
'celeste',
'pussycat',
'gorgeous',
'honeyko',
'mylife',
'babyboo',
'loveu',
'lupita',
'panthers',
'hollywood',
'alfredo',
'musica',
'hawaii',
'sparkle',
'kristina',
'sexymama',
'crazy',
'scarface',
'098765',
'hayden',
'micheal',
'242424',
'0987654321',
'marisol',
'jeremiah',
'mhine',
'isaiah',
'lolipop',
'butterfly1',
'xbox360',
'madalina',
'anamaria',
'yourmom',
'jasmine1',
'bubbles1',
'beatriz',
'diamonds',
'friendship',
'sweetness',
'desiree',
'741852',
'hannah1',
'bananas',
'julius',
'leanne',
'marie1',
'lover1',
'twinkle',
'february',
'bebita',
'87654321',
'twilight',
'imissyou',
'pollito',
'ashlee',
'cookie1',
'147852369',
'beckham',
'simone',
'nursing',
'torres',
'damian',
'123123123',
'joshua1',
'babyface',
'dinamo',
'mommy',
'juliana',
'cassandra',
'redsox',
'gundam',
'0000',
'ou812',
'dave',
'golf',
'molson',
'Monday',
'newpass',
'thx1138',
'1',
'Internet',
'coke',
'foobar',
'abc',
'fish',
'fred',
'help',
'ncc1701d',
'newuser',
'none',
'pat',
'dog',
'duck',
'duke',
'floyd',
'guest',
'joe',
'kingfish',
'micro',
'sam',
'telecom',
'test1',
'7777',
'absolut',
'babylon5',
'backup',
'bill',
'bird33',
'deliver',
'fire',
'flip',
'galileo',
'gopher',
'hansolo',
'jane',
'jim',
'mom',
'passwd',
'phil',
'phish',
'porsche911',
'rain',
'red',
'sergei',
'training',
'truck',
'video',
'volvo',
'007',
'1969',
'5683',
'Bond007',
'Friday',
'Hendrix',
'October',
'Taurus',
'aaa',
'alexandr',
'catalog',
'challenge',
'clipper',
'coltrane',
'cyrano',
'dan',
'dawn',
'dean',
'deutsch',
'dilbert',
'e-mail',
'export',
'ford',
'fountain',
'fox',
'frog',
'gabriell',
'garlic',
'goforit',
'grateful',
'hoops',
'lady',
'ledzep',
'lee',
'mailman',
'mantra',
'market',
'mazda1',
'metallic',
'ncc1701e',
'nesbitt',
'open',
'pete',
'quest',
'republic',
'research',
'supra',
'tara',
'testing',
'xanadu',
'xxxx',
'zaphod',
'zeus',
'0007',
'1022',
'10sne1',
'1973',
'1978',
'2000',
'2222',
'3bears',
'Broadway',
'Fisher',
'Jeanne',
'Killer',
'Knight',
'Master',
'Pepper',
'Sierra',
'Tennis',
'abacab',
'abcd',
'ace',
'acropolis',
'amy',
'anders',
'avenir',
'basil',
'bass',
'beer',
'ben',
'bliss',
'blowfish',
'boss',
'bridges',
'buck',
'bugsy',
'bull',
'cannondale',
'canon',
'catnip',
'chip',
'civil',
'content',
'cook',
'cordelia',
'crack1',
'cyber',
'daisie',
'dark1',
'database',
'deadhead',
'denali',
'depeche',
'dickens',
'emmitt',
'entropy',
'farout',
'farside',
'feedback',
'fidel',
'firenze',
'fish1',
'fletch',
'fool',
'fozzie',
'fun',
'gargoyle',
'gasman',
'gold',
'graphic',
'hell',
'image',
'intern',
'intrepid',
'jeff',
'jkl123',
'joel',
'johanna1',
'kidder',
'kim',
'king',
'kirk',
'kris',
'lambda',
'leon',
'logical',
'lorrie',
'major',
'mariner',
'mark1',
'max',
'media',
'merlot',
'midway',
'mine',
'mmouse',
'moon',
'mopar',
'mortimer',
'nermal',
'nina',
'olsen',
'opera',
'overkill',
'pacers',
'packer',
'picard',
'polar',
'polo',
'primus',
'prometheus',
'public',
'radio',
'rastafarian',
'reptile',
'rob',
'robotech',
'rodeo',
'rolex',
'rouge',
'roy',
'ruby',
'salasana',
'scarecrow',
'scout',
'scuba1',
'sergey',
'skibum',
'skunk',
'sound',
'starter',
'sting1',
'sunbird',
'tbird',
'teflon',
'temporal',
'terminal',
'the',
'thejudge',
'time',
'toby',
'today',
'tokyo',
'tree',
'trout',
'vader',
'val',
'valhalla',
'windsurf',
'wolf',
'wolf1',
'xcountry',
'yoda',
'yukon',
'1213',
'1214',
'1225',
'1313',
'1818',
'1975',
'1977',
'1991',
'1kitty',
'2001',
'2020',
'2112',
'2kids',
'333',
'4444',
'5050',
'57chevy',
'7dwarfs',
'Animals',
'Ariel',
'Bismillah',
'Booboo',
'Boston',
'Carol',
'Computer',
'Creative',
'Curtis',
'Denise',
'Eagles',
'Esther',
'Fishing',
'Freddy',
'Gandalf',
'Golden',
'Goober',
'Hacker',
'Harley',
'Henry',
'Hershey',
'Jackson',
'Jersey',
'Joanna',
'Johnson',
'Katie',
'Kitten',
'Liberty',
'Lindsay',
'Lizard',
'Madeline',
'Margaret',
'Maxwell',
'Money',
'Monster',
'Pamela',
'Peaches',
'Peter',
'Phoenix',
'Piglet',
'Pookie',
'Rabbit',
'Raiders',
'Random',
'Russell',
'Sammy',
'Saturn',
'Skeeter',
'Smokey',
'Sparky',
'Speedy',
'Sterling',
'Theresa',
'Thunder',
'Vincent',
'Willow',
'Winnie',
'Wolverine',
'aaaa',
'aardvark',
'abbott',
'acura',
'admin',
'admin1',
'adrock',
'aerobics',
'agent',
'airwolf',
'ali',
'alien',
'allegro',
'allstate',
'altamira',
'altima1',
'andrew!',
'ann',
'anne',
'anneli',
'aptiva',
'arrow',
'asdf;lkj',
'assmunch',
'baraka',
'barnyard',
'bart',
'bartman',
'beasty',
'beavis1',
'bebe',
'belgium',
'beowulf',
'beryl',
'best',
'bharat',
'bichon',
'bigal',
'biker',
'bilbo',
'bills',
'bimmer',
'biochem',
'birdy',
'blinds',
'blitz',
'bluejean',
'bogey',
'bogus',
'boulder',
'bourbon',
'boxer',
'brain',
'branch',
'britain',
'broker',
'bucks',
'buffett',
'bugs',
'bulls',
'burns',
'buzz',
'c00per',
'calgary',
'camay',
'carl',
'cat',
'cement',
'cessna',
'chad',
'chainsaw',
'chameleon',
'chang',
'chess',
'chinook',
'chouette',
'chronos',
'cicero',
'circuit',
'cirque',
'cirrus',
'clapton',
'clarkson',
'class',
'claudel',
'cleo',
'cliff',
'clock',
'color',
'comet',
'concept',
'concorde',
'coolbean',
'corky',
'cornflake',
'corwin',
'cows',
'crescent',
'cross',
'crowley',
'cthulhu',
'cunt',
'current',
'cutlass',
'daedalus',
'dagger1',
'daily',
'dale',
'dana',
'daytek',
'dead',
'decker',
'dharma',
'dillweed',
'dipper',
'disco',
'dixon',
'doitnow',
'doors',
'dork',
'doug',
'dutch',
'effie',
'ella',
'elsie',
'engage',
'eric1',
'ernie1',
'escort1',
'excel',
'faculty',
'fairview',
'faust',
'fenris',
'finance',
'first',
'fishhead',
'flanders',
'fleurs',
'flute',
'flyboy',
'flyer',
'franka',
'frederic',
'free',
'front242',
'frontier',
'fugazi',
'funtime',
'gaby',
'gaelic',
'gambler',
'gammaphi',
'garfunkel',
'garth',
'gary',
'gateway2',
'gator1',
'gibbons',
'gigi',
'gilgamesh',
'goat',
'godiva',
'goethe',
'gofish',
'good',
'gramps',
'gravis',
'gray',
'greed',
'greg',
'greg1',
'greta',
'gretzky',
'guido',
'gumby',
'h2opolo',
'hamid',
'hank',
'hawkeye1',
'health1',
'hello8',
'help123',
'helper',
'homerj',
'hoosier',
'hope',
'huang',
'hugo',
'hydrogen',
'ib6ub9',
'insight',
'instructor',
'integral',
'iomega',
'iris',
'izzy',
'jazz',
'jean',
'jeepster',
'jetta1',
'joanie',
'josee',
'joy',
'julia2',
'jumbo',
'jump',
'justice4',
'kalamazoo',
'kali',
'kat',
'kate',
'kerala',
'kids',
'kiwi',
'kleenex',
'kombat',
'lamer',
'laser',
'laserjet',
'lassie1',
'leblanc',
'legal',
'leo',
'life',
'lions',
'liz',
'logger',
'logos',
'loislane',
'loki',
'longer',
'lori',
'lost',
'lotus',
'lou',
'macha',
'macross',
'madoka',
'makeitso',
'mallard',
'marc',
'math',
'mattingly',
'mechanic',
'meister',
'mercer',
'merde',
'merrill',
'michal',
'michou',
'mickel',
'minou',
'mobydick',
'modem',
'mojo',
'montana3',
'montrose',
'motor',
'mowgli',
'mulder1',
'muscle',
'neil',
'neutrino',
'newaccount',
'nicklaus',
'nightshade',
'nightwing',
'nike',
'none1',
'nopass',
'nouveau',
'novell',
'oaxaca',
'obiwan',
'obsession',
'orville',
'otter',
'ozzy',
'packrat',
'paint',
'papa',
'paradigm',
'pass',
'pavel',
'peterk',
'phialpha',
'phishy',
'piano1',
'pianoman',
'pianos',
'pipeline',
'plato',
'play',
'poetic',
'print',
'printing',
'provider',
'qqq111',
'quebec',
'qwer',
'racer',
'racerx',
'radar',
'rafiki',
'raleigh',
'rasta1',
'redcloud',
'redfish',
'redwing',
'redwood',
'reed',
'rene',
'reznor',
'rhino',
'ripple',
'rita',
'robocop',
'robotics',
'roche',
'roni',
'rossignol',
'rugger',
'safety1',
'saigon',
'satori',
'saturn5',
'schnapps',
'scotch',
'scuba',
'secret3',
'seeker',
'services',
'sex',
'shanghai',
'shazam',
'shelter',
'sigmachi',
'signal',
'signature',
'simsim',
'skydive',
'slick',
'smegma',
'smiths',
'smurfy',
'snow',
'sober1',
'sonics',
'sony',
'spazz',
'sphynx',
'spock',
'spoon',
'spot',
'sprocket',
'starbuck',
'steel',
'stephi',
'sting',
'stocks',
'storage',
'strat',
'strato',
'stud',
'student2',
'susanna',
'swanson',
'swim',
'switzer',
'system5',
't-bone',
'talon',
'tarheel',
'tata',
'tazdevil',
'tester',
'testtest',
'thisisit',
'thorne',
'tightend',
'tim',
'tom',
'tool',
'total',
'toucan',
'transfer',
'transit',
'transport',
'trapper',
'trash',
'trophy',
'tucson',
'turbo2',
'unity',
'upsilon',
'vedder',
'vette',
'vikram',
'virago',
'visual',
'volcano',
'walden',
'waldo',
'walleye',
'webmaster',
'wedge',
'whale1',
'whit',
'whoville',
'wibble',
'will',
'wombat1',
'word',
'world',
'x-files',
'xxx123',
'zack',
'zepplin',
'zoltan',
'zoomer',
'123go',
'21122112',
'5555',
'911',
'FuckYou',
'Fuckyou',
'Gizmo',
'Hello',
'Michel',
'Qwerty',
'Windows',
'angus',
'aspen',
'ass',
'bird',
'booster',
'byteme',
'cats',
'changeit',
'christia',
'christoph',
'classroom',
'cloclo',
'corrado',
'dasha',
'fiction',
'french1',
'fubar',
'gator',
'gilles',
'gocougs',
'hilbert',
'hola',
'home',
'judy',
'koko',
'lulu',
'mac',
'macintosh',
'mailer',
'mars',
'meow',
'ne1469',
'niki',
'paul',
'politics',
'pomme',
'property',
'ruth',
'sales',
'salut',
'scrooge',
'skidoo',
'spain',
'surf',
'sylvie',
'symbol',
'forum',
'rotimi',
'god',
'saved',
'2580',
'1998',
'xxx',
'1928',
'777',
'info',
'a',
'netware',
'sun',
'tech',
'doom',
'mmm',
'one',
'ppp',
'1911',
'1948',
'1996',
'5252',
'Champs',
'Tuesday',
'bach',
'crow',
'don',
'draft',
'hal9000',
'herzog',
'huey',
'jethrotull',
'jussi',
'mail',
'miki',
'nicarao',
'snowski',
'1316',
'1412',
'1430',
'1952',
'1953',
'1955',
'1956',
'1960',
'1964',
'1qw23e',
'22',
'2200',
'2252',
'3010',
'3112',
'4788',
'6262',
'Alpha',
'Bastard',
'Beavis',
'Cardinal',
'Celtics',
'Cougar',
'Darkman',
'Figaro',
'Fortune',
'Geronimo',
'Hammer',
'Homer',
'Janet',
'Mellon',
'Merlot',
'Metallic',
'Montreal',
'Newton',
'Paladin',
'Peanuts',
'Service',
'Vernon',
'Waterloo',
'Webster',
'aki123',
'aqua',
'aylmer',
'beta',
'bozo',
'car',
'chat',
'chinacat',
'cora',
'courier',
'dogbert',
'eieio',
'elina1',
'fly',
'funguy',
'fuzz',
'ggeorge',
'glider1',
'gone',
'hawk',
'heikki',
'histoire',
'hugh',
'if6was9',
'ingvar',
'jan',
'jedi',
'jimi',
'juhani',
'khan',
'lima',
'midvale',
'neko',
'nesbit',
'nexus6',
'nisse',
'notta1',
'pam',
'park',
'pole',
'pope',
'pyro',
'ram',
'reliant',
'rex',
'rush',
'seoul',
'skip',
'stan',
'sue',
'suzy',
'tab',
'testi',
'thelorax',
'tika',
'tnt',
'toto1',
'tre',
'wind',
'x-men',
'xyz',
'zxc',
'369',
'Abcdef',
'Asdfgh',
'Changeme',
'NCC1701',
'Zxcvbnm',
'demo',
'doom2',
'e',
'good-luck',
'homebrew',
'm1911a1',
'nat',
'ne1410s',
'ne14a69',
'zhongguo',
'sample123',
'0852',
'basf',
'OU812',
'!@#$%',
'informix',
'majordomo',
'news',
'temp',
'trek',
'!@#$%^',
'!@#$%^&*',
'Pentium',
'Raistlin',
'adi',
'bmw',
'law',
'm',
'new',
'opus',
'plus',
'visa',
'www',
'y',
'zzz',
'1332',
'1950',
'3141',
'3533',
'4055',
'4854',
'6301',
'Bonzo',
'ChangeMe',
'Front242',
'Gretel',
'Michel1',
'Noriko',
'Sidekick',
'Sverige',
'Swoosh',
'Woodrow',
'aa',
'ayelet',
'barn',
'betacam',
'biz',
'boat',
'cuda',
'doc',
'hal',
'hallowell',
'haro',
'hosehead',
'i',
'ilmari',
'irmeli',
'j1l2t3',
'jer',
'kcin',
'kerrya',
'kissa2',
'leaf',
'lissabon',
'mart',
'matti1',
'mech',
'morecats',
'paagal',
'performa',
'prof',
'ratio',
'ship',
'slip',
'stivers',
'tapani',
'targas',
'test2',
'test3',
'tula',
'unix',
'user1',
'xanth',
'!@#$%^&',
'1701d',
'@#$%^&',
'Qwert',
'allo',
'dirk',
'go',
'newcourt',
'nite',
'notused',
'sss']
def FormatTime(epoch=None):
    """Return *epoch* (seconds since the epoch; default: now) formatted as
    a local-time 'YYYYMMDD-HHMMSS' string."""
    # Identity comparison is the correct test for None (PEP 8), not ==.
    if epoch is None:
        epoch = time.time()
    return '%04d%02d%02d-%02d%02d%02d' % time.localtime(epoch)[0:6]
def RemoveQuotes(word):
    """Strip a matching pair of surrounding quotes ("..." or '...') from
    *word* and return the inner text; return '' when *word* is not a
    properly quoted, non-empty string."""
    # Shortest valid quoted word is 3 characters: quote + 1 char + quote.
    if len(word) < 3:
        return ''
    if word[0] != word[-1]:
        return ''
    # Idiomatic membership test: `not in` with a tuple of the two quote chars.
    if word[0] not in ('"', "'"):
        return ''
    return word[1:-1]
def DeduplicateAndPreserveOrder(list):
    """Return a new list with duplicates removed, keeping first-seen order.

    NOTE: the parameter name shadows the builtin `list`; it is kept as-is for
    backward compatibility with existing callers. Elements must be hashable
    (they are always strings in this program).
    """
    # A seen-set makes this O(n) instead of the previous O(n^2)
    # `element in result` scan — significant for the large password lists here.
    seen = set()
    result = []
    for element in list:
        if element not in seen:
            seen.add(element)
            result.append(element)
    return result
def Unquoted(list):
    """Return the unquoted form of every properly quoted element of *list*,
    silently dropping every element that is not quoted."""
    result = []
    for element in list:
        stripped = RemoveQuotes(element)
        if stripped != '':
            result.append(stripped)
    return result
def ExtractPasswords(filename):
    """Harvest candidate passwords from text file *filename*.

    Most promising candidates first: up to 4 words following each occurrence
    of the word 'password', then all quoted words, then every word.
    """
    words = []
    for line in File2Strings(filename):
        words.extend(word for word in line.split(' ') if len(word) > 0)
    probablyPasswords = []
    for index, word in enumerate(words):
        if word.lower() == 'password':
            probablyPasswords.extend(words[index + 1:index + 5])
    candidates = Unquoted(probablyPasswords) + probablyPasswords + Unquoted(words) + words
    return DeduplicateAndPreserveOrder(candidates)
def ApplyRules(passwords):
    """Expand the candidate list: each password is followed immediately by
    its case-swapped variant."""
    result = []
    for password in passwords:
        result.extend((password, password.swapcase()))
    return result
def Crack(filename, options):
    """Try to recover the password of an encrypted Office document.

    filename: document to attack; '' reads the document from stdin, and a
    name ending in .zip is treated as a (possibly password-protected)
    container whose first entry is the document.
    options: parsed command-line options (passwordlist, extractpasswords,
    crackedpassword, rules, password, output).
    """
    # Load the raw document bytes into an in-memory file-like object.
    if filename == '':
        IfWIN32SetBinary(sys.stdin)
        if hasattr(sys.stdin, 'buffer'): # Python 3: binary data comes from .buffer
            oDataIO = DataIO(sys.stdin.buffer.read())
        else: # Python 2: stdin itself is byte-oriented
            oDataIO = DataIO(sys.stdin.read())
    elif filename.lower().endswith('.zip'):
        # Malware-sample convention: the document sits inside a
        # password-protected ZIP (default password in options.password).
        oZipfile = zipfile.ZipFile(filename, 'r')
        oZipContent = oZipfile.open(oZipfile.infolist()[0], 'r', C2BIP3(options.password))
        oDataIO = DataIO(oZipContent.read())
        oZipContent.close()
        oZipfile.close()
    else:
        oDataIO = DataIO(open(filename, 'rb').read())
    file = msoffcrypto.OfficeFile(oDataIO)
    if options.crackedpassword == '':
        # No known password: build the candidate list, then try each in turn.
        if options.extractpasswords == '':
            passwords = GetDictionary(options.passwordlist, os.path.basename(filename))
        else:
            passwords = ExtractPasswords(options.extractpasswords)
        if options.rules:
            passwords = ApplyRules(passwords)
        total = len(passwords)
        starttime = time.time()
        for index, password in enumerate(passwords):
            if index == 0:
                eta = ''
            else:
                # Linear extrapolation of remaining time from average pace so far.
                seconds = int(float((time.time() - starttime) / float(index)) * float(total - index))
                eta = 'estimation %d seconds left, finished %s' % (seconds, FormatTime(time.time() + seconds))
            # Progress report every 100 attempts (suppressed when decrypting to stdout).
            if (index + 1) % 100 == 0 and options.output != '-':
                print('%d/%d %s' % (index + 1, total, eta))
            try:
                # A wrong password makes load_key/decrypt raise; success falls through.
                file.load_key(password=password)
                file.decrypt(DataIO())
            except KeyboardInterrupt:
                raise
            except:
                continue
            if options.output != '-':
                print('Password found: %s' % password)
            break
    else:
        # Password already known: just load the key for decryption below.
        file.load_key(password=options.crackedpassword)
    # Optionally write out the decrypted document.
    if options.output == '':
        pass
    elif options.output == '-':
        IfWIN32SetBinary(sys.stdout)
        if hasattr(sys.stdout, 'buffer'): # Python 3: write binary via .buffer
            file.decrypt(sys.stdout.buffer)
        else: # Python 2
            file.decrypt(sys.stdout)
    else:
        file.decrypt(open(options.output, 'wb'))
def Main():
    """Parse the command line (0 or 1 positional file argument) and
    dispatch to Crack()."""
    moredesc = '''
Source code put in the public domain by Didier Stevens, no Copyright
Use at your own risk
https://DidierStevens.com'''
    oParser = optparse.OptionParser(usage='usage: %prog [options] [file]\n' + __description__ + moredesc, version='%prog ' + __version__)
    oParser.add_option('-m', '--man', action='store_true', default=False, help='Print manual')
    oParser.add_option('-p', '--passwordlist', default='', help='The password list to use')
    oParser.add_option('-e', '--extractpasswords', default='', help='A text file to extract passwords from')
    oParser.add_option('-c', '--crackedpassword', default='', help='The password to use')
    oParser.add_option('-r', '--rules', action='store_true', default=False, help='Apply password rules')
    oParser.add_option('--password', default=MALWARE_PASSWORD, help='The ZIP password to be used for the malware ZIP container (default %s)' % MALWARE_PASSWORD)
    oParser.add_option('-o', '--output', default='', help='Output filename for decrypted file (- for stdout)')
    (options, args) = oParser.parse_args()
    if options.man:
        oParser.print_help()
        PrintManual()
        return
    # No file argument: read the document from stdin.
    if len(args) == 0:
        Crack('', options)
    elif len(args) == 1:
        Crack(args[0], options)
    else:
        oParser.print_help()
        return

if __name__ == '__main__':
    Main()
| [
"[email protected]"
] | |
c688fdb0b0df6f828c608726a6a2344e9ea59346 | 3d4cd68400eb5429282b23bf6a1b7226851b731a | /spddo/micro/func/format_date.py | c89fe79b54b1ef99441575344c516189548b9b64 | [] | no_license | blueshed/spddo-chat | 2dee16478e9a30ed0196d76d450e0772147aa208 | d7ba492162ba95c0d2b8ed78370366eb96e39c3a | refs/heads/master | 2021-01-17T11:29:48.959086 | 2016-05-21T18:33:08 | 2016-05-21T18:33:08 | 43,005,167 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 301 | py | from blueshed.micro.utils.date_utils import parse_date
def format_date(date: str, date_format: str='%b, %d %Y'):
    """Parse *date* and render it with *date_format*.

    Accepted input formats (handled by parse_date):
    '%Y-%m-%dT%H:%M:%S.%fZ' or just '%Y-%m-%d'.
    """
    parsed = parse_date(date)
    return parsed.strftime(date_format)
| [
"[email protected]"
] | |
fbcf6403ced9e1507e9f51fb874d1f4e9f3b6e13 | 7d4d6dc3c897ec7c297bb67f30c3f4e39509b250 | /Python/DailyFlash/28feb2020/MySolutions/program1.py | ab5f70d98b97f7e7edad5810c80f531aef298d58 | [] | no_license | kumbharswativ/Core2Web | 48a6ec0275466f4179c502097b1314d04a29e63e | 60949e5461ef103a4ad2c7c39ee9be0be101ec11 | refs/heads/master | 2022-12-24T06:11:45.096063 | 2020-08-09T12:04:07 | 2020-08-09T12:09:13 | 286,219,590 | 0 | 1 | null | 2022-12-11T10:57:50 | 2020-08-09T11:02:18 | Python | UTF-8 | Python | false | false | 686 | py | '''
Write a Program that prints whether a number entered by user is
Disarium Number or not.
{Note: A number can be termed as Disarium number if the sum of every digits
raised by their position in that number is equal to that number. E.g. 135, 1 is at
position 1, 3 is at position 2 & 5 is at position 3, then 1^1 + 3^2 + 5^3 = 1 + 9 +
125 = 135, so 135 is a Disarium Number}
Input: 89
Output: 89 is a Disarium Number.
'''
# Disarium check: a number is Disarium when the sum of each digit raised to
# the power of its 1-based position equals the number itself,
# e.g. 135 -> 1**1 + 3**2 + 5**3 = 135.
list1 = []
z = int(input("Input:"))
num = z
# Collect the digits (least significant first), then restore left-to-right order.
while z > 0:
    list1.append(z % 10)
    z = z // 10
list1.reverse()
# Accumulate digit**position; `total` avoids shadowing the builtin sum().
total = 0
for position, digit in enumerate(list1, start=1):
    total = total + digit ** position
if num == total:
    print(num, "is disarium number")
else:
    print(num, "is not disarium number")
| [
"“[email protected]”"
] | |
dfa30e0fd9fbef00668d7f2b5968d50393b8aa2a | df6ec5d6f5c6beb30c1f4bb6a4c2969e2ef25c31 | /ngo_npo_profile/migrations/0001_initial.py | c086b0111ba7c3edcb1d71d260e8259dcead1fcd | [] | no_license | heritiermwalila/wecanchangetheworld | 0f820f89878107b002c10fa724a39d025e6a5cfc | 30c48a02d78d366afe6739606b342f6bcefcd576 | refs/heads/master | 2023-05-01T20:24:29.925947 | 2019-06-05T09:21:41 | 2019-06-05T09:21:41 | 190,364,316 | 0 | 0 | null | 2023-04-21T20:33:04 | 2019-06-05T09:16:51 | JavaScript | UTF-8 | Python | false | false | 2,141 | py | # Generated by Django 2.2.1 on 2019-06-04 19:06
import ckeditor_uploader.fields
from django.db import migrations, models
import django.db.models.deletion
import django.utils.timezone
class Migration(migrations.Migration):
    # Auto-generated initial migration (see header comment): creates the
    # Organisation and Page tables. Generated migrations should not be
    # hand-edited beyond comments.

    initial = True

    dependencies = [
    ]

    operations = [
        # Organisation: an NGO/NPO profile with branding assets and contact data.
        migrations.CreateModel(
            name='Organisation',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(help_text='Organisation name', max_length=100)),
                ('slug', models.SlugField(unique=True)),
                ('email', models.EmailField(blank=True, help_text='Organisation Email', max_length=254)),
                ('phone', models.CharField(blank=True, max_length=50)),
                ('ceo', models.CharField(blank=True, max_length=50)),
                ('logo', models.FileField(default='static/images/noprofile.png', upload_to='ngo-npo/', verbose_name='Profile logo')),
                ('expect', models.TextField(blank=True, help_text='Expect text', max_length=255)),
                ('website_url', models.URLField(blank=True, help_text='website Address')),
                ('background_image', models.FileField(default='static/images/defaultbg.jpg', upload_to='ngo-npo/')),
                ('date_posted', models.DateTimeField(default=django.utils.timezone.now)),
            ],
            options={
                'verbose_name': 'Organisation',
                'verbose_name_plural': 'Organisations',
            },
        ),
        # Page: a rich-text content page belonging to one Organisation.
        migrations.CreateModel(
            name='Page',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=50)),
                ('slug', models.SlugField()),
                ('content', ckeditor_uploader.fields.RichTextUploadingField(blank=True)),
                ('organisation', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='ngo_npo_profile.Organisation')),
            ],
        ),
    ]
| [
"[email protected]"
] | |
d89bf8a620d9998a3cd783110496cbf721c4ee6f | 6da19be45ff986768eb820f11691977cb3c84772 | /Python/3_DB_interactions/303_Book_store_project_with_json_format_file_for_storage/main_logic.py | 5b9f54629f33cf31d9cd4b915529561295178d45 | [] | no_license | alexp01/trainings | 9e72f3a571292b79d2b1518f564d2dc0a774ef41 | 9d8daee16f15e0d7851fab12ab3d2505386a686c | refs/heads/master | 2023-05-04T23:37:13.243691 | 2023-05-02T08:02:53 | 2023-05-02T08:02:53 | 272,425,687 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,058 | py | """
API with logic for file storage in JSON format
"""
import json
file_to_store = 'database.json'
print (file_to_store)
def check_if_file_exists():
    """Create the JSON storage file if it does not exist yet.

    BUGFIX: mode 'w' truncated an existing database every time this was
    called; mode 'a' (append) also creates a missing file but leaves an
    existing one untouched.
    """
    with open(file_to_store, 'a'):
        pass
def read_from_file():
    """Load and return the list of book records from the JSON storage file."""
    with open(file_to_store, 'r') as storage:
        return json.load(storage)
def add_a_book(name, author):
    """Append a new, unread book record ({'name', 'author', 'read'}) and
    persist the whole collection."""
    books = read_from_file()
    # Dict literal is the idiomatic (and faster) form of dict([('k', v), ...]).
    books.append({'name': name, 'author': author, 'read': '0'})
    write_to_file(books)
def mark_as_read(name):
    """Flag every book titled *name* as read ('1') and persist the collection."""
    books = read_from_file()
    for record in books:
        if record['name'] == name:
            record['read'] = '1'
    write_to_file(books)
def delete_a_book(name):
    """Remove every book titled *name* and persist the remaining records."""
    remaining = [record for record in read_from_file() if record['name'] != name]
    write_to_file(remaining)
def write_to_file(books):
    """Serialize *books* to the JSON storage file, replacing its contents."""
    with open(file_to_store, 'w') as storage:
        json.dump(books, storage)
| [
"[email protected]"
] | |
8356db6b9815a42116fb0868e62babfe13a5daa7 | 221d1ad342677d2fac8aa3f8d5c60e059a6316c9 | /pm4py/algo/discovery/heuristics/__init__.py | ba5a92db4c2ebcbf19c175b258658a4d211042c4 | [] | no_license | niklasadams/explainable_concept_drift_pm | 06ff651fbdebece4adf96f94bfb4d1026da14c48 | 6bf84d727ab0bae76716a04ad28c7de73250c89d | refs/heads/main | 2023-08-26T18:21:49.955080 | 2021-10-29T18:53:48 | 2021-10-29T18:53:48 | 314,514,571 | 4 | 0 | null | null | null | null | UTF-8 | Python | false | false | 64 | py | from pm4py.algo.discovery.heuristics import variants, algorithm
| [
"[email protected]"
] | |
47901753475b364c0fa61c68da06ac7dd0ce77d0 | 4178f2916d2da72cbb45454fbed941dcfe8f6460 | /POM_test/TestCase/Predict/TC_007.py | d31487df7537765b6187688a9253b66573b8e426 | [] | no_license | maxcrup007/Selenium_Webdriver_Python | 15196cb04ba5cafdc5b776c26d167f0b48fb0e14 | 6be7f0b9f53df1ba592957029e8a4d22e409d1c4 | refs/heads/main | 2023-03-24T21:04:31.976451 | 2021-03-22T09:16:04 | 2021-03-22T09:16:04 | 349,379,454 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,189 | py |
# ทดสอบการเข้าใช้งานของ "ประเมิน" (กรอกจำนวนที่ประเมินให้เป็นศูนย์)
import time
import unittest
import sys
from selenium import webdriver
from selenium.webdriver import ActionChains
from POM_test.login import *
from POM_test.predictPage import *
import os
sys.path.append(os.path.join(os.path.dirname(__file__), "...", "..."))
class TestPredict_7(unittest.TestCase):
    """UI test for the 'assess' (predict) page: log in, upload an image,
    and submit an assessment whose quantity is zero."""

    @classmethod
    def setUpClass(cls):
        # BUGFIX: the classmethod parameter was misleadingly named 'self';
        # a classmethod receives the class, conventionally named 'cls'.
        cls.driver = webdriver.Chrome(executable_path="C:/Users/voraw/Downloads/Compressed/webdriver/chromedriver/chromedriver")
        cls.driver.implicitly_wait(10)
        cls.driver.maximize_window()

    def test_login_valid(self):
        driver = self.driver
        self.driver.get("https://top-upstream-client.mulberrysoft.com/#/older/activity")
        login = LoginPage(driver)
        login.enter_username("demo005")
        login.enter_password("123456")
        login.click_login()
        time.sleep(2)
        predict = PredictPage(driver)
        predict.into_predictPage()
        time.sleep(2)
        predict.upload_image()
        time.sleep(2)
        predict.predict_plant()
        time.sleep(2)
        predict.predict_select()
        time.sleep(2)
        predict.predict_value_selected("0")
        # Enter zero as the assessed quantity (the case under test).
        time.sleep(2)
        scroll = driver.find_element_by_xpath("//ion-item[2]/ion-select")
        action = ActionChains(driver)
        action.move_to_element(scroll).perform()
        # action object creation to scroll round 1
        predict.predict_unit_selected()
        time.sleep(2)
        predict.predict_submit_value()
        time.sleep(2)

    @classmethod
    def tearDownClass(cls):
        cls.driver.close()
        cls.driver.quit()
        print("Test Completed")
if __name__ == '__main__':
unittest.main()
| [
"[email protected]"
] | |
f8b9076be3b1fda65f70ba1b970c0b313998e494 | d554b1aa8b70fddf81da8988b4aaa43788fede88 | /5 - Notebooks e Data/1 - Análises numéricas/Arquivos David/Atualizados/logDicas-master/data/2019-1/222/users/4057/codes/1642_2728.py | a35b596d3befd8180e3f47a678f1a0b4d5b6c69c | [] | no_license | JosephLevinthal/Research-projects | a3bc3ca3b09faad16f5cce5949a2279cf14742ba | 60d5fd6eb864a5181f4321e7a992812f3c2139f9 | refs/heads/master | 2022-07-31T06:43:02.686109 | 2020-05-23T00:24:26 | 2020-05-23T00:24:26 | 266,199,309 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 247 | py | percurso = float(input("o percurso de uma viagem (em quilometros): "))
# Fuel estimate: type 'A' cars average 8 km/l, any other type 12 km/l.
# The duplicated print() was hoisted out of both branches.
tipo = input("o tipo do carro (A ou B): ")
if tipo.upper() == "A":
    consumo = percurso / 8
else:
    consumo = percurso / 12
print(round(consumo, 2))
"[email protected]"
] | |
3884eb4248bc7e8244797a0c63713adc83ee57e0 | 8e24e8bba2dd476f9fe612226d24891ef81429b7 | /geeksforgeeks/python/python_all/103_14.py | f3f8bf7d10d83beb51333e3d691467e855b5fbdb | [] | no_license | qmnguyenw/python_py4e | fb56c6dc91c49149031a11ca52c9037dc80d5dcf | 84f37412bd43a3b357a17df9ff8811eba16bba6e | refs/heads/master | 2023-06-01T07:58:13.996965 | 2021-06-15T08:39:26 | 2021-06-15T08:39:26 | 349,059,725 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 2,703 | py | Python – Custom dictionary initialization in list
While working with Python, we can face a problem in which we need to
initialize a list of a particular size with custom dictionaries. This task has
its utility in web development for storing records. Let's discuss certain ways
in which this task can be performed.
**Method #1 : Using {dict} + "*" operator**

This task can be performed using the "*" operator. We can create a list
containing a single custom dictionary and then multiply it by the desired
size of the list. The drawback is that every element is a reference to the
same dictionary, i.e. they all point to the same memory location.
__
__
__
__
__
__
__
# Python3 code to demonstrate working of
# Custom dictionary initialization in list
# using {dict} + "*" operator
# Initialize dict
test_dict = {'gfg' : 1, 'is' : 2, 'best' : 3}
# Custom dictionary initialization in list
# using {dict} + "*" operator
res = [test_dict] * 6
print("The list of custom dictionaries is : " + str(res))
---
__
__
**Output :**
The list of custom dictionaries is : [{'gfg': 1, 'best': 3, 'is': 2}, {'gfg': 1, 'best': 3, 'is': 2}, {'gfg': 1, 'best': 3, 'is': 2}, {'gfg': 1, 'best': 3, 'is': 2}, {'gfg': 1, 'best': 3, 'is': 2}, {'gfg': 1, 'best': 3, 'is': 2}]
**Method #2 : Using {dict} + list comprehension**
This is perhaps the better and more explicit way to perform this task. We
initialize each index of the list via a comprehension. (Note: for truly
independent dictionaries, copy inside the comprehension, e.g.
`[dict(test_dict) for sub in range(6)]`; otherwise every element still
refers to the same dictionary.)
__
__
__
__
__
__
__
# Python3 code to demonstrate working of
# Custom dictionary initialization in list
# using {dict} + list comprehension
# Initialize dict
test_dict = {'gfg' : 1, 'is' : 2, 'best' : 3}
# Custom dictionary initialization in list
# using {dict} + list comprehension
res = [test_dict for sub in range(6)]
print("The list of custom dictionaries is : " + str(res))
---
__
__
**Output :**
The list of custom dictionaries is : [{'gfg': 1, 'best': 3, 'is': 2}, {'gfg': 1, 'best': 3, 'is': 2}, {'gfg': 1, 'best': 3, 'is': 2}, {'gfg': 1, 'best': 3, 'is': 2}, {'gfg': 1, 'best': 3, 'is': 2}, {'gfg': 1, 'best': 3, 'is': 2}]
Attention geek! Strengthen your foundations with the **Python Programming
Foundation** Course and learn the basics.
To begin with, your interview preparations Enhance your Data Structures
concepts with the **Python DS** Course.
My Personal Notes _arrow_drop_up_
Save
| [
"[email protected]"
] | |
cb781d9d988c44c11a77c73ac37e5299553ac34a | c74b29b68211a51d7283d57b24d7cf83422a8ceb | /classertest.py | ffa1e7ece13d8dbad8941e217e8ee1bf19fb8681 | [] | no_license | proycon/nlpsandbox | 63359e7cdd709dd81d66aed9bf1437f8ecf706a0 | 22e5f85852b7b2a658c6b94c3dedd425a5d6396f | refs/heads/master | 2020-12-09T19:37:10.040962 | 2019-04-23T17:17:15 | 2019-04-23T17:17:15 | 2,347,265 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 857 | py | #!/usr/bin/env python
#-*- coding:utf-8 -*-
from pynlpl.statistics import FrequencyList
from pynlpl.textprocessors import crude_tokenizer, Classer
import sys
import codecs
import asizeof
# Python 2 script (print statements): compare the memory footprint of a raw
# token frequency list against a class-encoded one, using pynlpl.
# Pass 1: build a frequency list of all tokens in the input corpus (argv[1]).
freqlist = FrequencyList()
f = codecs.open(sys.argv[1], 'r','utf-8')
for line in f:
    line = crude_tokenizer(line.strip())
    freqlist.append(line)
f.close()
print "FREQLIST: " ,asizeof.asizeof(freqlist)
# Map tokens to integer classes; second Classer keeps only the decoder side.
classer = Classer(freqlist)
print "CLASSER: " ,asizeof.asizeof(classer)
classer2 = Classer(freqlist, False,True)
print "CLASSER (ONLY DECODER): " ,asizeof.asizeof(classer2)
# Pass 2: re-read the corpus, this time storing encoded (class-id) sequences,
# so the two frequency-list sizes can be compared.
freqlist2 = FrequencyList()
f = codecs.open(sys.argv[1], 'r','utf-8')
for line in f:
    line = crude_tokenizer(line.strip())
    freqlist2.append(classer.encodeseq(line))
f.close()
print "FREQLIST-AFTER-CLASSER: " ,asizeof.asizeof(freqlist2)
| [
"[email protected]"
] | |
b152b59d05c3e3d5cdabcc403e4d35db597bceab | 235fb362b5af1f7dbd90dc3819fe63f18e074e9d | /learn_pyqt/pyqt5-cv2-multithreaded-master/SharedImageBuffer.py | faf4fedcd3df098ddfccf082c84eb6906e8874fe | [] | no_license | cener-1999/learn_about_python | 74c9b8c6a546224261d5577183a946a78ca7e84f | 86cfc0a5621f86fc8a1885a39847d40b33137c49 | refs/heads/master | 2023-04-30T06:38:34.459506 | 2021-05-18T14:20:29 | 2021-05-18T14:20:29 | 368,473,253 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,162 | py | from PyQt5.QtCore import QMutexLocker, QMutex, QWaitCondition
class SharedImageBuffer(object):
    """Registry of per-device image buffers with optional barrier-style
    synchronization of the participating capture streams (Qt threading)."""

    def __init__(self):
        # Initialize variables(s)
        self.nArrived = 0              # streams currently waiting at the barrier
        self.doSync = False            # global switch: is stream syncing active?
        self.syncSet = set()           # device URLs enrolled for synchronization
        self.wc = QWaitCondition()
        self.imageBufferDict = dict()  # deviceUrl -> image buffer
        self.mutex = QMutex()

    def add(self, deviceUrl, imageBuffer, sync=False):
        """Register *imageBuffer* for *deviceUrl*; optionally enrol the
        stream for synchronization."""
        # Device stream is to be synchronized
        if sync:
            with QMutexLocker(self.mutex):
                self.syncSet.add(deviceUrl)
        # Add image buffer to map
        self.imageBufferDict[deviceUrl] = imageBuffer

    def getByDeviceUrl(self, deviceUrl):
        """Return the image buffer registered for *deviceUrl* (KeyError if absent)."""
        return self.imageBufferDict[deviceUrl]

    def removeByDeviceUrl(self, deviceUrl):
        """Unregister *deviceUrl* and release any threads waiting on the barrier."""
        # Remove buffer for device from imageBufferDict
        self.imageBufferDict.pop(deviceUrl)
        # Also remove from syncSet (if present)
        with QMutexLocker(self.mutex):
            if self.syncSet.__contains__(deviceUrl):
                self.syncSet.remove(deviceUrl)
                # Wake waiters so they re-evaluate with the smaller sync set.
                self.wc.wakeAll()

    def sync(self, deviceUrl):
        """Barrier: block until every enrolled stream has arrived (only when
        syncing is enabled and *deviceUrl* is enrolled; otherwise a no-op)."""
        # Only perform sync if enabled for specified device/stream
        self.mutex.lock()
        if self.syncSet.__contains__(deviceUrl):
            # Increment arrived count
            self.nArrived += 1
            # We are the last to arrive: wake all waiting threads
            if self.doSync and self.nArrived == len(self.syncSet):
                self.wc.wakeAll()
            # Still waiting for other streams to arrive: wait
            else:
                self.wc.wait(self.mutex)
            # Decrement arrived count
            self.nArrived -= 1
        self.mutex.unlock()

    def wakeAll(self):
        """Release every thread currently blocked in sync()."""
        with QMutexLocker(self.mutex):
            self.wc.wakeAll()

    def setSyncEnabled(self, enable):
        # Toggle the global synchronization switch.
        self.doSync = enable

    def isSyncEnabledForDeviceUrl(self, deviceUrl):
        return self.syncSet.__contains__(deviceUrl)

    def getSyncEnabled(self):
        return self.doSync

    def containsImageBufferForDeviceUrl(self, deviceUrl):
        return self.imageBufferDict.__contains__(deviceUrl)
| [
"[email protected]"
] | |
18da7de4cc349c8c1d2580cadf1d38fed8ba6dfc | 1800155dcdb48bf956fa423858a8cc20ed27e6cb | /two-sum-iii-data-structure-design.py | 24185cd468ecda7dacfaaced0516ae3647362d6b | [] | no_license | gitprouser/LeetCode-3 | 1cc2d1dbbf439af4b3768da388dafd514cc5432b | 530ea79f0377e1fc3fbfb5c5cfe7768159144e57 | refs/heads/master | 2021-06-06T16:30:14.795093 | 2016-08-22T21:40:01 | 2016-08-22T21:40:01 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 828 | py | class TwoSum(object):
    def __init__(self):
        """
        initialize your data structure here
        """
        # Maps each added number -> how many times it was added.
        self.table = {}
def add(self, number):
"""
Add the number to an internal data structure.
:rtype: nothing
"""
self.table[number] = self.table.get(number, 0) + 1
def find(self, value):
"""
Find if there exists any pair of numbers which sum is equal to the value.
:type value: int
:rtype: bool
"""
for i in self.table.keys():
j = value - i
if i == j and self.table[i] > 1 or i != j and j in self.table:
return True
return False
# Your TwoSum object will be instantiated and called as such:
# twoSum = TwoSum()
# twoSum.add(number)
# twoSum.find(value)
| [
"[email protected]"
] | |
095efaba792693239c806c9b95e67c5d4a6f8409 | 434368cb1b6fee551175129fe177af8211b24018 | /Python/Marius/anfis/anfis.py | e3c7ebb77170702303b71c1ef4fadfb2545ce2da | [
"BSD-3-Clause"
] | permissive | Marius-Juston/MatLab-RL-ANFIS | d0f330d3b1168c16b96131d3d950f29cda0bee62 | 650645345dc197ca5b7069085bf95185acc40467 | refs/heads/main | 2023-07-09T04:06:10.813384 | 2021-08-06T02:08:25 | 2021-08-06T02:08:25 | 392,870,966 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,451 | py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
ANFIS in torch: the ANFIS layers
@author: James Power <[email protected]> Apr 12 18:13:10 2019
Acknowledgement: twmeggs' implementation of ANFIS in Python was very
useful in understanding how the ANFIS structures could be interpreted:
https://github.com/twmeggs/anfis
"""
from array import array
from collections import OrderedDict
import numpy as np
import torch
import torch.nn.functional as F
from antecedent_layer import AntecedentLayer, MamdaniAntecedentLayer
from consequent_layer import ConsequentLayer, SymmetricWeightsConsequentLayer, ConsequentLayerType, \
PlainConsequentLayer, MamdaniConsequentLayer
from fuzzy_layer import JointFuzzifyLayer
dtype = torch.float
class WeightedSumLayer(torch.nn.Module):
    """
    Layer 5 of the ANFIS net: combine the per-rule TSK values for each
    output variable into one prediction, weighted by the rule firing
    strengths. Kept for reference; the same computation is a one-liner
    at the call site.
    """

    def __init__(self):
        super(WeightedSumLayer, self).__init__()

    def forward(self, weights, tsk):
        """
        weights.shape: n_cases * n_rules
        tsk.shape:     n_cases * n_out * n_rules
        returns shape: n_cases * n_out
        """
        # Expand weights to (n_cases, n_rules, 1) so the batched matmul
        # contracts the rules axis.
        expanded = weights.unsqueeze(2)
        products = torch.bmm(tsk, expanded)
        return products.squeeze(2)
class ProductSum(torch.nn.Module):
    """Mamdani output stage: matrix product of firing strengths and rule outputs."""

    def forward(self, weights, tsk):
        # The @ operator is exactly torch.matmul.
        return weights @ tsk
class Empty(torch.nn.Module):
    """Placeholder module: accepts any arguments and returns None."""
    def forward(self, *params):
        pass
class Normalization(torch.nn.Module):
    """Normalize each row of firing strengths to sum to one (L1 norm over dim 1)."""

    def forward(self, weights):
        normalized = F.normalize(weights, p=1, dim=1)
        return normalized
class JointAnfisNet(torch.nn.Module):
    """
    This is a container for the 5 layers of the ANFIS net
    (fuzzify -> rules -> normalize -> consequent -> output).
    The forward pass maps inputs to outputs based on current settings,
    and then fit_coeff will adjust the TSK coeff using LSE.
    """

    def __init__(self, description, invardefs, outvarnames, rules_type=ConsequentLayerType.HYBRID,
                 mamdani_ruleset=None,
                 mamdani_defs=None, matlab=False):
        """
        description: free-text name of the system.
        invardefs: list of (variable-name, membership-function-group) pairs.
        outvarnames: names of the output variables.
        rules_type: selects the consequent/output layer variants.
        mamdani_ruleset / mamdani_defs: required only for MAMDANI systems.
        matlab: if True, forward() returns an array('d') instead of a tensor.
        """
        super(JointAnfisNet, self).__init__()
        self.matlab = matlab
        self.description = description
        self.outvarnames = outvarnames
        self.rules_type = rules_type
        varnames = [v for v, _ in invardefs]
        # mfdefs = [JointBellMembership(*mfs) for _, mfs in invardefs]
        mfdefs = [mfs for _, mfs in invardefs]
        self.num_in = len(invardefs)
        # Full rule base: one rule per combination of membership functions.
        self.num_rules = np.prod([mfs.num_mfs for mfs in mfdefs])
        self.dtype = mfdefs[0].required_dtype()
        print("Using datatype:", self.dtype)
        if self.rules_type == ConsequentLayerType.MAMDANI:
            if mamdani_defs is None:
                raise ValueError("There is no Mamdani defintion")
            rules = MamdaniAntecedentLayer(mamdani_ruleset)
            normalization = Normalization()
            cl = MamdaniConsequentLayer(mamdani_defs, rules.mamdani_ruleset['outputs_membership'])
            output = ProductSum()
        else:
            # TSK-style system: antecedents over the full grid of MF combinations.
            rules = AntecedentLayer(mfdefs)
            normalization = Normalization()
            output = WeightedSumLayer()
            if self.rules_type == ConsequentLayerType.HYBRID:
                cl = ConsequentLayer(self.num_in, self.num_rules, self.num_out, dtype=self.dtype)
            elif self.rules_type == ConsequentLayerType.SYMMETRIC:
                cl = SymmetricWeightsConsequentLayer(self.num_in, self.num_rules, self.num_out, self.dtype)
            else:
                cl = PlainConsequentLayer(self.num_in, self.num_rules, self.num_out, self.dtype)
        self.layer = torch.nn.ModuleDict(OrderedDict([
            ('fuzzify', JointFuzzifyLayer(mfdefs, varnames)),
            ('rules', rules),
            ('normalize', normalization),
            ('consequent', cl),
            ('output', output),
            # weighted-sum layer is just implemented as a function.
        ]))

    @property
    def num_out(self):
        # Number of output variables.
        return len(self.outvarnames)

    @property
    def coeff(self):
        # Delegate to the consequent layer's coefficients.
        return self.layer['consequent'].coeff

    @coeff.setter
    def coeff(self, new_coeff):
        self.layer['consequent'].coeff = new_coeff

    def fit_coeff(self, *params):
        """
        Do a forward pass (to get weights), then fit to y_actual.
        HYBRID expects params == (x, y_actual); SYMMETRIC takes no params
        and may prune the rule base. Does nothing for other ANFIS types,
        so we have the same interface for all of them.
        """
        if self.rules_type == ConsequentLayerType.HYBRID:
            x, y_actual = params
            # Forward pass populates self.weights as a side effect.
            self(x)
            self.layer['consequent'].fit_coeff(x, self.weights, y_actual)
        elif self.rules_type == ConsequentLayerType.SYMMETRIC:
            # with torch.no_grad():
            mask, update = self.layer['consequent'].fit_coeff()
            # print("Coeff:", self.layer['consequent'].coeff.shape)
            if update:
                # print("Update")
                # Mirror the mask (minus the shared center element) so the
                # retained rule indices stay symmetric.
                symmetrical_mask = torch.cat([mask, torch.flip(mask, dims=[0])[1:]])
                self.layer['rules'].mf_indices = self.layer['rules'].mf_indices[symmetrical_mask]
                # print("Rules", self.layer['rules'].mf_indices.shape)

    def input_variables(self):
        """
        Return an iterator over this system's input variables.
        Yields tuples of the form (var-name, FuzzifyVariable-object)
        """
        return self.layer['fuzzify'].varmfs.items()

    def output_variables(self):
        """
        Return a list of the names of the system's output variables.
        """
        return self.outvarnames

    def extra_repr(self):
        # Human-readable rule listing (used by torch's module repr).
        if self.rules_type == ConsequentLayerType.MAMDANI:
            vardefs = self.layer['fuzzify'].varmfs
            vardefs_names = list(vardefs.keys())
            rules = self.layer['rules'].mamdani_ruleset
            var_index = rules['variable_rule_index']
            mem_index = rules['membership_indices']
            out_index = rules['outputs_membership']
            out_name = self.layer['consequent'].mamdani_defs.names
            rules = []
            for i in range(len(var_index)):
                temp = []
                for var, mem in zip(var_index[i], mem_index[i]):
                    name = vardefs_names[var]
                    temp.append(f"{name} is {list(vardefs[name].mfdefs.keys())[mem]}")
                rules.append(f'Rule {i}: IF {" AND ".join(temp)} THEN {out_name[out_index[i][0]]}')
            return '\n'.join(rules)
        else:
            rstr = []
            vardefs = self.layer['fuzzify'].varmfs
            rule_ants = self.layer['rules'].extra_repr(vardefs).split('\n')
            for i, crow in enumerate(self.layer['consequent'].coeff):
                rstr.append('Rule {:2d}: IF {}'.format(i, rule_ants[i]))
                rstr.append(' ' * 9 + 'THEN {}'.format(crow.tolist()))
            return '\n'.join(rstr)

    def forward(self, x):
        """
        Forward pass: run x thru the five layers and return the y values.
        I save the outputs from each layer to an instance variable,
        as this might be useful for comprehension/debugging.
        """
        self.fuzzified = self.layer['fuzzify'](x)
        self.raw_weights = self.layer['rules'](self.fuzzified)
        self.weights = self.layer['normalize'](self.raw_weights)
        self.rule_tsk = self.layer['consequent'](x)
        self.y_pred = self.layer['output'](self.weights, self.rule_tsk)
        # y_pred = torch.bmm(self.rule_tsk, self.weights.unsqueeze(2))
        # self.y_pred = y_pred.squeeze(2)
        if self.matlab:
            # MATLAB interop wants a plain array of doubles.
            return array('d', self.y_pred)
        return self.y_pred
# These hooks are handy for debugging:
def module_hook(label):
    """Build a backward hook for an nn.Module. Use it like this:
        m = AnfisNet()
        m.layer.fuzzify.register_backward_hook(module_hook('fuzzify'))
        m.layer.consequent.register_backward_hook(module_hook('consequent'))
    """
    def hook(module, grad_input, grad_output):
        print('BP for module', label,
              'with out grad:', grad_output,
              'and in grad:', grad_input)
    return hook
def tensor_hook(label):
    """
    Build a gradient hook for a single tensor, when you want something
    more fine-grained than module_hook.
    """
    def hook(grad):
        print('BP for', label, 'with grad:', grad)
    return hook
| [
"[email protected]"
] | |
e35651f899532537e810f36a9aa113eb399a5eaa | 382034646e9d3e32c8e63e8d83d2dd7da5be4ef3 | /workery/shared_foundation/tests/models/test_opening_hours_specification.py | ee5abb18bc973e46a7b73620b7db5b912280e1f9 | [
"BSD-3-Clause",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | wahello/workery-django | 80c88ecb7968951719af6857711891ec3787cf46 | 289318b0333d830c089f4492716c38d409c365ed | refs/heads/master | 2020-03-30T04:21:48.642659 | 2018-09-28T01:30:22 | 2018-09-28T01:30:22 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,728 | py | # -*- coding: utf-8 -*-
from django.core.management import call_command
from starterkit.utils import get_unique_username_from_email
from django_tenants.test.cases import TenantTestCase
from django_tenants.test.client import TenantClient
from django.urls import reverse
from shared_foundation.models import *
TEST_USER_EMAIL = "[email protected]"
TEST_USER_USERNAME = "[email protected]"
TEST_USER_PASSWORD = "123P@$$w0rd"
TEST_USER_TEL_NUM = "123 123-1234"
TEST_USER_TEL_EX_NUM = ""
TEST_USER_CELL_NUM = "123 123-1234"
class TestSharedOpeningHoursSpecification(TenantTestCase):
    """
    Console:
    python manage.py test shared_foundation.tests.models.test_opening_hours_specification
    """

    def setUp(self):
        super(TestSharedOpeningHoursSpecification, self).setUp()
        self.c = TenantClient(self.tenant)
        self.user = SharedUser.objects.create(
            first_name="Bart",
            last_name="Mika",
            email=TEST_USER_EMAIL,
            is_active=True,
        )
        self.obj = SharedOpeningHoursSpecification.objects.create(
            owner=self.user,
            closes="9:00 PM",
            day_of_week="Monday",
            opens="8:00 AM"
        )

    def tearDown(self):
        del self.c
        self.obj.delete()
        super(TestSharedOpeningHoursSpecification, self).tearDown()

    def test_str(self):
        self.assertIsNotNone(str(self.obj))
        self.assertIn("9:00 PM", self.obj.closes)

    def test_delete_all(self):
        SharedOpeningHoursSpecification.objects.delete_all()
        # BUGFIX: the previous version only asserted inside the except branch,
        # so the test silently passed even when a record survived delete_all().
        # assertRaises makes the expectation explicit and fails otherwise.
        with self.assertRaises(SharedOpeningHoursSpecification.DoesNotExist):
            SharedOpeningHoursSpecification.objects.get()
| [
"[email protected]"
] | |
27139caa2ff73ba9f0c5c0772b1633c315e9aa52 | a939e018333a9ecd26ddc618f99835b7eb381686 | /deploy/vertical_crawler_youtube/le_crawler/common/page_local_writer.py | 3a63fce7ba2cc572e552cac20ddeef9e74c0b3e1 | [] | no_license | cash2one/crawl_youtube | bff5ba254001c2f31f770e55a4aca39bc54e45ee | 0dc40186a1d89da2b00f29d4f4edfdc5470eb4fc | refs/heads/master | 2021-01-16T22:30:17.800282 | 2016-02-18T11:50:09 | 2016-02-18T11:50:09 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,190 | py | #!/usr/bin/python
#
# Copyright 2014 LeTV Inc. All Rights Reserved.
__author__ = '[email protected]'
import time
import os
import Queue
import threading
from scrapy import log
from scrapy.utils.project import get_project_settings
from ..core.page_writer import PageWriterBase
"""
for json format data writer
"""
class PageLocalJsonWriter(PageWriterBase):
    """Buffers crawled items in a bounded LIFO queue and writes them, one
    JSON line per item, to timestamped files via a background thread.

    Files are written as '<name>.tmp' and renamed to '<name>.json' once
    flushed, so downstream consumers only ever see completed files.
    (Python 2 code: note the `except Exception, e` / `print` syntax.)
    """

    def __init__(self, spider):
        PageWriterBase.__init__(self, spider)
        # Thresholds come from the scrapy project settings, with defaults.
        self._init(
            get_project_settings().getint('LOCAL_PAGE_WRITER_DATA_TIME_LIMIT', 86400),
            get_project_settings().getint('LOCAL_PAGE_WRITER_DATA_FLUSH_LIMIT', 20000),
            get_project_settings().get('LOCAL_PAGE_WRITER_DATA_DIR', '/letv/crawler_delta/')
        )
        self.set_name('PageLocalJsonWriter')

    def _init(self, gen_max_time = 86400, file_max_nums = 2000, data_dir = ""):
        # One-off state setup; also spawns the background writer thread.
        if not os.path.isdir(data_dir):
            raise Exception('%s is not dir' % (data_dir))
        self.file_fp_ = None            # currently open .tmp file object
        self.current_file_name_ = ''    # path prefix of the open file
        self.total_items_ = 0           # lifetime count of written items
        self.current_nums_ = 0          # lines written to the current file
        self.gen_file_max_time_threshold_ = gen_max_time # seconds before a non-empty file is rotated
        self.max_lines_per_file_ = file_max_nums
        self.last_flush_time_ = int(time.time())
        self.data_dir_ = data_dir
        # Bounded LIFO buffer between the spider and the writer thread.
        self.data_queue_ = Queue.LifoQueue(maxsize = 10240)
        # NOTE(review): self.exit_ is read by the writer thread but never
        # initialised here — presumably PageWriterBase sets it; confirm.
        thread = threading.Thread(target = self.file_writer_manger, args = ())
        thread.start()

    def finalize(self):
        # Signal the writer thread to stop and wait for the queue to drain.
        self.exit_ = True
        while not self.data_queue_.empty():
            self.spider_.log('page page_local_writer que[%d]' % (self.data_queue_.qsize()), log.INFO)
            time.sleep(1)
        self.spider_.log('%s write items[%s]' % (self.name, self.total_items_),
                         log.INFO)

    def process_item(self, item):
        # Pipeline entry point; delegates to the queueing helper.
        self.add_item(item)

    def add_item(self, item):
        # Block (retrying every 5 seconds) until the item fits in the queue.
        if not item:
            return
        while True:
            try:
                self.data_queue_.put(item, block = True, timeout = 5)
                return
            except Exception, e:
                self.spider_.log('try to put item into queu error %s, size %d' % (e, self.data_queue_.qsize()))
                continue

    def status(self):
        # Human-readable progress summary.
        return 'total item wrote: %s' % (self.total_items_)

    def gen_filestr(self):
        # New output path: <data_dir>/<YYYYmmdd_HHMMSS>_<pid>
        return os.path.join(self.data_dir_, '%s_%d'%(time.strftime('%Y%m%d_%H%M%S',
                                                                   time.localtime()),
                                                     os.getpid()))

    def gen_json_str(self, item):
        # Serialize one item to its JSON line; returns None on failure.
        if not item:
            return None
        try:
            return item.to_json_str()
        except:
            # BUG(review): `dict` below is the builtin type, not the item —
            # subscripting it raises TypeError inside this handler.  It
            # presumably meant item['url'] / item['page_encoding']; confirm.
            self.spider_.log('Failed decoding [%s] with [%s]' %(dict['url'],
                                                                dict['page_encoding']), log.WARNING)
            dict['page'] = 'error decoding'
            return None

    def _prepare_writer(self):
        # Close any open file, then open a fresh '<name>.tmp' for writing.
        if self.file_fp_:
            self._dump_file()
        self.current_file_name_ = self.gen_filestr()
        self.file_fp_ = open(self.current_file_name_ + '.tmp', 'w+')
        self.current_nums_ = 0

    def _dump_file(self):
        # Close the current file; delete it if empty, else rename .tmp -> .json.
        try:
            if not self.file_fp_:
                return False
            self.file_fp_.close()
            self.last_flush_time_ = int(time.time())
            self.file_fp_ = None
            if self.current_nums_ == 0:
                os.remove(self.current_file_name_ + '.tmp')
            else:
                os.rename(self.current_file_name_ + '.tmp', self.current_file_name_ + '.json')
            return True
        except Exception, e:
            print e
            self.spider_.log('Error while dump file:[%s]' % self.current_file_name_,
                             log.ERROR)
            return False

    def file_writer_manger(self):
        # Background loop: drain the queue, append JSON lines, rotate files
        # by line count or age.  Runs until finalize() sets self.exit_ and
        # the queue is empty.
        while not self.exit_ or not self.data_queue_.empty():
            item = None
            try:
                item = self.data_queue_.get(block = True, timeout = 10)
            except Exception, e:
                self.spider_.log('get item from queu timeout[%s]' %(e), log.DEBUG)
                item = None
            # Make sure an output file is open before writing.
            while not self.file_fp_:
                self._prepare_writer()
                self.spider_.log('prepare file ptr:[%s]' % self.current_file_name_,
                                 log.INFO)
                time.sleep(1)
            if item:
                line_str = self.gen_json_str(item)
                if line_str:
                    try:
                        #line_zip = zlib.compress(line_str, zlib.Z_BEST_COMPRESSION)
                        self.file_fp_.write(line_str)
                        self.file_fp_.write('\n')
                        self.current_nums_ += 1
                        self.total_items_ += 1
                        # Flush to disk every 1000 lines.
                        if self.current_nums_ > 0 and self.current_nums_ % 1000 == 0:
                            self.spider_.log('Flush result with [%d]' % (self.current_nums_), log.INFO)
                            self.file_fp_.flush()
                    except Exception, e:
                        print time.localtime()
                        print e
                        self.spider_.log('Error while write to file[%s]' % (self.current_file_name_))
            nows = int(time.time())
            # Rotate when the file is full or a non-empty file is too old.
            if self.current_nums_ >= self.max_lines_per_file_ or (self.current_nums_ > 0
                    and (nows - self.last_flush_time_) >= self.gen_file_max_time_threshold_):
                # flush file to disk
                if not self._dump_file():
                    self.spider_.log('flush file error:[%s]' % self.current_file_name_,
                                     log.ERROR)
                self.spider_.log('flush:[ %s ] with [%d]' %(self.current_file_name_,
                                                            self.current_nums_), log.INFO)
        self.spider_.log('page_local_writer manager exit normal', log.INFO)
        self._dump_file()
| [
"[email protected]"
] | |
21a34dcd8837c42f9ea42b7bc1e4e5db25cfe7a5 | f485dff7fcb036868d6e4053a7a6ccd7f95214bf | /week09/employee_091.py | 62e4238ccb5f78116cc7d56777ee671bba48bdc0 | [] | no_license | jamesfallon99/CA117 | aa4f851365aafe8a4888c85e1b8b2f571e2c9b2a | 0055ccbbd710453c9574930b361c26fcde2b9036 | refs/heads/master | 2020-06-28T14:26:06.829418 | 2019-08-02T16:41:38 | 2019-08-02T16:41:38 | 200,254,591 | 3 | 1 | null | null | null | null | UTF-8 | Python | false | false | 858 | py | #!/usr/bin/env python3
class Employee(object):
    """Base employee: a name, a payroll number and zero wages."""

    def __init__(self, name, number):
        self.name = name
        self.number = number

    def wages(self):
        # Base employees earn nothing; subclasses override this.
        return 0

    def __str__(self):
        lines = [
            "Name: {}".format(self.name),
            "Number: {}".format(self.number),
            "Wages: {:.2f}".format(self.wages()),
        ]
        return "\n".join(lines)
class Manager(Employee):
    """Salaried employee; weekly wages are the annual salary over 52 weeks."""

    def __init__(self, name, number, salary):
        super().__init__(name, number)
        self.salary = salary

    def wages(self):
        # Weekly pay derived from the annual salary.
        return self.salary / 52
class AssemblyWorker(Employee):
    """Hourly employee; wages are the hourly rate times hours worked."""

    def __init__(self, name, number, hourly_rate, hours):
        super().__init__(name, number)
        self.hourly_rate = hourly_rate
        self.hours = hours

    def wages(self):
        # Straight time — no overtime handling.
        return self.hourly_rate * self.hours
| [
"[email protected]"
] | |
52be7e9c34bece4e6db3c03156d40c378d72b6ca | a1684facd42cba1cd8af003ccffb530f56582e9a | /backend/pet/admin.py | 94b98dc921d660340f7a676c08544c25cf98bd86 | [] | no_license | dunderlabs/siteong7vidas | e13749863a8f5d64d469b735765044893cc38536 | fb3c529025e05adcc9aab17e1eeada909a193e56 | refs/heads/master | 2021-04-26T21:47:58.638150 | 2018-10-30T15:37:19 | 2018-10-30T15:37:19 | 124,160,847 | 6 | 1 | null | 2018-11-28T02:05:10 | 2018-03-07T01:28:12 | Python | UTF-8 | Python | false | false | 464 | py | from django.contrib import admin
from .models import Pet, PetBreed, PetPelage
class PetAdmin(admin.ModelAdmin):
    # Auto-fill the slug from the pet's name while typing in the admin form.
    prepopulated_fields = {"slug": ("name",)}


class PetBreedAdmin(admin.ModelAdmin):
    # Slug mirrors the breed name.
    prepopulated_fields = {"slug": ("name",)}


class PetPelageAdmin(admin.ModelAdmin):
    # Slug mirrors the pelage name.
    prepopulated_fields = {"slug": ("name",)}


# Register each model with its customised admin options.
admin.site.register(Pet, PetAdmin)
admin.site.register(PetBreed, PetBreedAdmin)
admin.site.register(PetPelage, PetPelageAdmin)
| [
"[email protected]"
] | |
c30f230f2e18f35e186cf375fa987efcef0c253c | 3712a929d1124f514ea7af1ac0d4a1de03bb6773 | /开班笔记/python数据分析机器学习部分/机器学习/day08/kpca.py | 3ccd0a9d11baf3e0fd08e50a8ae07ade3578adfb | [] | no_license | jiyabing/learning | abd82aa3fd37310b4a98b11ea802c5b0e37b7ad9 | 6059006b0f86aee9a74cfc116d2284eb44173f41 | refs/heads/master | 2020-04-02T20:47:33.025331 | 2018-10-26T05:46:10 | 2018-10-26T05:46:10 | 154,779,387 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 926 | py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
import sklearn.datasets as sd
import sklearn.decomposition as dc
import matplotlib.pyplot as mp
# Two concentric noisy circles — a data set linear PCA cannot separate.
x, y = sd.make_circles(n_samples=500, factor=0.2,
                       noise=0.04)
# Kernel PCA with an RBF kernel; fit_inverse_transform allows mapping
# projected points back to the original space.
model = dc.KernelPCA(kernel='rbf',
                     fit_inverse_transform=True,
                     gamma=10)
kpca_x = model.fit_transform(x)
# Plot the raw circles, coloured by class label.
mp.figure('Original', facecolor='lightgray')
mp.title('Original', fontsize=20)
mp.xlabel('x', fontsize=14)
mp.ylabel('y', fontsize=14)
mp.tick_params(labelsize=10)
mp.grid(linestyle=':')
mp.scatter(x[:, 0], x[:, 1], s=60, c=y, cmap='brg',
           alpha=0.5)
# Plot the first two kernel-PCA components of the same samples.
mp.figure('KPCA', facecolor='lightgray')
mp.title('KPCA', fontsize=20)
mp.xlabel('x', fontsize=14)
mp.ylabel('y', fontsize=14)
mp.tick_params(labelsize=10)
mp.grid(linestyle=':')
mp.scatter(kpca_x[:, 0], kpca_x[:, 1], s=60, c=y,
           cmap='brg', alpha=0.5)
mp.show()
| [
"[email protected]"
] | |
82893714841e99045c7d0a2c1bbfa4dc32f9deac | 3d1a8ccef4153b6154c0aa0232787b73f45137ba | /services/customer/server.py | 02bd6b68c181a44732e11ad2a8e09a2eb503637c | [] | no_license | jan25/hotrod-python | a0527930b2afc33ca3589c1cf7ae07814148535a | dbce7df1bc2d764351dd2ba1122078fc525caed7 | refs/heads/master | 2020-06-03T14:59:35.627093 | 2019-06-22T16:52:19 | 2019-06-22T16:52:19 | 191,616,546 | 6 | 0 | null | null | null | null | UTF-8 | Python | false | false | 845 | py | from flask import Flask, request, jsonify
from uwsgidecorators import postfork
import services.common.middleware as middleware
import services.config.settings as config
import services.common.serializer as serializer
from . import db
# Flask application object shared by all route handlers below.
app = Flask(__name__)


# NOTE(review): this function name shadows the imported `postfork`
# decorator; harmless here (the decorator is applied first) but confusing.
@postfork
def postfork():
    # Initialise the tracer in each worker process after fork
    # (uwsgidecorators.postfork semantics).
    middleware.init_tracer('customer')


@app.before_request
def before_request():
    # Delegate request pre-processing (e.g. tracing setup) to middleware.
    return middleware.before_request(request)


@app.after_request
def after_request(response):
    # Delegate response post-processing to middleware.
    return middleware.after_request(response)


@app.route('/customer')
def get_customer():
    # GET /customer?id=<customer_id> -> JSON-serialised customer record.
    customer_id = request.args.get('id')
    customer_obj = db.get_customer_by_id(customer_id)
    return jsonify(serializer.obj_to_json(customer_obj))


def start_server(debug):
    # Bind on all interfaces; the port comes from service configuration.
    app.run(host='0.0.0.0', port=config.CUSTOMER_PORT, debug=debug)
if __name__ == '__main__': start_server(True) | [
"[email protected]"
] | |
1552ae664ea965da697ab12f85e1cc327fb30124 | 6be8aa517e679b33b47d35f100e6590902a8a1db | /DP/Problem19.py | 07c8de548b63717ec4070b31545cf9ef5c3e6a61 | [] | no_license | LeeJuhae/Algorithm-Python | 7ca4762712e5e84d1e277abecb3bf39c9cbd4e56 | 729947b4428205adfbac194a5527b0eeafe1c525 | refs/heads/master | 2023-04-24T01:02:36.430970 | 2021-05-23T07:17:25 | 2021-05-23T07:17:25 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 504 | py | # https://www.acmicpc.net/problem/11049
import sys
# Bind once: all input lines are read through this.
read = sys.stdin.readline

# n: number of matrices; arr[i] = (rows, cols) of matrix i.
n = int(read().strip())
arr = [tuple(map(int, read().strip().split())) for _ in range(n)]
# dp[a][b]: minimum multiplication cost for the chain a..b (-1 = unset).
dp = [[-1 for _ in range(n)] for _ in range(n)]


def mul(a, b):
    # Minimum scalar multiplications to collapse matrices a..b (inclusive),
    # memoized in the module-level dp table (classic matrix-chain DP).
    # NOTE(review): recursion depth grows with n; very large inputs may
    # need sys.setrecursionlimit — confirm the problem's constraints.
    if dp[a][b] != -1:
        return dp[a][b]
    if a == b:
        return 0
    ret = float('inf')
    for k in range(a, b):
        # Split at k: cost(left) + cost(right) + cost of the final multiply.
        ret = min(ret, mul(a, k) + mul(k + 1, b) + arr[a][0] * arr[k][1] * arr[b][1])
    dp[a][b] = ret
    return ret


print(mul(0, n - 1))
| [
"[email protected]"
] | |
646434dfdde5ec5496ec40dd7f80a1a416587ed2 | f155033606399eadfb5fe694479af36e14115467 | /bit-manipulation/sum-vs-xor.py | de6231f5deb69a511a0ad454164e40d58899ad9c | [] | no_license | KadirEmreOto/hackerrank | 7925e5b49f90bbe6fef41cfe4fd980b6ffd9e3f1 | 143d49adc45e4e3e3d9424168b2fdac80b2155d6 | refs/heads/master | 2021-01-11T02:56:03.535273 | 2017-08-11T13:41:55 | 2017-08-11T13:41:55 | 70,905,859 | 10 | 2 | null | 2022-03-16T09:19:13 | 2016-10-14T11:55:02 | Python | UTF-8 | Python | false | false | 120 | py | from math import factorial
# Python 2 script.  n + x == n ^ x exactly when x shares no set bits
# with n, so the answer is 2 ** (number of zero bits in bin(n)).
n = long(raw_input())
if not n: print 1; quit()  # n == 0 is special-cased to print 1
b = bin(n)[2:]  # binary digits of n, no '0b' prefix and no leading zeros
c = b.count('0')  # free positions where x may have a 1
print 1<<c
| [
"[email protected]"
] | |
f269f66869c2a17070ed2079a79139e33939efe5 | 377b908723f157ab30733fa5ff15db90c0be9693 | /build/celex4/catkin_generated/pkg.installspace.context.pc.py | f861cb62d3e2dc6d77d7eb7eb7bef0ed002c193b | [] | no_license | eleboss/celex4_ros | af0b28ec0ba79016ae80e90e0cd3d270a8865fa1 | 28500367e8e28e6e5384036b6f9bd8981a7b932e | refs/heads/master | 2020-04-30T14:03:43.823980 | 2019-03-21T05:59:05 | 2019-03-21T05:59:05 | 176,877,974 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 441 | py | # generated from catkin/cmake/template/pkg.context.pc.in
# Machine-generated catkin package context (see the header comment above);
# hand edits are likely to be overwritten when the template is re-rendered.
CATKIN_PACKAGE_PREFIX = ""
PROJECT_PKG_CONFIG_INCLUDE_DIRS = "".split(';') if "" != "" else []
PROJECT_CATKIN_DEPENDS = "cv_bridge;dvs_msgs;image_transport;roscpp;roslib;sensor_msgs".replace(';', ' ')
PKG_CONFIG_LIBRARIES_WITH_PREFIX = "".split(';') if "" != "" else []
PROJECT_NAME = "celex4"
PROJECT_SPACE_DIR = "/home/ubuntu/Documents/celex4_ros/install"
PROJECT_VERSION = "0.0.0"
| [
"[email protected]"
] | |
2eee67dfdbd5f09719f524851ce744a41a9b3f32 | ce18877752c43eb66f03bdc169e3ef45a1720d15 | /src/apps_common/mailerlite/__init__.py | 2ab0448f4bb1aa6cdcd84a17a2a573dbbf178cb1 | [] | no_license | ajlexgit/robin | 26e8682ae09795acf0f3fc1297d20044285b83df | 25ac1c3455838fc26656cfa16d05b2943d0cbba6 | refs/heads/master | 2021-07-13T22:49:09.177207 | 2017-10-13T07:44:42 | 2017-10-13T07:44:42 | 103,655,240 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,880 | py | """
Newsletter subscription module.
Depends on:
premailer
libs.associative_request
libs.color_field
libs.pipeline
libs.templatetags
Installation:
# settings.py:
INSTALLED_APPS = (
...
'mailerlite',
...
)
SUIT_CONFIG = {
...
{
'app': 'mailerlite',
'icon': 'icon-envelope',
'models': (
'Campaign',
'Subscriber',
'Group',
'MailerConfig',
)
},
}
MAILERLITE_APIKEY = '438b16c79cbd9acea354a1c1ad5eda08'
# urls.py:
...
url(r'^mailerlite/', include('mailerlite.urls', namespace='mailerlite')),
...
# crontab
*/15 * * * * . $HOME/.profile; ~/aor.com/env/bin/python3 ~/aor.com/src/manage.py mailerlite -ig -es -ic -ec
10 * * * * . $HOME/.profile; ~/aor.com/env/bin/python3 ~/aor.com/src/manage.py mailerlite -ig -eg -es -is
Usage:
# views.py:
from mailerlite import SubscribeForm
class IndexView(View):
def get(self, request, *args, **kwargs):
...
return self.render_to_response({
'subscribe_form': SubscribeForm(),
...
})
# template.html:
<form action="" method="post" id="subscribe-form">
{% render_form subscribe_form %}
<input type="submit" value="Subscribe" class="btn">
</form>
"""
# Dotted path to this app's AppConfig (legacy pre-Django-3.2 hook).
default_app_config = 'mailerlite.apps.Config'
| [
"[email protected]"
] | |
ae241182ad08298d4046ba796efca16ca15a0257 | 54f352a242a8ad6ff5516703e91da61e08d9a9e6 | /Source Codes/AtCoder/arc028/A/4640482.py | 31948a2b01b51da02f93d6230fbd993758c38420 | [] | no_license | Kawser-nerd/CLCDSA | 5cbd8a4c3f65173e4e8e0d7ed845574c4770c3eb | aee32551795763b54acb26856ab239370cac4e75 | refs/heads/master | 2022-02-09T11:08:56.588303 | 2022-01-26T18:53:40 | 2022-01-26T18:53:40 | 211,783,197 | 23 | 9 | null | null | null | null | UTF-8 | Python | false | false | 333 | py | n,a,b = map(int,input().split())
# Ant and Bug alternately remove their fixed amounts (a and b) from n;
# the player whose amount is >= the remaining n takes the last bite and
# its name is printed.
turn = 1  # 1: Ant's turn, -1: Bug's turn — flipped each round
while True:
    if turn == 1:
        if n > a:
            n -= a
            turn *= -1
        else:
            print("Ant")
            exit()
    else:
        if n > b:
            n -= b
            turn *= -1
        else:
            print("Bug")
exit() | [
"[email protected]"
] | |
8af54bc9ef8499d1756fe4cccf0d43f3ffde7b28 | 6fa7f99d3d3d9b177ef01ebf9a9da4982813b7d4 | /wZQzNPXkLrHMj4mmz_9.py | 2ef7ee6cc929272d9c4fabf0b66110add30eb2b1 | [] | no_license | daniel-reich/ubiquitous-fiesta | 26e80f0082f8589e51d359ce7953117a3da7d38c | 9af2700dbe59284f5697e612491499841a6c126f | refs/heads/master | 2023-04-05T06:40:37.328213 | 2021-04-06T20:17:44 | 2021-04-06T20:17:44 | 355,318,759 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 52 | py |
def derivative(b, m):
return b * (m ** (b-1))
| [
"[email protected]"
] | |
cf526d0998ec05ba8a704de0e3067c5e2d706869 | 89b45e528f3d495f1dd6f5bcdd1a38ff96870e25 | /AutomateTheBoringStuffWithPython/chapter12/spreadsheet_to_text_files.py | 40698e57cc6c206b11f50b49c18978d9f96df97b | [] | no_license | imatyukin/python | 2ec6e712d4d988335fc815c7f8da049968cc1161 | 58e72e43c835fa96fb2e8e800fe1a370c7328a39 | refs/heads/master | 2023-07-21T13:00:31.433336 | 2022-08-24T13:34:32 | 2022-08-24T13:34:32 | 98,356,174 | 2 | 0 | null | 2023-07-16T02:31:48 | 2017-07-25T22:45:29 | Python | UTF-8 | Python | false | false | 602 | py | #!/usr/bin/env python3
import openpyxl
def main():
spreadsheet = "text_to_spreadsheet.xlsx"
wb = openpyxl.load_workbook(spreadsheet)
sheet = wb.active
nrows = sheet.max_row
ncols = sheet.max_column
for col in range(1, ncols+1):
text_file = "spreadsheet_" + str(col) + ".txt"
with open(text_file, 'w') as f:
for row in range(1, nrows+1):
content = sheet.cell(row=row, column=col).value
if content is None:
continue
f.write(str(content))
if __name__ == "__main__":
main()
| [
"[email protected]"
] | |
4b17c0924e25f282c9da1e9945670cf8ad43f50d | 10b205507a3598da489bfcfa10bea769b3700b07 | /snaffle/snaffle.py | cd4fe50b80793721a82cfb75e639e8c4f18464c9 | [] | no_license | yattom/snaffle | fd65b4aebdb2ad9083ff6bc9be96b366399974f5 | fce2cec664c569a584925dd4d89f1eb7a2acd9a0 | refs/heads/master | 2021-01-10T01:22:49.387838 | 2016-02-10T01:01:17 | 2016-02-10T01:01:17 | 51,367,548 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 809 | py | # coding: utf-8
import time
import logging
import webbrowser
# Module-level logger that streams DEBUG output to stderr
# (StreamHandler's default stream).
logger = logging.getLogger(__name__)
handler = logging.StreamHandler()
handler.setLevel(logging.DEBUG)
logger.setLevel(logging.DEBUG)
logger.addHandler(handler)
import snaffle.ws_server
class Snaffle:
    """Drives the local Snaffle web UI: starts the websocket server, opens
    a browser on it and pushes messages/scripts to the connected page."""

    def __init__(self, start=True):
        # start=False lets callers construct without side effects.
        if start:
            self.start()

    def start(self):
        """Start the websocket server and open the UI in a browser."""
        snaffle.ws_server.start_server_tornado()
        webbrowser.open('http://localhost:9999/index')

    def shutdown(self):
        """Stop the websocket server."""
        snaffle.ws_server.shutdown()

    def write_something(self, msg):
        """Send a raw message string to the connected page."""
        snaffle.ws_server.write_something(msg)

    def send_script(self, script):
        """Send *script* to the page as a JSON message.

        Uses json.dumps so quotes, newlines and backslashes inside the
        script are escaped correctly; the previous str.format template
        produced invalid JSON for any script containing a double quote.
        """
        import json  # local import keeps this fix self-contained
        msg = json.dumps({"type": "script", "content": script})
        self.write_something(msg)
| [
"[email protected]"
] | |
1ca7985800be5d1b8bc8cec044cb5e460feeb211 | 74ed8d533e86d57c7db9eca879a9fb5b979b8eaf | /stanford_corenlp_demo/common.py | 35504554c6705b83898f4a7c6159a7f78dc7a351 | [
"MIT"
] | permissive | GyxChen/AmusingPythonCodes | 97c5a2080d47399080df005a0643eddb56bceb25 | 388e90c78c67b79c23b4a8fc1ebe29f26394a54b | refs/heads/master | 2020-06-23T12:03:56.708448 | 2019-05-15T05:19:57 | 2019-05-15T05:19:57 | 198,617,528 | 0 | 1 | null | 2019-07-24T10:57:29 | 2019-07-24T10:57:29 | null | UTF-8 | Python | false | false | 1,857 | py | # encoding: utf-8
import xml.etree.ElementTree as ETree
import requests
import re
def dataset_xml_iterator(filename):
    """An iterator to convert xml-format dataset to more readable text format.

    Yields (paragraph_text, queries) per <instance>, where queries is a
    list of dicts: {'Text': question text, <correct-flag>: answer text, ...}.
    """
    root = ETree.parse(filename).getroot()
    for instance in root:
        paragraph_text = instance.find('text').text
        question_block = instance.findall('questions')[0]
        parsed_questions = []
        for question in question_block.findall('question'):
            entry = {'Text': question.get('text')}
            for answer in question.findall('answer'):
                entry[answer.get('correct')] = answer.get('text')
            parsed_questions.append(entry)
        yield paragraph_text, parsed_questions
def read_nth_data(filename, n):
    """Read Nth paragraph and corresponding queries (1-based); None if absent."""
    for position, (paragraph, queries) in enumerate(
            dataset_xml_iterator(filename), start=1):
        if position == n:
            return paragraph, queries
    return None
def extract_conceptnet(phrase):
    """Access ConceptNet API and read relational triples as well as their weight and simple example"""
    url_head = 'http://api.conceptnet.io/c/en/'  # access ConceptNet API
    # NOTE(review): blocking, unauthenticated network call with no error
    # handling — bad responses or missing keys raise here; confirm callers
    # tolerate that.
    raw_json = requests.get(url_head + phrase).json()
    edges = raw_json['edges']
    if not edges:  # if edges is empty, which means ConceptNet doesn't contain such concept or node
        return None
    concepts = []
    for edge in edges:
        # Relation/start/end triple parsed out of the edge '@id' URI.
        # NOTE(review): [0] raises IndexError if the '@id' format differs.
        triple = re.findall(r'/a/\[/r/(.*?)/.*?,/c/en/(.*?)/.*?,/c/en/(.*?)/.*?\]', edge['@id'])[0]  # ERE triple
        # Example sentence with the [bracket] markers stripped.
        surface_text = re.sub(r'[\[\]]', '', '' if edge['surfaceText'] is None else edge['surfaceText'])  # example
        weight = edge['weight']  # weight
        concepts.append({'Triple': triple, 'weight': weight, 'example': surface_text})
    return concepts
| [
"[email protected]"
] | |
c6e68b46d7b871e2b7567e4b2422530a93f57df6 | 3dc3bbe607ab7b583eb52dbaae86636eb642960a | /configs/skeleton/posec3d/rgbpose_conv3d/pose_only.py | ad413da6a64196514be0bf0a8fef32008dad7d92 | [
"Apache-2.0"
] | permissive | open-mmlab/mmaction2 | 659c36c6083fd3d9d072e074a8d4b3a50342b9bd | 582b78fd6c3240500d5cacd292339d7d1ddbb056 | refs/heads/main | 2023-08-28T18:14:50.423980 | 2023-08-10T09:20:06 | 2023-08-10T09:20:06 | 278,810,244 | 3,498 | 1,028 | Apache-2.0 | 2023-09-07T06:50:44 | 2020-07-11T07:19:10 | Python | UTF-8 | Python | false | false | 3,987 | py | _base_ = '../../../_base_/default_runtime.py'
model = dict(
type='Recognizer3D',
backbone=dict(
type='ResNet3dSlowOnly',
in_channels=17,
base_channels=32,
num_stages=3,
out_indices=(2, ),
stage_blocks=(4, 6, 3),
conv1_stride_s=1,
pool1_stride_s=1,
inflate=(0, 1, 1),
spatial_strides=(2, 2, 2),
temporal_strides=(1, 1, 1),
dilations=(1, 1, 1)),
cls_head=dict(
type='I3DHead',
in_channels=512,
num_classes=60,
dropout_ratio=0.5,
average_clips='prob'))
dataset_type = 'PoseDataset'
ann_file = 'data/skeleton/ntu60_2d.pkl'
left_kp = [1, 3, 5, 7, 9, 11, 13, 15]
right_kp = [2, 4, 6, 8, 10, 12, 14, 16]
train_pipeline = [
dict(type='UniformSampleFrames', clip_len=32),
dict(type='PoseDecode'),
dict(type='PoseCompact', hw_ratio=1., allow_imgpad=True),
dict(type='Resize', scale=(64, 64), keep_ratio=False),
dict(type='RandomResizedCrop', area_range=(0.56, 1.0)),
dict(type='Resize', scale=(56, 56), keep_ratio=False),
dict(type='Flip', flip_ratio=0.5, left_kp=left_kp, right_kp=right_kp),
dict(type='GeneratePoseTarget', with_kp=True, with_limb=False),
dict(type='FormatShape', input_format='NCTHW_Heatmap'),
dict(type='PackActionInputs')
]
val_pipeline = [
dict(type='UniformSampleFrames', clip_len=32, num_clips=1, test_mode=True),
dict(type='PoseDecode'),
dict(type='PoseCompact', hw_ratio=1., allow_imgpad=True),
dict(type='Resize', scale=(64, 64), keep_ratio=False),
dict(type='GeneratePoseTarget', with_kp=True, with_limb=False),
dict(type='FormatShape', input_format='NCTHW_Heatmap'),
dict(type='PackActionInputs')
]
test_pipeline = [
dict(
type='UniformSampleFrames', clip_len=32, num_clips=10, test_mode=True),
dict(type='PoseDecode'),
dict(type='PoseCompact', hw_ratio=1., allow_imgpad=True),
dict(type='Resize', scale=(64, 64), keep_ratio=False),
dict(
type='GeneratePoseTarget',
with_kp=True,
with_limb=False,
left_kp=left_kp,
right_kp=right_kp),
dict(type='FormatShape', input_format='NCTHW_Heatmap'),
dict(type='PackActionInputs')
]
train_dataloader = dict(
batch_size=16,
num_workers=8,
persistent_workers=True,
sampler=dict(type='DefaultSampler', shuffle=True),
dataset=dict(
type='RepeatDataset',
times=10,
dataset=dict(
type=dataset_type,
ann_file=ann_file,
split='xsub_train',
pipeline=train_pipeline)))
val_dataloader = dict(
batch_size=16,
num_workers=8,
persistent_workers=True,
sampler=dict(type='DefaultSampler', shuffle=False),
dataset=dict(
type=dataset_type,
ann_file=ann_file,
split='xsub_val',
pipeline=val_pipeline,
test_mode=True))
test_dataloader = dict(
batch_size=1,
num_workers=8,
persistent_workers=True,
sampler=dict(type='DefaultSampler', shuffle=False),
dataset=dict(
type=dataset_type,
ann_file=ann_file,
split='xsub_val',
pipeline=test_pipeline,
test_mode=True))
val_evaluator = [dict(type='AccMetric')]
test_evaluator = val_evaluator
train_cfg = dict(
type='EpochBasedTrainLoop', max_epochs=18, val_begin=1, val_interval=1)
val_cfg = dict(type='ValLoop')
test_cfg = dict(type='TestLoop')
param_scheduler = [
dict(
type='CosineAnnealingLR',
eta_min=0,
T_max=18,
by_epoch=True,
convert_to_iter_based=True)
]
optim_wrapper = dict(
optimizer=dict(type='SGD', lr=0.2, momentum=0.9, weight_decay=0.0003),
clip_grad=dict(max_norm=40, norm_type=2))
# Default setting for scaling LR automatically
# - `enable` means enable scaling LR automatically
# or not by default.
# - `base_batch_size` = (8 GPUs) x (16 samples per GPU).
auto_scale_lr = dict(enable=False, base_batch_size=128)
| [
"[email protected]"
] | |
16f19364595328e4296082867545a96c7427556e | da29f1f5b4459fbfec968bb694bedb9586f87b14 | /new_algs/Sequence+algorithms/Binary+search+algorithm/palindromes.py | 72730563f8e9eb671891b4dc6258d98dc0c977f9 | [] | no_license | coolsnake/JupyterNotebook | 547806a45a663f090f313dc3e70f779ad9b213c0 | 20d8df6172906337f81583dabb841d66b8f31857 | refs/heads/master | 2023-01-13T18:55:38.615312 | 2020-11-17T22:55:12 | 2020-11-17T22:55:12 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,685 | py | #!python
"""STARTER CODE FROM NEPTUNIUS"""
import string
import re
# Hint: Use these string constants to ignore capitalization and/or punctuation
# string.ascii_lowercase is 'abcdefghijklmnopqrstuvwxyz'
# string.ascii_uppercase is 'ABCDEFGHIJKLMNOPQRSTUVWXYZ'
# string.ascii_letters is ascii_lowercase + ascii_uppercase
def is_palindrome(text):
    """A string of characters is a palindrome if it reads the same forwards and
    backwards, ignoring punctuation, whitespace, and letter casing.

    Raises AssertionError if *text* is not a string.
    """
    assert isinstance(text, str), 'input is not a string: {}'.format(text)
    # Delegate to the recursive implementation; the iterative one below is
    # interchangeable here.
    return is_palindrome_recursive(text)
def is_palindrome_iterative(text):
    """Iteratively check whether *text* is a palindrome, keeping only
    ASCII letters and ignoring case (two-cursor scan from both ends)."""
    letters = re.sub('[^a-zA-Z]', '', text).upper()
    lo, hi = 0, len(letters) - 1
    while lo < hi:
        if letters[lo] != letters[hi]:
            return False
        lo += 1
        hi -= 1
    return True
def is_palindrome_recursive(text, left=None, right=None):
    """Recursively check whether *text* is a palindrome, keeping only
    ASCII letters and ignoring case.  left/right are internal cursors."""
    if left is None:
        # First call: normalise once, then recurse on cursor positions.
        text = re.sub('[^a-zA-Z]', '', text).upper()
        left, right = 0, len(text) - 1
    if text == '':
        return True
    if text[left] != text[right]:
        return False
    if left >= right:
        return True
    return is_palindrome_recursive(text, left + 1, right - 1)
def main():
    """Check each command-line argument and report PASS/FAIL per string."""
    import sys
    args = sys.argv[1:]  # Ignore script file name
    if not args:
        print('Usage: {} string1 string2 ... stringN'.format(sys.argv[0]))
        print('    checks if each argument given is a palindrome')
        return
    for arg in args:
        is_pal = is_palindrome(arg)
        verdict = 'PASS' if is_pal else 'FAIL'
        linking_verb = 'is' if is_pal else 'is not'
        print('{}: {} {} a palindrome'.format(verdict, repr(arg), linking_verb))
# Manual smoke checks when run without pytest (each prints True/False).
if __name__ == '__main__':
    #print(is_palindrome_iterative("talcat"))
    print(is_palindrome_recursive("TAC!!!Oc at", left=None, right=None))
    print(is_palindrome_iterative("no, on!"))
| [
"[email protected]"
] | |
c9de463f2a5670eae402bcb6f8934038fef09461 | b5550fc728b23cb5890fd58ccc5e1668548dc4e3 | /virt/imagecache.py | 182e8745ec9ade92d2338ff13e8e74b23e599763 | [] | no_license | bopopescu/nova-24 | 0de13f078cf7a2b845cf01e613aaca2d3ae6104c | 3247a7199932abf9718fb3260db23e9e40013731 | refs/heads/master | 2022-11-20T00:48:53.224075 | 2016-12-22T09:09:57 | 2016-12-22T09:09:57 | 282,140,423 | 0 | 0 | null | 2020-07-24T06:24:14 | 2020-07-24T06:24:13 | null | UTF-8 | Python | false | false | 5,230 | py | #coding:utf-8
# Copyright 2013 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo.config import cfg
from nova.compute import task_states
from nova.compute import vm_states
# oslo.config options controlling the image cache manager's behaviour;
# registered on the global CONF below.
imagecache_opts = [
    cfg.IntOpt('image_cache_manager_interval',
               default=2400,
               help='Number of seconds to wait between runs of the image '
                    'cache manager. Set to -1 to disable. '
                    'Setting this to 0 will disable, but this will change in '
                    'the K release to mean "run at the default rate".'),
    # TODO(gilliard): Clean the above message after the K release
    cfg.StrOpt('image_cache_subdirectory_name',
               default='_base',
               help='Where cached images are stored under $instances_path. '
                    'This is NOT the full path - just a folder name. '
                    'For per-compute-host cached images, set to _base_$my_ip'),
    cfg.BoolOpt('remove_unused_base_images',
                default=True,
                help='Should unused base images be removed?'),
    cfg.IntOpt('remove_unused_original_minimum_age_seconds',
               default=(24 * 3600),
               help='Unused unresized base images younger than this will not '
                    'be removed'),
]

CONF = cfg.CONF
CONF.register_opts(imagecache_opts)
# CONF.host is declared in nova.netconf; pulled in so it is usable here.
CONF.import_opt('host', 'nova.netconf')
class ImageCacheManager(object):
    """Base class for the image cache manager.

    This class will provide a generic interface to the image cache manager.
    Hypervisor-specific subclasses implement the NotImplementedError stubs.
    """

    def __init__(self):
        self.remove_unused_base_images = CONF.remove_unused_base_images
        # Task states during which an instance's resize directories exist.
        self.resize_states = [task_states.RESIZE_PREP,
                              task_states.RESIZE_MIGRATING,
                              task_states.RESIZE_MIGRATED,
                              task_states.RESIZE_FINISH]

    def _get_base(self):
        """Returns the base directory of the cached images."""
        raise NotImplementedError()

    def _list_running_instances(self, context, all_instances):
        """List running instances (on all compute nodes).

        This method returns a dictionary with the following keys:
            - used_images: image ref -> (local count, remote count,
              list of instance names using it)
            - image_popularity: image ref -> total usage count
            - instance_names: directory names an instance may occupy
        """
        used_images = {}
        image_popularity = {}
        instance_names = set()

        for instance in all_instances:
            # NOTE(mikal): "instance name" here means "the name of a directory
            # which might contain an instance" and therefore needs to include
            # historical permutations as well as the current one.
            instance_names.add(instance.name)
            instance_names.add(instance.uuid)
            # Mid-resize (or resized) instances also own '<name>_resize' dirs.
            if (instance.task_state in self.resize_states or
                    instance.vm_state == vm_states.RESIZED):
                instance_names.add(instance.name + '_resize')
                instance_names.add(instance.uuid + '_resize')

            for image_key in ['image_ref', 'kernel_id', 'ramdisk_id']:
                image_ref_str = getattr(instance, image_key)
                if image_ref_str is None:
                    continue
                # Tally usage, split by whether the instance runs on this host.
                local, remote, insts = used_images.get(image_ref_str,
                                                       (0, 0, []))
                if instance.host == CONF.host:
                    local += 1
                else:
                    remote += 1
                insts.append(instance.name)
                used_images[image_ref_str] = (local, remote, insts)

                image_popularity.setdefault(image_ref_str, 0)
                image_popularity[image_ref_str] += 1

        return {'used_images': used_images,
                'image_popularity': image_popularity,
                'instance_names': instance_names}

    def _list_base_images(self, base_dir):
        """Return a list of the images present in _base.

        This method returns a dictionary with the following keys:
            - unexplained_images
            - originals
        """
        # Base implementation reports nothing; subclasses inspect base_dir.
        return {'unexplained_images': [],
                'originals': []}

    def _age_and_verify_cached_images(self, context, all_instances, base_dir):
        """Ages and verifies cached images."""
        raise NotImplementedError()

    def update(self, context, all_instances):
        """The cache manager.

        This will invoke the cache manager. This will update the cache
        according to the defined cache management scheme. The information
        populated in the cached stats will be used for the cache management.
        """
        raise NotImplementedError()
| [
"[email protected]"
] | |
42b9566db4361ab8b254d2a2264e24c1714fe831 | 1285703d35b5a37734e40121cd660e9c1a73b076 | /leetcode/trees/979_distribute_coins_in_binary_tree.py | bcc55f5a0967dd5c383882f927dcf475948b6d2e | [] | no_license | takin6/algorithm-practice | 21826c711f57131108168775f08e4e13d07a3b38 | f4098bea2085a77d11c29e1593b3cc3f579c24aa | refs/heads/master | 2022-11-30T09:40:58.083766 | 2020-08-07T22:07:46 | 2020-08-07T22:07:46 | 283,609,862 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,101 | py | # Definition for a binary tree node.
class TreeNode:
    """A binary-tree node holding a value and two child links."""

    def __init__(self, x):
        self.val = x
        # Children are attached by the caller after construction.
        self.left = None
        self.right = None
class Solution:
    def distributeCoins(self, root: "TreeNode") -> int:
        """Return the minimum number of moves so every node holds one coin,
        where a move transfers one coin along one edge.

        Post-order DFS: each subtree reports its coin excess (coins minus
        nodes); every excess coin — positive or negative — must cross the
        edge to the parent once, so |excess| per child edge is summed.
        (The leftover debug print inside the DFS was removed.)
        """
        self.res = 0

        def dfs(node):
            if node is None:
                return 0
            left_excess = dfs(node.left)
            right_excess = dfs(node.right)
            # Coins flowing over this node's two child edges.
            self.res += abs(left_excess) + abs(right_excess)
            # Keep one coin here; pass the remainder up (may be negative).
            return node.val + left_excess + right_excess - 1

        dfs(root)
        return self.res
# Ad-hoc manual checks (printed, not asserted).
# t = TreeNode(3)
# t.left = TreeNode(0)
# t.right = TreeNode(0)
# print(Solution().distributeCoins(t))   # expected 2
# print("---------------")
# t = TreeNode(0)
# t.left = TreeNode(3)
# t.right = TreeNode(0)
# print(Solution().distributeCoins(t))   # expected 3
print("---------------")
# Tree [1,0,0,null,3]: root 1, two zero children, left child has right child 3.
t = TreeNode(1)
t.left = TreeNode(0)
t.right = TreeNode(0)
t.left.right = TreeNode(3)
print(Solution().distributeCoins(t))  # expected 4
| [
"[email protected]"
] | |
b66c5203b859c841cc828a61e62cc99f9eb553fa | f6f632bee57875e76e1a2aa713fdbe9f25e18d66 | /python/_0001_0500/0494_target-sum.py | 7deeae6aad4be02d4582bbd2b5c096fd901b4cfb | [] | no_license | Wang-Yann/LeetCodeMe | b50ee60beeeb3661869bb948bef4fbe21fc6d904 | 44765a7d89423b7ec2c159f70b1a6f6e446523c2 | refs/heads/master | 2023-08-07T05:31:23.428240 | 2021-09-30T15:33:53 | 2021-09-30T15:33:53 | 253,497,185 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,213 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Author : Rock Wayne
# @Created : 2020-05-07 08:00:00
# @Last Modified : 2020-05-07 08:00:00
# @Mail : [email protected]
# @Version : alpha-1.0
"""
# 给定一个非负整数数组,a1, a2, ..., an, 和一个目标数,S。现在你有两个符号 + 和 -。对于数组中的任意一个整数,你都可以从 + 或 -中选
# 择一个符号添加在前面。
#
# 返回可以使最终数组和为目标数 S 的所有添加符号的方法数。
#
# 示例 1:
#
# 输入: nums: [1, 1, 1, 1, 1], S: 3
# 输出: 5
# 解释:
#
# -1+1+1+1+1 = 3
# +1-1+1+1+1 = 3
# +1+1-1+1+1 = 3
# +1+1+1-1+1 = 3
# +1+1+1+1-1 = 3
#
# 一共有5种方法让最终目标和为3。
#
#
# 注意:
#
#
# 数组非空,且长度不会超过20。
# 初始的数组的和不会超过1000。
# 保证返回的最终结果能被32位整数存下。
#
# Related Topics 深度优先搜索 动态规划
"""
import functools
from typing import List
import pytest
# leetcode submit region begin(Prohibit modification and deletion)
class Solution:
    def findTargetSumWays(self, nums: List[int], S: int) -> int:
        """Count sign assignments of ``nums`` whose signed sum equals ``S``.

        Knapsack reduction: let P be the positively-signed subset and N the
        rest.  Then sum(P) - sum(N) = S and sum(P) + sum(N) = total, so
        sum(P) = (total + S) / 2.  Count subsets of nums summing to that
        target with a one-dimensional subset-sum DP.
        """
        total = sum(nums)
        # No assignment can reach beyond +/- total.  The original guard
        # (`total < S`) missed large *negative* S, which produced a
        # negative dp size and crashed with an IndexError.
        if total < abs(S):
            return 0
        tmp = total + S
        # (total + S) must be even for an integer subset sum to exist.
        if tmp & 0b1:
            return 0
        target = tmp // 2
        # dp[v] = number of subsets seen so far that sum exactly to v.
        dp = [0] * (target + 1)
        dp[0] = 1
        for num in nums:
            # Iterate downwards so each num is used at most once (0/1 knapsack).
            for v in range(target, num - 1, -1):
                dp[v] += dp[v - num]
        return dp[target]
# leetcode submit region end(Prohibit modification and deletion)
class Solution1:
    def findTargetSumWays(self, nums: List[int], S: int) -> int:
        """Memoized depth-first enumeration of all +/- sign assignments.

        Caching on (index, running sum) collapses the naive exponential
        search to O(n * range-of-sums) states.
        """
        n = len(nums)

        @functools.lru_cache(None)
        def count(idx, running):
            if idx == n:
                return 1 if running == S else 0
            return (count(idx + 1, running + nums[idx])
                    + count(idx + 1, running - nums[idx]))

        return count(0, 0)
@pytest.mark.parametrize("kw,expected", [
    [dict(nums=[1, 1, 1, 1, 1], S=3), 5],
])
def test_solutions(kw, expected):
    # Both the knapsack (Solution) and memoized-DFS (Solution1)
    # implementations must agree on the canonical example.
    assert Solution().findTargetSumWays(**kw) == expected
    assert Solution1().findTargetSumWays(**kw) == expected
if __name__ == '__main__':
    # Run this file's pytest cases directly (quiet, colored, unbuffered).
    pytest.main(["-q", "--color=yes", "--capture=no", __file__])
| [
"[email protected]"
] | |
43f91e28b0ab99020b437977fe568b429178b0b3 | 068d271e241d8cdb46dbf4243166e4b8ee7025b2 | /day10/homework/FTP/server/core/server_common.py | 18fad1a82f4d7f6fccba1a4c2bf4fd51ff3efa68 | [] | no_license | caiqinxiong/python | f6e226e76cb62aac970bcfbcb6c8adfc64858b60 | 9029f6c528d2cb742b600af224e803baa74cbe6a | refs/heads/master | 2023-05-26T19:41:34.911885 | 2020-05-15T09:02:08 | 2020-05-15T09:02:08 | 195,261,757 | 1 | 0 | null | 2021-06-10T23:33:33 | 2019-07-04T15:01:42 | JavaScript | UTF-8 | Python | false | false | 5,055 | py | # -*- coding: utf-8 -*-
__author__ = 'caiqinxiong_cai'
# 2019/9/3 14:35
import struct
import json
import os
import sys
import hashlib
from core.server_auth import ServerAuth as sa
from core.log import Log as log
from conf import settings as ss
class Common:
    '''Shared server-side helpers: length-prefixed socket I/O, a console
    progress bar, disk-quota bookkeeping and resumable file transfer.'''

    @staticmethod
    def mySend(conn, msgb, dic=False):
        '''Send one message with a fixed 4-byte length header so the peer can
        frame it (avoids TCP "sticky packet" issues).  When ``dic`` is true,
        ``msgb`` is a JSON-serializable object rather than bytes.'''
        if dic: msgb = json.dumps(msgb).encode('utf-8')
        len_msg = len(msgb)
        pack_len = struct.pack('i', len_msg)
        conn.send(pack_len)
        conn.send(msgb)

    @staticmethod
    def myRecv(conn, dic=False):
        '''Receive one length-prefixed message (counterpart of mySend).'''
        pack_len = conn.recv(4)  # struct framing: fixed 4-byte length header precedes the payload
        len_msg = struct.unpack('i', pack_len)[0]  # unpack yields a tuple; take the length
        # NOTE(review): recv may return fewer than len_msg bytes for large
        # payloads — confirm message sizes stay within one segment.
        msg_b = conn.recv(len_msg)
        if dic: msg_b = json.loads(msg_b.decode('utf-8'))
        return msg_b

    @staticmethod
    def processBar(num, total):
        '''Render a one-line console progress bar for num/total.'''
        rate = num / total
        rate_num = int(rate * 100)
        bar = ('>' * rate_num, rate_num,)  # the bar glyphs and the percentage
        r = '\r%s>%d%%\n' % bar if rate_num == 100 else '\r%s>%d%%' % bar
        sys.stdout.write(r)  # '\r' overwrites the current line in place
        # NOTE(review): returns the flush *method* without calling it; callers
        # appear to ignore the return — probably meant sys.stdout.flush().
        return sys.stdout.flush  # keep output un-buffered for live updates

    @staticmethod
    def updateQuota(file, name, quota_new):
        '''Rewrite the user file, replacing ``name``'s quota with quota_new
        (records are "user|password|quota" lines); swap via a .bak file.'''
        with open(file, mode='r', encoding='utf-8') as f1, open(file + '.bak', mode='w', encoding='utf-8') as f2:
            for line in f1:
                if line.strip():
                    # NOTE(review): substring match — a name that is a prefix of
                    # another ('ab' in 'abc') also matches; confirm delimiting.
                    if name in line:
                        usr, pwd, quota_old = line.split('|')
                        line = usr + '|' + pwd + '|' + quota_new + '\n'
                    f2.write(line)
        os.remove(file)
        os.rename(file + '.bak', file)

    @staticmethod
    def checkQuota(file, dic):
        '''Check the requesting user's remaining disk quota against the
        upload size and record flag/msg/total/quota fields into ``dic``.'''
        for n, p, q in sa.readInfo(file):
            if dic['name'] == n:
                dic['msg'] = '用户%s当前磁盘配额剩余:%s字节\n上传文件大小为:%s字节' % (dic['name'], q, dic['filesize'])
                num = int(q) - int(dic['filesize'])
                dic['flag'] = False if num < 0 else True
                if not dic['flag']: dic['msg'] = '%s用户磁盘配额不足!\n' % dic['name'] + dic['msg']
                dic['total'] = q
                dic['quota'] = str(num)
        return dic

    @classmethod
    def startTransfer(cls, conn, dic, kind, file, mode, b_size=1024000):
        '''Common transfer loop for upload ('上传') and download ('下载'),
        with resume support, an MD5 digest computed on the fly, and a final
        status report sent back to the client.'''
        md5 = hashlib.md5()  # digest the stream as it flows; no second file pass needed
        if dic['exist_size']: log.debug('文件上次已经%s了%s字节,开始断点续传!' % (kind, dic['exist_size']))
        with open(file, mode) as f:
            if kind == '下载': f.seek(dic['exist_size'])  # resume: skip bytes already sent
            while dic['filesize'] > 0:
                if kind == '下载':
                    line = f.read(b_size)
                    conn.send(line)  # sticky packets are fine here; only the total matters
                elif kind == '上传':
                    line = conn.recv(b_size)  # ditto: any chunking is acceptable
                    f.write(line)
                dic['exist_size'] += len(line)  # running total, drives the progress bar
                dic['filesize'] -= len(line)  # loop exit condition
                cls.processBar(dic['exist_size'], dic['total_size'])
                md5.update(line)
        dic['server_md5'] = md5.hexdigest()  # digest of the data this side handled
        dic['client_md5'] = cls.myRecv(conn).decode('utf-8')  # peer's digest
        dic['msg'] = 'MD5校验OK,文件传输成功!' if dic['client_md5'] == dic['server_md5'] else 'MD5不一致,文件传输失败!'
        # ".find('成功') >= 0" i.e. the message reports success:
        if not dic['msg'].find('成功') < 0 and kind == '上传':
            cls.updateQuota(ss.USER_FILE, dic['name'], dic['quota'])  # commit quota only on success
            dic['msg'] = dic['msg'] + '\n文件上传位置:' + dic['upload_file'] + '\nMD5值为:' + dic['server_md5'] + '\n磁盘配额剩余:%s字节' % dic['quota']
        elif not dic['msg'].find('成功') < 0 and kind == '下载':
            dic['msg'] = dic['msg'] + '\n文件下载位置:' + dic['download_file'] + '\nMD5值为:' + dic['server_md5']
        log.readAndWrite(dic['msg'])
        cls.mySend(conn, dic, True)
        return dic

    @classmethod
    def startGetFile(cls, conn, dic):
        '''Client downloads a file from the server (read binary).'''
        return cls.startTransfer(conn, dic, kind='下载', file=dic['file_path'], mode='rb')

    @classmethod
    def startPutFile(cls, conn, dic):
        '''Client uploads a file to the server (append binary, for resume).'''
        return cls.startTransfer(conn, dic, kind='上传', file=dic['upload_file'], mode='ab')
| [
"[email protected]"
] | |
bd232b3367effd7cfbda818926e79471f68ef3ce | 5b4fe473179b5fadaf59ec96d55b2ec4cb326f65 | /test/runtime/frontend_test/chainer_test/pow_var_var_test.py | 608f866af8cbbd74d77544442bc55e0e6a2adf6b | [
"Zlib",
"MIT"
] | permissive | TarrySingh/webdnn | 13d3f1ec4936916abacfb67e270f48571e2fcff2 | b31b19de0798d8ca198b78d19cb06e4fce1bc260 | refs/heads/master | 2021-05-07T02:24:47.500746 | 2017-11-13T13:00:24 | 2017-11-13T13:00:24 | 110,582,816 | 0 | 1 | null | 2017-11-13T18:03:46 | 2017-11-13T18:03:46 | null | UTF-8 | Python | false | false | 1,044 | py | import chainer
import numpy as np
from test.util import generate_kernel_test_case
from webdnn.frontend.chainer.converter import ChainerConverter
def test():
    # Element-wise power of two random fp32 tensors of identical shape: y = x1 ** x2.
    vx1 = chainer.Variable(np.random.rand(2, 4, 6, 8).astype(np.float32))
    vx2 = chainer.Variable(np.random.rand(2, 4, 6, 8).astype(np.float32))
    vy = vx1 ** vx2

    graph = ChainerConverter().convert([vx1, vx2], [vy])

    x1, x2 = graph.inputs
    y, = graph.outputs

    generate_kernel_test_case(
        description=f"[chainer] F.PowVarVar",
        graph=graph,
        inputs={x1: vx1.data, x2: vx2.data},
        expected={y: vy.data},
    )
def test_itself():
    # x ** x with a single shared input variable.
    # NOTE(review): unlike test(), no .astype(np.float32) — the input stays
    # float64; confirm that is intended.
    vx = chainer.Variable(np.random.rand(2, 4, 6, 8))
    vy = vx ** vx

    graph = ChainerConverter().convert([vx], [vy])

    x, = graph.inputs
    y, = graph.outputs

    generate_kernel_test_case(
        description=f"[chainer] F.PowVarVar itself",
        graph=graph,
        inputs={x: vx.data},
        expected={y: vy.data},
    )
| [
"[email protected]"
] | |
0f36fc3d21dc7041777d4a12b840417a029e470f | 9929ba720faf432a5bf3f5cc51dc9f429c24cb84 | /QUANTTOOLS/QAStockETL/QASU/save_usstock_alpha.py | a924c44ce2d05151a07caabb07f54d287cf3f1c4 | [] | no_license | chaopaoo12/QuantTools | 45fb344fc085bd7a40d94f646d0982d6b93db1a8 | 2bb1c5ad6aab3d454cfe32b6e6c86107992bed0c | refs/heads/master | 2023-08-18T04:03:11.944128 | 2023-08-13T10:58:49 | 2023-08-13T10:58:49 | 174,860,433 | 9 | 11 | null | null | null | null | UTF-8 | Python | false | false | 8,002 | py | from QUANTAXIS.QAUtil import (DATABASE, QA_util_log_info,QA_util_to_json_from_pandas,QA_util_today_str)
from QUANTTOOLS.QAStockETL.QAUtil import (QA_util_get_trade_range, QA_util_if_trade)
from QUANTTOOLS.QAStockETL.QAFetch import QA_fetch_usstock_list
from QUANTTOOLS.QAStockETL.QAFetch import (QA_fetch_get_usstock_alpha,
QA_fetch_get_usstock_alpha101)
import pymongo
import gc
def QA_SU_save_usstock_alpha_day(code = None, start_date = None, end_date = None, client=DATABASE, ui_log = None, ui_progress = None):
    '''Save US-stock Alpha191 factor data, one trading day at a time.

    Iterates every US trading day in [start_date, end_date] (defaults:
    2009-01-01 .. today), fetches Alpha191 factors for ``code`` (default:
    all listed US stocks) and bulk-inserts them into ``client.usstock_alpha``.
    Failing days are collected and logged at the end.
    '''
    if end_date is None:
        end_date = QA_util_today_str()
    if start_date is None:
        start_date = '2009-01-01'
    deal_date_list = QA_util_get_trade_range(start_date, end_date, 'us')
    if code is None:
        code = list(QA_fetch_usstock_list()['code'])
    stock_alpha = client.usstock_alpha
    # Unique compound index keeps re-runs idempotent; duplicates are
    # rejected by the ordered=False bulk inserts below.
    stock_alpha.create_index([("code", pymongo.ASCENDING), ("date_stamp", pymongo.ASCENDING)], unique=True)
    err = []

    def __saving_work(date, code):
        # Fetch and insert one day's factors; remember the day on failure.
        try:
            QA_util_log_info(
                '##JOB01 Now Saving USStock Alpha191==== {}'.format(str(date)), ui_log)
            data = QA_fetch_get_usstock_alpha(code, date)
            if data is not None:
                stock_alpha.insert_many(QA_util_to_json_from_pandas(data), ordered=False)
            gc.collect()
        except Exception as error0:
            print(error0)
            err.append(str(date))

    # enumerate() replaces the original list.index() calls, which were
    # O(n) per iteration and wrong when a date appeared twice.  The stray
    # ui_log argument to str.format (silently ignored) was dropped.
    for pos, item in enumerate(deal_date_list, start=1):
        QA_util_log_info('The {} of Total {}'.format(pos, len(deal_date_list)))
        percent = pos / len(deal_date_list) * 100
        strProgressToLog = 'DOWNLOAD PROGRESS {}'.format(str(float(percent))[0:4] + '%')
        intProgressToLog = int(percent)
        QA_util_log_info(strProgressToLog, ui_log= ui_log, ui_progress= ui_progress, ui_progress_int_value= intProgressToLog)
        if QA_util_if_trade(item):
            __saving_work(item, code)
    if not err:
        QA_util_log_info('SUCCESS save USStock Alpha191 ^_^', ui_log)
    else:
        QA_util_log_info(' ERROR CODE \n ', ui_log)
        QA_util_log_info(err, ui_log)
def QA_SU_save_usstock_alpha_his(code = None, start_date = None, end_date = None, client=DATABASE, ui_log = None, ui_progress = None):
    '''Historical backfill of US-stock Alpha191 factors for a fixed ``code``
    set over every US trading day in [start_date, end_date].

    NOTE(review): the original docstring mentioned a reverse lookup of four
    quarters of financial reports — that looks copy-pasted; this body only
    saves Alpha191 factors.  Also near-duplicate of
    QA_SU_save_usstock_alpha_day (this variant omits gc.collect()).
    '''
    if code is None:
        code = list(QA_fetch_usstock_list()['code'])
    if end_date is None:
        end_date = QA_util_today_str()
    if start_date is None:
        start_date = '2009-01-01'
    deal_date_list = QA_util_get_trade_range(start_date, end_date, 'us')
    stock_alpha = client.usstock_alpha
    # Unique (code, date_stamp) index keeps re-runs idempotent.
    stock_alpha.create_index([("code", pymongo.ASCENDING), ("date_stamp", pymongo.ASCENDING)], unique=True)
    err = []
    def __saving_work(code, date):
        # Fetch and insert one day's factors; remember the day on failure.
        try:
            QA_util_log_info(
                '##JOB01 Now Saving USStock Alpha191==== {}'.format(str(date)), ui_log)
            data = QA_fetch_get_usstock_alpha(code, date)
            if data is not None:
                stock_alpha.insert_many(QA_util_to_json_from_pandas(data), ordered=False)
        except Exception as error0:
            print(error0)
            err.append(str(date))
    for item in deal_date_list:
        # NOTE(review): list.index() is O(n) per iteration and wrong on
        # duplicate dates; consider enumerate().
        QA_util_log_info('The {} of Total {}'.format
                         ((deal_date_list.index(item) +1), len(deal_date_list)))
        strProgressToLog = 'DOWNLOAD PROGRESS {}'.format(str(float((deal_date_list.index(item) +1) / len(deal_date_list) * 100))[0:4] + '%', ui_log)
        intProgressToLog = int(float((deal_date_list.index(item) + 1)/ len(deal_date_list) * 100))
        QA_util_log_info(strProgressToLog, ui_log= ui_log, ui_progress= ui_progress, ui_progress_int_value= intProgressToLog)
        if QA_util_if_trade(item) == True:
            __saving_work(code, item)
    if len(err) < 1:
        QA_util_log_info('SUCCESS save USStock Alpha191 ^_^', ui_log)
    else:
        QA_util_log_info(' ERROR CODE \n ', ui_log)
        QA_util_log_info(err, ui_log)
def QA_SU_save_usstock_alpha101_day(code = None, start_date = None, end_date = None, client=DATABASE, ui_log = None, ui_progress = None):
    '''Save US-stock Alpha101 factor data, one symbol at a time.

    For each symbol in ``code`` (default: all listed US stocks) fetches
    Alpha101 factors over [start_date, end_date] (defaults: 2009-01-01 ..
    today) and bulk-inserts them into ``client.usstock_alpha101``.
    Failing symbols are collected and logged at the end.
    '''
    if end_date is None:
        end_date = QA_util_today_str()
    if start_date is None:
        start_date = '2009-01-01'
    codes = code
    if codes is None:
        codes = list(QA_fetch_usstock_list()['code'])
    stock_alpha = client.usstock_alpha101
    # Unique compound index keeps re-runs idempotent; duplicates are
    # rejected by the ordered=False bulk inserts below.
    stock_alpha.create_index([("code", pymongo.ASCENDING), ("date_stamp", pymongo.ASCENDING)], unique=True)
    err = []

    def __saving_work(code, start, end):
        # Fetch and insert one symbol's factors; remember the symbol on failure.
        try:
            QA_util_log_info(
                '##JOB01 Now Saving USStock Alpha101==== {}'.format(str(code)), ui_log)
            data = QA_fetch_get_usstock_alpha101(code, start, end)
            if data is not None:
                stock_alpha.insert_many(QA_util_to_json_from_pandas(data), ordered=False)
            gc.collect()
        except Exception as error0:
            print(error0)
            err.append(str(code))

    # enumerate() replaces the original list.index() calls, which were
    # O(n) per iteration and wrong on duplicate symbols.  The stray
    # ui_log argument to str.format (silently ignored) was dropped.
    for pos, code in enumerate(codes, start=1):
        QA_util_log_info('The {} of Total {}'.format(pos, len(codes)))
        percent = pos / len(codes) * 100
        strProgressToLog = 'DOWNLOAD PROGRESS {}'.format(str(float(percent))[0:4] + '%')
        intProgressToLog = int(percent)
        QA_util_log_info(strProgressToLog, ui_log= ui_log, ui_progress= ui_progress, ui_progress_int_value= intProgressToLog)
        __saving_work(code, start_date, end_date)
    if not err:
        QA_util_log_info('SUCCESS save USStock Alpha101 ^_^', ui_log)
    else:
        QA_util_log_info(' ERROR CODE \n ', ui_log)
        QA_util_log_info(err, ui_log)
def QA_SU_save_usstock_alpha101_his(code = None, start_date = None, end_date = None, client=DATABASE, ui_log = None, ui_progress = None):
    '''Historical backfill of US-stock Alpha101 factors.

    NOTE(review): this body is an exact duplicate of
    QA_SU_save_usstock_alpha101_day — consider delegating to it.
    '''
    if end_date is None:
        end_date = QA_util_today_str()
    if start_date is None:
        start_date = '2009-01-01'
    codes = code
    if codes is None:
        codes = list(QA_fetch_usstock_list()['code'])
    stock_alpha = client.usstock_alpha101
    # Unique (code, date_stamp) index keeps re-runs idempotent.
    stock_alpha.create_index([("code", pymongo.ASCENDING), ("date_stamp", pymongo.ASCENDING)], unique=True)
    err = []
    def __saving_work(code,start,end):
        # Fetch and insert one symbol's factors; remember the symbol on failure.
        try:
            QA_util_log_info(
                '##JOB01 Now Saving USStock Alpha101==== {}'.format(str(code)), ui_log)
            data = QA_fetch_get_usstock_alpha101(code,start,end)
            if data is not None:
                stock_alpha.insert_many(QA_util_to_json_from_pandas(data), ordered=False)
            gc.collect()
        except Exception as error0:
            print(error0)
            err.append(str(code))
    for code in codes:
        # NOTE(review): list.index() is O(n) per iteration and wrong on
        # duplicate symbols; consider enumerate().
        QA_util_log_info('The {} of Total {}'.format
                         ((codes.index(code) +1), len(codes)))
        strProgressToLog = 'DOWNLOAD PROGRESS {}'.format(str(float((codes.index(code) +1) / len(codes) * 100))[0:4] + '%', ui_log)
        intProgressToLog = int(float((codes.index(code) +1) / len(codes) * 100))
        QA_util_log_info(strProgressToLog, ui_log= ui_log, ui_progress= ui_progress, ui_progress_int_value= intProgressToLog)
        __saving_work(code,start_date,end_date)
    if len(err) < 1:
        QA_util_log_info('SUCCESS save USStock Alpha101 ^_^', ui_log)
    else:
        QA_util_log_info(' ERROR CODE \n ', ui_log)
        QA_util_log_info(err, ui_log)
if __name__ == '__main__':
pass | [
"[email protected]"
] | |
e49a38be69dc29f6136d40d0aeb977f4023ebb4a | c130a094e04eb448201ca2ab8ed4fe56cd1d80bc | /samples/client/petstore/python_disallowAdditionalPropertiesIfNotPresent/petstore_api/model/array_test.py | 3d1146f50cbcf105ed811bf584fd8d1b4295846d | [
"Apache-2.0"
] | permissive | janweinschenker/openapi-generator | 83fb57f9a5a94e548e9353cbf289f4b4172a724e | 2d927a738b1758c2213464e10985ee5124a091c6 | refs/heads/master | 2022-02-01T17:22:05.604745 | 2022-01-19T10:43:39 | 2022-01-19T10:43:39 | 221,860,152 | 1 | 0 | Apache-2.0 | 2019-11-15T06:36:25 | 2019-11-15T06:36:24 | null | UTF-8 | Python | false | false | 11,599 | py | """
OpenAPI Petstore
This spec is mainly for testing Petstore server and contains fake endpoints, models. Please do not use this for any other purpose. Special characters: \" \\ # noqa: E501
The version of the OpenAPI document: 1.0.0
Generated by: https://openapi-generator.tech
"""
import re # noqa: F401
import sys # noqa: F401
from petstore_api.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
OpenApiModel
)
from petstore_api.exceptions import ApiAttributeError
def lazy_import():
    # Generator pattern: import model dependencies lazily and publish them
    # via globals() to break circular imports between generated modules.
    from petstore_api.model.read_only_first import ReadOnlyFirst
    globals()['ReadOnlyFirst'] = ReadOnlyFirst
# Generator-owned file: regenerate with OpenAPI Generator rather than hand-editing.
class ArrayTest(ModelNormal):
    """NOTE: This class is auto generated by OpenAPI Generator.
    Ref: https://openapi-generator.tech

    Do not edit the class manually.

    Attributes:
      allowed_values (dict): The key is the tuple path to the attribute
          and the for var_name this is (var_name,). The value is a dict
          with a capitalized key describing the allowed value and an allowed
          value. These dicts store the allowed enum values.
      attribute_map (dict): The key is attribute name
          and the value is json key in definition.
      discriminator_value_class_map (dict): A dict to go from the discriminator
          variable value to the discriminator class name.
      validations (dict): The key is the tuple path to the attribute
          and the for var_name this is (var_name,). The value is a dict
          that stores validations for max_length, min_length, max_items,
          min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
          inclusive_minimum, and regex.
      additional_properties_type (tuple): A tuple of classes accepted
          as additional properties values.
    """

    allowed_values = {
    }

    validations = {
    }

    additional_properties_type = None

    _nullable = False

    @cached_property
    def openapi_types():
        """
        This must be a method because a model may have properties that are
        of type self, this must run after the class is loaded

        Returns
            openapi_types (dict): The key is attribute name
                and the value is attribute type.
        """
        lazy_import()
        return {
            'array_of_string': ([str],),  # noqa: E501
            'array_array_of_integer': ([[int]],),  # noqa: E501
            'array_array_of_model': ([[ReadOnlyFirst]],),  # noqa: E501
        }

    @cached_property
    def discriminator():
        return None

    attribute_map = {
        'array_of_string': 'array_of_string',  # noqa: E501
        'array_array_of_integer': 'array_array_of_integer',  # noqa: E501
        'array_array_of_model': 'array_array_of_model',  # noqa: E501
    }

    read_only_vars = {
    }

    _composed_schemas = {}

    @classmethod
    @convert_js_args_to_python_args
    def _from_openapi_data(cls, *args, **kwargs):  # noqa: E501
        """ArrayTest - a model defined in OpenAPI

        Keyword Args:
            _check_type (bool): if True, values for parameters in openapi_types
                                will be type checked and a TypeError will be
                                raised if the wrong type is input.
                                Defaults to True
            _path_to_item (tuple/list): This is a list of keys or values to
                                drill down to the model in received_data
                                when deserializing a response
            _spec_property_naming (bool): True if the variable names in the input data
                                are serialized names, as specified in the OpenAPI document.
                                False if the variable names in the input data
                                are pythonic names, e.g. snake case (default)
            _configuration (Configuration): the instance to use when
                                deserializing a file_type parameter.
                                If passed, type conversion is attempted
                                If omitted no type conversion is done.
            _visited_composed_classes (tuple): This stores a tuple of
                                classes that we have traveled through so that
                                if we see that class again we will not use its
                                discriminator again.
                                When traveling through a discriminator, the
                                composed schema that is
                                is traveled through is added to this set.
                                For example if Animal has a discriminator
                                petType and we pass in "Dog", and the class Dog
                                allOf includes Animal, we move through Animal
                                once using the discriminator, and pick Dog.
                                Then in Dog, we will make an instance of the
                                Animal class but this time we won't travel
                                through its discriminator because we passed in
                                _visited_composed_classes = (Animal,)
            array_of_string ([str]): [optional]  # noqa: E501
            array_array_of_integer ([[int]]): [optional]  # noqa: E501
            array_array_of_model ([[ReadOnlyFirst]]): [optional]  # noqa: E501
        """

        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())

        # Deserialization path: bypass __init__ so read-only attributes can be set.
        self = super(OpenApiModel, cls).__new__(cls)

        if args:
            raise ApiTypeError(
                "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                    args,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )

        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = _visited_composed_classes + (self.__class__,)

        for var_name, var_value in kwargs.items():
            if var_name not in self.attribute_map and \
                    self._configuration is not None and \
                    self._configuration.discard_unknown_keys and \
                    self.additional_properties_type is None:
                # discard variable.
                continue
            setattr(self, var_name, var_value)
        return self

    required_properties = set([
        '_data_store',
        '_check_type',
        '_spec_property_naming',
        '_path_to_item',
        '_configuration',
        '_visited_composed_classes',
    ])

    @convert_js_args_to_python_args
    def __init__(self, *args, **kwargs):  # noqa: E501
        """ArrayTest - a model defined in OpenAPI

        Keyword Args:
            _check_type (bool): if True, values for parameters in openapi_types
                                will be type checked and a TypeError will be
                                raised if the wrong type is input.
                                Defaults to True
            _path_to_item (tuple/list): This is a list of keys or values to
                                drill down to the model in received_data
                                when deserializing a response
            _spec_property_naming (bool): True if the variable names in the input data
                                are serialized names, as specified in the OpenAPI document.
                                False if the variable names in the input data
                                are pythonic names, e.g. snake case (default)
            _configuration (Configuration): the instance to use when
                                deserializing a file_type parameter.
                                If passed, type conversion is attempted
                                If omitted no type conversion is done.
            _visited_composed_classes (tuple): This stores a tuple of
                                classes that we have traveled through so that
                                if we see that class again we will not use its
                                discriminator again.
                                When traveling through a discriminator, the
                                composed schema that is
                                is traveled through is added to this set.
                                For example if Animal has a discriminator
                                petType and we pass in "Dog", and the class Dog
                                allOf includes Animal, we move through Animal
                                once using the discriminator, and pick Dog.
                                Then in Dog, we will make an instance of the
                                Animal class but this time we won't travel
                                through its discriminator because we passed in
                                _visited_composed_classes = (Animal,)
            array_of_string ([str]): [optional]  # noqa: E501
            array_array_of_integer ([[int]]): [optional]  # noqa: E501
            array_array_of_model ([[ReadOnlyFirst]]): [optional]  # noqa: E501
        """

        _check_type = kwargs.pop('_check_type', True)
        _spec_property_naming = kwargs.pop('_spec_property_naming', False)
        _path_to_item = kwargs.pop('_path_to_item', ())
        _configuration = kwargs.pop('_configuration', None)
        _visited_composed_classes = kwargs.pop('_visited_composed_classes', ())

        if args:
            raise ApiTypeError(
                "Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
                    args,
                    self.__class__.__name__,
                ),
                path_to_item=_path_to_item,
                valid_classes=(self.__class__,),
            )

        self._data_store = {}
        self._check_type = _check_type
        self._spec_property_naming = _spec_property_naming
        self._path_to_item = _path_to_item
        self._configuration = _configuration
        self._visited_composed_classes = _visited_composed_classes + (self.__class__,)

        for var_name, var_value in kwargs.items():
            if var_name not in self.attribute_map and \
                    self._configuration is not None and \
                    self._configuration.discard_unknown_keys and \
                    self.additional_properties_type is None:
                # discard variable.
                continue
            setattr(self, var_name, var_value)
            if var_name in self.read_only_vars:
                raise ApiAttributeError(f"`{var_name}` is a read-only attribute. Use `from_openapi_data` to instantiate "
                                        f"class with read only attributes.")
| [
"[email protected]"
] | |
312a8e05f383be3790e403f2863f1c553e88a5c0 | 6f56cf11d2d7750edb193831f368c8c7d156b974 | /test/mitmproxy/test_flow_export/locust_get.py | 72d5932aa5bef794e7caeda7833a2d335e7cbb34 | [
"MIT"
] | permissive | lifeNrun/mitmproxy | 000ad22e7262948ee6d4835c96d49b4a96ae1597 | a7b9e3033db29a27344c9f5d968c2af25d4a9ac0 | refs/heads/master | 2021-01-12T21:36:59.985896 | 2016-05-19T05:51:27 | 2016-05-19T05:51:27 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 650 | py | from locust import HttpLocust, TaskSet, task
class UserBehavior(TaskSet):
    # Generated from a recorded session (mitmproxy flow export).

    def on_start(self):
        '''on_start is called when a Locust starts, before any task is scheduled.'''
        self.path()

    @task()
    def path(self):
        # Replays GET /path with the captured headers.
        url = self.locust.host + '/path'
        headers = {
            'header': 'qvalue',
            # NOTE(review): replaying a captured content-length on a GET
            # without a body is suspicious — confirm the target tolerates it.
            'content-length': '7',
        }
        self.response = self.client.request(
            method='GET',
            url=url,
            headers=headers,
        )
)
### Additional tasks can go here ###
class WebsiteUser(HttpLocust):
    # Simulated user: runs UserBehavior with a 1-3 second wait between tasks.
    task_set = UserBehavior
    min_wait = 1000  # milliseconds
    max_wait = 3000  # milliseconds
| [
"[email protected]"
] | |
d52d3f946f64395395f21a74aa35b9864124e73b | 0df0bd96bea3e3f8ed8d339f0180c1a9fe529471 | /shipments/migrations/0012_auto_20141029_1158.py | 26fb7a1bdffad5cebdf7f0e74d184a1b6a505803 | [
"BSD-3-Clause"
] | permissive | theirc/CTS | d04141c4a7db1c32e915d65369e286c9c04ab9b9 | 43eb3e3b78c19f9e1dc02158ca12fc0c5d6bb270 | refs/heads/develop | 2020-12-03T05:26:07.564049 | 2018-03-21T14:47:53 | 2018-03-21T14:47:53 | 35,951,007 | 25 | 9 | BSD-3-Clause | 2018-03-21T14:47:54 | 2015-05-20T13:52:48 | JavaScript | UTF-8 | Python | false | false | 702 | py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    # Auto-generated schema migration: widen Location.latitude/longitude to
    # DecimalField(max_digits=13, decimal_places=10), still nullable.

    dependencies = [
        ('shipments', '0011_auto_20141023_1542'),
    ]

    operations = [
        migrations.AlterField(
            model_name='location',
            name='latitude',
            field=models.DecimalField(null=True, max_digits=13, decimal_places=10),
            preserve_default=True,
        ),
        migrations.AlterField(
            model_name='location',
            name='longitude',
            field=models.DecimalField(null=True, max_digits=13, decimal_places=10),
            preserve_default=True,
        ),
    ]
| [
"[email protected]"
] | |
5cce55f013cb0b6b48a89f9691a49b9ad2e6d83b | 18f2d1458103e1aacaaa14d9ff52654da0154dc8 | /src/trainers/da.py | bf333759f7cf0480d3a8e49372caa521914a290b | [] | no_license | yamad07/IADA | 4fbda5b2e7cdb5efd83f2bd2960bfb8dcfd0d455 | 7dbda1eb336f44e57567f4541e14b31304a4e381 | refs/heads/master | 2020-04-10T23:18:01.809883 | 2019-01-30T16:05:21 | 2019-01-30T16:05:21 | 161,347,800 | 5 | 0 | null | null | null | null | UTF-8 | Python | false | false | 9,422 | py | import torch.optim as optim
import torch
import torch.nn as nn
import torch.nn.functional as F
class DomainAdversarialTrainer:
def __init__(self, experiment, source_encoder, target_encoder, domain_discriminator, source_domain_discriminator,
             source_generator, data_loader, valid_data_loader, classifier):
    """Hold the models, loaders and experiment logger for the adversarial
    domain-adaptation pipeline.

    :param experiment: metrics logger (comet-style API: log_metric /
        log_current_epoch — inferred from usage below; confirm)
    :param data_loader: yields (source_data, source_labels, target_data) batches
    :param valid_data_loader: yields (target_data, target_labels) batches
    """
    self.experiment = experiment
    self.source_encoder = source_encoder
    self.target_encoder = target_encoder
    self.classifier = classifier
    self.domain_discriminator = domain_discriminator
    self.source_domain_discriminator = source_domain_discriminator
    self.source_generator = source_generator
    self.data_loader = data_loader
    self.validate_data_loader = valid_data_loader
    # NOTE(review): GPU index 1 is hard-coded; single-GPU machines will
    # fail here — consider making the device configurable.
    self.device = torch.device("cuda:1" if torch.cuda.is_available() else "cpu")
def set_loader(self, data_loader):
    """Swap in a new (source_data, source_labels, target_data) training loader."""
    self.data_loader = data_loader
def val_set_loader(self, validate_data_loader):
    """Swap in a new (target_data, target_labels) validation loader."""
    self.validate_data_loader = validate_data_loader
def train(self, s_epoch, sm_epoch):
    """Pre-training phase.

    (1) s_epoch epochs of supervised source training (encoder + classifier),
    (2) sm_epoch epochs of GAN-style modeling of the source feature
        distribution, then
    (3) initialise the target encoder from the source encoder's weights.

    NOTE: original indentation was lost in extraction; placement of the
    per-epoch logging/printing below is reconstructed — confirm upstream.
    """
    self.supervised_criterion = nn.NLLLoss()
    self.discriminator_criterion = nn.NLLLoss()
    self.source_domain_discriminator_criterion = nn.NLLLoss()
    self.source_domain_generator_criterion = nn.NLLLoss()
    self.adversarial_criterion = nn.NLLLoss()
    self.source_encoder.to(self.device)
    self.target_encoder.to(self.device)
    self.classifier.to(self.device)
    self.domain_discriminator.to(self.device)
    self.source_domain_discriminator.to(self.device)
    self.source_generator.to(self.device)
    self.classifier_optim = optim.SGD(self.classifier.parameters(), lr=1e-3)
    self.source_optim = optim.Adam(self.source_encoder.parameters(), lr=1e-3)
    self.target_optim = optim.Adam(self.target_encoder.parameters(), lr=1e-4)
    self.discrim_optim = optim.Adam(self.domain_discriminator.parameters(), lr=1e-4)
    self.source_domain_discriminator_optim = optim.Adam(self.source_domain_discriminator.parameters(), lr=1e-4)
    self.source_domain_generator_optim = optim.Adam(self.source_generator.parameters(), lr=1e-4)
    for e in range(s_epoch):
        for i, (source_data, source_labels, target_data) in enumerate(self.data_loader):
            source_data = source_data.to(self.device)
            source_labels = source_labels.to(self.device)
            target_data = target_data.to(self.device)
            # step 1. supervised learning using source data
            classifier_loss, source_accuracy = self._train_source(source_data, source_labels)
        self.experiment.log_current_epoch(e)
        self.experiment.log_metric('source_accuracy', source_accuracy)
        print("Epoch: {0} classifier: {1} source accuracy: {2}".format(e, classifier_loss, source_accuracy))
    for e in range(sm_epoch):
        for i, (source_data, source_labels, target_data) in enumerate(self.data_loader):
            source_data = source_data.to(self.device)
            # step 2. model the source feature distribution (GAN)
            discriminator_loss, generator_loss = self._train_source_modeling(source_data)
        self.experiment.log_metric('D(x)', discriminator_loss)
        self.experiment.log_metric('D(G(x))', generator_loss)
        self.experiment.log_current_epoch(e)
        print("Epoch: {0} D(x): {1} D(G(x)): {2}".format(e, discriminator_loss, generator_loss))
    # Warm-start the target encoder from the trained source encoder.
    self.target_encoder.load_state_dict(self.source_encoder.state_dict())
    self.source_generator.eval()
def train_da(self, epoch):
    """Adversarial adaptation: alternately train the domain discriminator
    and the target encoder, then log target validation accuracy per epoch.

    NOTE: original indentation was lost in extraction; placement of the
    per-epoch logging/printing below is reconstructed — confirm upstream.
    """
    for e in range(epoch):
        self.source_encoder.train()
        self.target_encoder.train()
        self.domain_discriminator.train()
        self.classifier.train()
        for i, (source_data, source_labels, target_data) in enumerate(self.data_loader):
            source_data = source_data.to(self.device)
            source_labels = source_labels.to(self.device)
            target_data = target_data.to(self.device)
            # Step D: discriminator learns to tell source vs. target features.
            discriminator_loss = self._ad_train_discriminator(source_data, target_data)
            # Step G: target encoder learns to fool the discriminator.
            target_adversarial_loss = self._ad_train_target_encoder(target_data)
            # NOTE(review): these predictions are computed but never used.
            target_features = self.target_encoder(target_data)
            target_preds = self.classifier(target_features)
        self.experiment.log_metric('discriminator_loss', discriminator_loss)
        self.experiment.log_metric('target_adversarial_loss', target_adversarial_loss)
        target_valid_accuracy = self.validate(e)
        self.experiment.log_current_epoch(e)
        self.experiment.log_metric('valid_target_accuracy', target_valid_accuracy)
        print("Epoch: {0} D(x): {1} D(G(x)): {2} target_accuracy: {3}".format(
            e, discriminator_loss, target_adversarial_loss, target_valid_accuracy))
def validate(self, e):
accuracy = 0
for i, (target_data, target_labels) in enumerate(self.validate_data_loader):
target_data = target_data.to(self.device)
target_labels = target_labels.to(self.device)
self.target_encoder.eval()
self.classifier.eval()
target_features = self.target_encoder(target_data)
target_preds = self.classifier(target_features)
_, target_preds = torch.max(target_preds, 1)
accuracy += 100 * (target_preds == target_labels).sum().item() / target_preds.size()[0]
accuracy /= len(self.validate_data_loader)
return accuracy
    def _train_source(self, source_data, source_labels):
        """Supervised step on the source domain: update the source encoder
        and the classifier with the supervised criterion.

        Args:
            source_data: batch of source-domain inputs.
            source_labels: ground-truth class labels for the batch.

        Returns:
            tuple: (classifier_loss, source_accuracy).
        """
        # init
        self.classifier_optim.zero_grad()
        self.source_optim.zero_grad()
        # forward
        source_features = self.source_encoder(source_data)
        source_preds = self.classifier(source_features)
        classifier_loss = self.supervised_criterion(source_preds, source_labels)
        # backward: one loss updates both encoder and classifier
        classifier_loss.backward()
        self.classifier_optim.step()
        self.source_optim.step()
        source_accuracy = self._calc_accuracy(source_preds, source_labels)
        return classifier_loss, source_accuracy
def _train_source_modeling(self, source_data):
self.source_optim.zero_grad()
self.source_domain_generator_optim.zero_grad()
self.source_domain_discriminator_optim.zero_grad()
source_features = self.source_encoder(source_data)
z = torch.randn(16, 100).to(self.device).detach()
source_fake_features = self.source_generator(z)
true_preds = self.source_domain_discriminator(source_features.detach())
fake_preds = self.source_domain_discriminator(source_fake_features.detach())
labels = torch.cat((torch.ones(16).long().to(self.device), torch.zeros(16).long().to(self.device)))
preds = torch.cat((true_preds, fake_preds))
discriminator_loss = self.source_domain_discriminator_criterion(preds, labels)
discriminator_loss.backward()
self.source_domain_discriminator_optim.step()
self.source_domain_generator_optim.zero_grad()
self.source_domain_discriminator_optim.zero_grad()
z = torch.randn(16, 100).to(self.device).detach()
source_fake_features = self.source_generator(z)
fake_preds = self.source_domain_discriminator(source_fake_features)
generator_loss = - self.source_domain_generator_criterion(fake_preds, torch.zeros(16).long().to(self.device))
generator_loss.backward()
self.source_domain_generator_optim.step()
return discriminator_loss, generator_loss
def _ad_train_target_encoder(self, target_data):
# init
self.target_optim.zero_grad()
self.source_optim.zero_grad()
self.discrim_optim.zero_grad()
# forward
target_features = self.target_encoder(target_data)
target_domain_predicts = self.domain_discriminator(target_features)
target_adversarial_loss = - self.adversarial_criterion(target_domain_predicts, torch.zeros(16).long().to(self.device))
# backward
target_adversarial_loss.backward()
self.target_optim.step()
return target_adversarial_loss
def _ad_train_discriminator(self, source_data, target_data):
# init
self.target_optim.zero_grad()
self.source_optim.zero_grad()
self.discrim_optim.zero_grad()
# forward
z = torch.randn(16, 100).to(self.device)
source_features = self.source_generator(z)
# source_features = self.source_encoder(source_data)
source_domain_preds = self.domain_discriminator(source_features.detach())
target_features = self.target_encoder(target_data)
target_domain_preds = self.domain_discriminator(target_features.detach())
domain_labels = torch.cat((torch.ones(16).long().to(self.device), torch.zeros(16).long().to(self.device)))
# backward
discriminator_loss = self.discriminator_criterion(torch.cat((source_domain_preds, target_domain_preds)), domain_labels)
discriminator_loss.backward()
self.discrim_optim.step()
return discriminator_loss
def _calc_accuracy(self, preds, labels):
_, preds = torch.max(preds, 1)
accuracy = 100 * (preds == labels).sum().item() / preds.size()[0]
return accuracy
| [
"[email protected]"
] | |
c8cc0bf1366841760b28e2cc11fcc4859a8afe9d | 8810b14fa59d749c755fc4c27281a3894674cde3 | /coding_challenges/algo/optimization/latest_set.py | 4f9833e664dbbf810e264593b7ab61090aac2d8f | [] | no_license | Yamp/home_lab | e76f23af12c794c38422623f92921617e371c9fe | 5c6d5b13bb0b845b7af8c1a94b42e2dad368e373 | refs/heads/master | 2022-03-03T12:32:06.243021 | 2020-04-01T10:34:04 | 2020-04-01T10:34:04 | 224,238,847 | 0 | 1 | null | 2022-02-10T21:53:23 | 2019-11-26T16:35:58 | Jupyter Notebook | UTF-8 | Python | false | false | 20 | py | # class LatestSet:
| [
"[email protected]"
] | |
fc7808e28aefa7f89ef9a9bd7a27c3525fb6ef52 | 71e838612daddbfc9bda01d9ba0ca76fab48c3bd | /full-stack-web-app/api/DjangoAPI/EmployeeApp/serializers.py | 712aa9a1fbbb1256c71c9576bc81feb251dcd9c1 | [] | no_license | Nourreddine1920/full-stack-web-app | d77e23955f3f11f853af51fbf02504e6237458a1 | 11a793c371d29ee592ee38c1ccfc83fb9b0401ce | refs/heads/main | 2023-06-29T21:38:57.416787 | 2021-08-06T12:50:15 | 2021-08-06T12:50:15 | 393,375,332 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 439 | py | from rest_framework import serializers
from EmployeeApp.models import Departments,Employees
class DepartmentSerializer(serializers.ModelSerializer):
class Meta:
model=Departments
fields=('DepartmentId','DepartmentName')
class EmployeeSerializer(serializers.ModelSerializer):
class Meta:
model=Employees
fields=('EmployeeId','EmployeeName','Department','DateOfJoining','PhotoFileName') | [
"[email protected]"
] | |
34a752ae52ae267613e3396371a6eb4159ae908f | 4520f56d4952c788e198ee7eee39911c9a76c60f | /03_Bigdata/02_Standardization_Analysis/2. Excel/7pandas_column_by_index.py | 3ca0166f2800315d790994cafc78d6cdd7fced1e | [] | no_license | SuHyeonJung/iot_python2019 | bef8877a1cd41981ad2125291f5af44f4fd1701c | 7860630ae28c53677a3c2761c9e997b28ea55f26 | refs/heads/master | 2020-06-14T22:18:27.503781 | 2019-11-08T05:50:41 | 2019-11-08T05:50:41 | 195,142,234 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 349 | py | import sys
import pandas as pd
input_file = sys.argv[1]
output_file = sys.argv[2]
data_frame = pd.read_excel(input_file, 'january_2013', index_col=None)
data_frame_column_by_index = data_frame.iloc[:, [1, 4]]
writer = pd.ExcelWriter(output_file)
data_frame_column_by_index.to_excel(writer, sheet_name='jan_13_output', index=False)
writer.save()
| [
"[email protected]"
] | |
0e4c3531a57387a683f6035ab1f6f0d2d72d85ed | e90a772733e73e45b4cdbb5f240ef3b4a9e71de1 | /18. 4Sum.py | 1bf91d01a1717a31da740d4bb0ec68e689638928 | [] | no_license | jiewu-stanford/leetcode | 102829fcbcace17909e4de49c01c3d705b6e6e3a | cbd47f713d3307f900daf55c8f27301c70542fc4 | refs/heads/master | 2022-05-28T18:25:00.885047 | 2022-05-18T05:16:22 | 2022-05-18T05:16:22 | 214,486,622 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,893 | py | '''
Title : 18. 4Sum
Problem : https://leetcode.com/problems/4sum/
'''
''' pair up to convert to the 2-sum problem '''
class Solution:
    def fourSum(self, nums: List[int], target: int) -> List[List[int]]:
        """Return all unique quadruplets of values summing to `target`.

        Strategy: bucket every index pair by its sum, join each bucket with
        the complementary bucket, reject joins that reuse an index, and
        deduplicate results via a set of sorted value tuples.
        """
        sums_to_pairs = {}
        n = len(nums)
        for a in range(n - 1):
            for b in range(a + 1, n):
                pair_sum = nums[a] + nums[b]
                sums_to_pairs.setdefault(pair_sum, []).append((a, b))
        unique = set()
        for pair_sum, first_pairs in sums_to_pairs.items():
            complement = target - pair_sum
            if complement not in sums_to_pairs:
                continue
            second_pairs = sums_to_pairs[complement]
            for a, b in first_pairs:
                for c, d in second_pairs:
                    # all four indices must be distinct
                    if len({a, b, c, d}) != 4:
                        continue
                    unique.add(tuple(sorted((nums[a], nums[b], nums[c], nums[d]))))
        return list(unique)
''' iterative solution, more comprehensible but much slower '''
class Solution:
    def fourSum(self, nums: List[int], target: int) -> List[List[int]]:
        """Sorted two-pointer scan: fix the two smallest members, then close
        in on the remaining pair with two pointers, skipping duplicate
        values at every level.

        Note: sorts `nums` in place, like the original.
        """
        nums.sort()
        n = len(nums)
        out = []
        for i in range(n - 3):
            if i and nums[i] == nums[i - 1]:
                continue  # duplicate first member
            for j in range(i + 1, n - 2):
                if j > i + 1 and nums[j] == nums[j - 1]:
                    continue  # duplicate second member
                want = target - nums[i] - nums[j]
                lo, hi = j + 1, n - 1
                while lo < hi:
                    pair = nums[lo] + nums[hi]
                    if pair < want:
                        lo += 1
                    elif pair > want:
                        hi -= 1
                    else:
                        out.append((nums[i], nums[j], nums[lo], nums[hi]))
                        hi -= 1
                        lo += 1
                        # hop over duplicate values on both sides
                        while lo < hi and nums[lo] == nums[lo - 1]:
                            lo += 1
                        while lo < hi and nums[hi] == nums[hi + 1]:
                            hi -= 1
        return out
'''
combine the solution of 1. Two Sum and 15. 3Sum
Reference: https://programmer.help/blogs/leetcode-2sum-3sum-4sum-python.html
'''
class Solution:
    def fourSum(self, nums: List[int], target: int) -> List[List[int]]:
        """Reduce 4Sum to 3Sum (and 3Sum to 2Sum) on the sorted array.

        For each anchor index i the remaining suffix is searched for
        triplets summing to target - nums[i]; duplicate anchors are skipped
        by comparing adjacent values in sorted order.
        """
        if len(nums) < 4: return []
        nums.sort()
        def threeSum(nums, target): # the 15. solution
            def twoSum(nums, target, num, triplets): # the 1. solution
                # Two-pointer scan for pairs summing to target - num.
                l, r, tgt = 0, len(nums)-1, target-num
                while l != r:
                    if nums[l] + nums[r] < tgt:
                        l += 1
                    elif nums[l] + nums[r] > tgt:
                        r -= 1
                    else:
                        triplet = [num, nums[l], nums[r]]
                        l += 1
                        # skip duplicate left values to avoid repeated triplets
                        while l != r and nums[l-1] == nums[l]:
                            l += 1
                        triplets.append(triplet)
                return triplets
            result = []
            for i in range(len(nums)-2):
                # skip duplicate anchors for the triplet search
                if i > 0 and nums[i-1] == nums[i]:
                    continue
                else:
                    remnant = nums[i+1:]
                    result = twoSum(remnant, target, nums[i], result)
            return result
        res = []
        for i in range(len(nums)-3):
            if i > 0 and nums[i-1] == nums[i]:
                continue
            num = nums[i]
            trisum = target - num
            rem = nums[i+1:]
            triples = threeSum(rem, trisum)
            if len(triples) > 0:
                for triple in triples:
                    # append the anchor so each triple becomes a quadruplet
                    triple.append(num)
                    res.append(triple)
        return res
"[email protected]"
] | |
6d2b146447d9c996712e4fcf47c58fe6cf589442 | 8e127527301ef9439960725784d5522af824bc9b | /account_flujo_caja_it/__manifest__.py | cee213aea2a18577408ba87507f6b8fbdac513ff | [] | no_license | Makelator/heleo | b8fcf2beef7359ec91c20ef8930753bd0f6f4670 | a5a04203f1f19f813f495a484e590cd22886edf7 | refs/heads/master | 2020-07-30T15:09:27.224489 | 2019-09-23T05:43:10 | 2019-09-23T05:43:10 | 210,272,436 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 504 | py | # -*- encoding: utf-8 -*-
{
'name': 'Saldos Comprobantes Analisis IT Extended',
'category': 'account',
'author': 'ITGRUPO-COMPATIBLE-BO',
'depends': ['analisis_saldos_comprobantes_periodo_it','account_sheet_work','account_multipayment_invoices_it','account_multipayment_invoices_it_advance'],
'version': '1.0',
'description':"""
Analisis de Saldos por Comprobantes version 2017
""",
'auto_install': False,
'demo': [],
'data': ['wizard/account_contable_period_view.xml'],
'installable': True
}
| [
"[email protected]"
] | |
ff7c384224d512d1d88787ac98df6f64a52eb2ab | ca6e4edfc31439aeaed4b8e9e75ea9c8b679c44f | /autofill_users.py | ace539b5d555e832bdd4a7310c5b2ed3644d450a | [
"MIT"
] | permissive | prateekchandan/pickup-server | b74f949c5d3b0471d7318fd72417b3bd0b1ccfc2 | 9132c3ef9c0e95ba34a69d9ed2a9fb980356372a | refs/heads/master | 2021-01-22T15:58:43.838560 | 2015-10-07T15:41:23 | 2015-10-07T15:41:23 | 33,261,955 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 716 | py | import requests
import os
import json
loginURL = "http://pickup.prateekchandan.me/add_user"
HTTPSession = requests.session()
start_locations=['Larsen+Tourbo+Powai','IIT+Bombay+Hostel+9','Hiranandani+Hospital','Hiranandani','Kanjur+Marg+Station',
'Chandivali','Raheja+Vihar','Supreme+Powai','Galleria+Hiranandani','Powai+Plaza']
for i in range(100):
postData = {'key':'9f83c32cf3c9d529e' ,'fbid':i ,'name':'person'+str(i) , 'email':'person'+str(i)+'@gmail.com' ,
'device_id':i , 'gcm_id':i, 'mac_addr':i , 'gender':'male'}
afterLoginPage = HTTPSession.post(loginURL, data = postData )
print afterLoginPage.content
#afterLoginPage = HTTPSession.post(loginURL, data = postData )
#print afterLoginPage.content
| [
"[email protected]"
] | |
2c2c6faaaf98f2e09011c3f88cd4fbe50341b325 | 24e7e0dfaaeaca8f911b40fcc2937342a0f278fd | /venv/Lib/site-packages/psutil/_common.py | b7a5478658fe78b9a1537dd893a3a77cc095581f | [
"MIT"
] | permissive | BimiLevi/Covid19 | 90e234c639192d62bb87364ef96d6a46d8268fa0 | 5f07a9a4609383c02597373d76d6b6485d47936e | refs/heads/master | 2023-08-04T13:13:44.480700 | 2023-08-01T08:36:36 | 2023-08-01T08:36:36 | 288,455,446 | 1 | 0 | MIT | 2021-01-22T19:36:26 | 2020-08-18T12:53:43 | HTML | UTF-8 | Python | false | false | 26,159 | py | # Copyright (c) 2009, Giampaolo Rodola'. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Common objects shared by __init__.py and _ps*.py modules."""
# Note: this module is imported by setup.py so it should not import
# psutil or third-party modules.
from __future__ import division, print_function
import contextlib
import errno
import functools
import os
import socket
import stat
import sys
import threading
import warnings
from collections import defaultdict
from collections import namedtuple
from socket import AF_INET
from socket import SOCK_DGRAM
from socket import SOCK_STREAM
try:
from socket import AF_INET6
except ImportError:
AF_INET6 = None
try:
from socket import AF_UNIX
except ImportError:
AF_UNIX = None
if sys.version_info >= (3, 4):
import enum
else:
enum = None
# can't take it from _common.py as this script is imported by setup.py
PY3 = sys.version_info[0] == 3
__all__ = [
# OS constants
'FREEBSD', 'BSD', 'LINUX', 'NETBSD', 'OPENBSD', 'MACOS', 'OSX', 'POSIX',
'SUNOS', 'WINDOWS',
# connection constants
'CONN_CLOSE', 'CONN_CLOSE_WAIT', 'CONN_CLOSING', 'CONN_ESTABLISHED',
'CONN_FIN_WAIT1', 'CONN_FIN_WAIT2', 'CONN_LAST_ACK', 'CONN_LISTEN',
'CONN_NONE', 'CONN_SYN_RECV', 'CONN_SYN_SENT', 'CONN_TIME_WAIT',
# net constants
'NIC_DUPLEX_FULL', 'NIC_DUPLEX_HALF', 'NIC_DUPLEX_UNKNOWN',
# process status constants
'STATUS_DEAD', 'STATUS_DISK_SLEEP', 'STATUS_IDLE', 'STATUS_LOCKED',
'STATUS_RUNNING', 'STATUS_SLEEPING', 'STATUS_STOPPED', 'STATUS_SUSPENDED',
'STATUS_TRACING_STOP', 'STATUS_WAITING', 'STATUS_WAKE_KILL',
'STATUS_WAKING', 'STATUS_ZOMBIE', 'STATUS_PARKED',
# other constants
'ENCODING', 'ENCODING_ERRS', 'AF_INET6',
# named tuples
'pconn', 'pcputimes', 'pctxsw', 'pgids', 'pio', 'pionice', 'popenfile',
'pthread', 'puids', 'sconn', 'scpustats', 'sdiskio', 'sdiskpart',
'sdiskusage', 'snetio', 'snicaddr', 'snicstats', 'sswap', 'suser',
# utility functions
'conn_tmap', 'deprecated_method', 'isfile_strict', 'memoize',
'parse_environ_block', 'path_exists_strict', 'usage_percent',
'supports_ipv6', 'sockfam_to_enum', 'socktype_to_enum', "wrap_numbers",
'bytes2human', 'conn_to_ntuple', 'debug',
# shell utils
'hilite', 'term_supports_colors', 'print_color',
]
# ===================================================================
# --- OS constants
# ===================================================================
POSIX = os.name == "posix"
WINDOWS = os.name == "nt"
LINUX = sys.platform.startswith("linux")
MACOS = sys.platform.startswith("darwin")
OSX = MACOS # deprecated alias
FREEBSD = sys.platform.startswith("freebsd")
OPENBSD = sys.platform.startswith("openbsd")
NETBSD = sys.platform.startswith("netbsd")
BSD = FREEBSD or OPENBSD or NETBSD
SUNOS = sys.platform.startswith(("sunos", "solaris"))
AIX = sys.platform.startswith("aix")
# ===================================================================
# --- API constants
# ===================================================================
# Process.status()
STATUS_RUNNING = "running"
STATUS_SLEEPING = "sleeping"
STATUS_DISK_SLEEP = "disk-sleep"
STATUS_STOPPED = "stopped"
STATUS_TRACING_STOP = "tracing-stop"
STATUS_ZOMBIE = "zombie"
STATUS_DEAD = "dead"
STATUS_WAKE_KILL = "wake-kill"
STATUS_WAKING = "waking"
STATUS_IDLE = "idle" # Linux, macOS, FreeBSD
STATUS_LOCKED = "locked" # FreeBSD
STATUS_WAITING = "waiting" # FreeBSD
STATUS_SUSPENDED = "suspended" # NetBSD
STATUS_PARKED = "parked" # Linux
# Process.connections() and psutil.net_connections()
CONN_ESTABLISHED = "ESTABLISHED"
CONN_SYN_SENT = "SYN_SENT"
CONN_SYN_RECV = "SYN_RECV"
CONN_FIN_WAIT1 = "FIN_WAIT1"
CONN_FIN_WAIT2 = "FIN_WAIT2"
CONN_TIME_WAIT = "TIME_WAIT"
CONN_CLOSE = "CLOSE"
CONN_CLOSE_WAIT = "CLOSE_WAIT"
CONN_LAST_ACK = "LAST_ACK"
CONN_LISTEN = "LISTEN"
CONN_CLOSING = "CLOSING"
CONN_NONE = "NONE"
# net_if_stats()
if enum is None:
NIC_DUPLEX_FULL = 2
NIC_DUPLEX_HALF = 1
NIC_DUPLEX_UNKNOWN = 0
else:
class NicDuplex(enum.IntEnum):
NIC_DUPLEX_FULL = 2
NIC_DUPLEX_HALF = 1
NIC_DUPLEX_UNKNOWN = 0
globals().update(NicDuplex.__members__)
# sensors_battery()
if enum is None:
POWER_TIME_UNKNOWN = -1
POWER_TIME_UNLIMITED = -2
else:
class BatteryTime(enum.IntEnum):
POWER_TIME_UNKNOWN = -1
POWER_TIME_UNLIMITED = -2
globals().update(BatteryTime.__members__)
# --- others
ENCODING = sys.getfilesystemencoding()
if not PY3:
ENCODING_ERRS = "replace"
else:
try:
ENCODING_ERRS = sys.getfilesystemencodeerrors() # py 3.6
except AttributeError:
ENCODING_ERRS = "surrogateescape" if POSIX else "replace"
# ===================================================================
# --- namedtuples
# ===================================================================
# --- for system functions
# psutil.swap_memory()
sswap = namedtuple('sswap', ['total', 'used', 'free', 'percent', 'sin',
'sout'])
# psutil.disk_usage()
sdiskusage = namedtuple('sdiskusage', ['total', 'used', 'free', 'percent'])
# psutil.disk_io_counters()
sdiskio = namedtuple('sdiskio', ['read_count', 'write_count',
'read_bytes', 'write_bytes',
'read_time', 'write_time'])
# psutil.disk_partitions()
sdiskpart = namedtuple('sdiskpart', ['device', 'mountpoint', 'fstype', 'opts'])
# psutil.net_io_counters()
snetio = namedtuple('snetio', ['bytes_sent', 'bytes_recv',
'packets_sent', 'packets_recv',
'errin', 'errout',
'dropin', 'dropout'])
# psutil.users()
suser = namedtuple('suser', ['name', 'terminal', 'host', 'started', 'pid'])
# psutil.net_connections()
sconn = namedtuple('sconn', ['fd', 'family', 'type', 'laddr', 'raddr',
'status', 'pid'])
# psutil.net_if_addrs()
snicaddr = namedtuple('snicaddr',
['family', 'address', 'netmask', 'broadcast', 'ptp'])
# psutil.net_if_stats()
snicstats = namedtuple('snicstats', ['isup', 'duplex', 'speed', 'mtu'])
# psutil.cpu_stats()
scpustats = namedtuple(
'scpustats', ['ctx_switches', 'interrupts', 'soft_interrupts', 'syscalls'])
# psutil.cpu_freq()
scpufreq = namedtuple('scpufreq', ['current', 'min', 'max'])
# psutil.sensors_temperatures()
shwtemp = namedtuple(
'shwtemp', ['label', 'current', 'high', 'critical'])
# psutil.sensors_battery()
sbattery = namedtuple('sbattery', ['percent', 'secsleft', 'power_plugged'])
# psutil.sensors_fans()
sfan = namedtuple('sfan', ['label', 'current'])
# --- for Process methods
# psutil.Process.cpu_times()
pcputimes = namedtuple('pcputimes',
['user', 'system', 'children_user', 'children_system'])
# psutil.Process.open_files()
popenfile = namedtuple('popenfile', ['path', 'fd'])
# psutil.Process.threads()
pthread = namedtuple('pthread', ['id', 'user_time', 'system_time'])
# psutil.Process.uids()
puids = namedtuple('puids', ['real', 'effective', 'saved'])
# psutil.Process.gids()
pgids = namedtuple('pgids', ['real', 'effective', 'saved'])
# psutil.Process.io_counters()
pio = namedtuple('pio', ['read_count', 'write_count',
'read_bytes', 'write_bytes'])
# psutil.Process.ionice()
pionice = namedtuple('pionice', ['ioclass', 'value'])
# psutil.Process.ctx_switches()
pctxsw = namedtuple('pctxsw', ['voluntary', 'involuntary'])
# psutil.Process.connections()
pconn = namedtuple('pconn', ['fd', 'family', 'type', 'laddr', 'raddr',
'status'])
# psutil.connections() and psutil.Process.connections()
addr = namedtuple('addr', ['ip', 'port'])
# ===================================================================
# --- Process.connections() 'kind' parameter mapping
# ===================================================================
conn_tmap = {
"all": ([AF_INET, AF_INET6, AF_UNIX], [SOCK_STREAM, SOCK_DGRAM]),
"tcp": ([AF_INET, AF_INET6], [SOCK_STREAM]),
"tcp4": ([AF_INET], [SOCK_STREAM]),
"udp": ([AF_INET, AF_INET6], [SOCK_DGRAM]),
"udp4": ([AF_INET], [SOCK_DGRAM]),
"inet": ([AF_INET, AF_INET6], [SOCK_STREAM, SOCK_DGRAM]),
"inet4": ([AF_INET], [SOCK_STREAM, SOCK_DGRAM]),
"inet6": ([AF_INET6], [SOCK_STREAM, SOCK_DGRAM]),
}
if AF_INET6 is not None:
conn_tmap.update({
"tcp6": ([AF_INET6], [SOCK_STREAM]),
"udp6": ([AF_INET6], [SOCK_DGRAM]),
})
if AF_UNIX is not None:
conn_tmap.update({
"unix": ([AF_UNIX], [SOCK_STREAM, SOCK_DGRAM]),
})
# =====================================================================
# --- Exceptions
# =====================================================================
class Error(Exception):
    """Base exception class. All other psutil exceptions inherit
    from this one.
    """
    __module__ = 'psutil'

    def __init__(self, msg=""):
        Exception.__init__(self, msg)
        self.msg = msg

    def __repr__(self):
        # Renders e.g. "psutil.NoSuchProcess process no longer exists ...";
        # strip() drops the trailing space when msg is empty.
        ret = "psutil.%s %s" % (self.__class__.__name__, self.msg)
        return ret.strip()

    __str__ = __repr__


class NoSuchProcess(Error):
    """Exception raised when a process with a certain PID doesn't
    or no longer exists.
    """
    __module__ = 'psutil'

    def __init__(self, pid, name=None, msg=None):
        Error.__init__(self, msg)
        self.pid = pid
        self.name = name
        self.msg = msg
        if msg is None:
            # Build a default message from whatever identifiers we have.
            if name:
                details = "(pid=%s, name=%s)" % (self.pid, repr(self.name))
            else:
                details = "(pid=%s)" % self.pid
            self.msg = "process no longer exists " + details


class ZombieProcess(NoSuchProcess):
    """Exception raised when querying a zombie process. This is
    raised on macOS, BSD and Solaris only, and not always: depending
    on the query the OS may be able to succeed anyway.
    On Linux all zombie processes are querable (hence this is never
    raised). Windows doesn't have zombie processes.
    """
    __module__ = 'psutil'

    def __init__(self, pid, name=None, ppid=None, msg=None):
        # BUGFIX: this used to call NoSuchProcess.__init__(self, msg),
        # passing `msg` as the parent's `pid` parameter. The attributes were
        # overwritten below so the end state happened to be right, but the
        # call was wrong and wasteful; pass the arguments properly instead.
        NoSuchProcess.__init__(self, pid, name, msg)
        self.ppid = ppid
        self.msg = msg
        if msg is None:
            args = ["pid=%s" % pid]
            if name:
                args.append("name=%s" % repr(self.name))
            if ppid:
                args.append("ppid=%s" % self.ppid)
            details = "(%s)" % ", ".join(args)
            self.msg = "process still exists but it's a zombie " + details


class AccessDenied(Error):
    """Exception raised when permission to perform an action is denied."""
    __module__ = 'psutil'

    def __init__(self, pid=None, name=None, msg=None):
        Error.__init__(self, msg)
        self.pid = pid
        self.name = name
        self.msg = msg
        if msg is None:
            if (pid is not None) and (name is not None):
                self.msg = "(pid=%s, name=%s)" % (pid, repr(name))
            elif (pid is not None):
                self.msg = "(pid=%s)" % self.pid
            else:
                self.msg = ""


class TimeoutExpired(Error):
    """Raised on Process.wait(timeout) if timeout expires and process
    is still alive.
    """
    __module__ = 'psutil'

    def __init__(self, seconds, pid=None, name=None):
        Error.__init__(self, "timeout after %s seconds" % seconds)
        self.seconds = seconds
        self.pid = pid
        self.name = name
        if (pid is not None) and (name is not None):
            self.msg += " (pid=%s, name=%s)" % (pid, repr(name))
        elif (pid is not None):
            self.msg += " (pid=%s)" % self.pid
# ===================================================================
# --- utils
# ===================================================================
def usage_percent(used, total, round_=None):
    """Calculate the percentage of `total` represented by `used`.

    Returns 0.0 when `total` is zero; optionally rounds the result to
    `round_` decimal places.
    """
    if total == 0:
        return 0.0
    ret = (float(used) / total) * 100
    return ret if round_ is None else round(ret, round_)
def memoize(fun):
    """A simple memoize decorator for functions supporting (hashable)
    positional and keyword arguments.

    The decorated function gains a ``cache_clear()`` attribute which
    empties the cache:

    >>> @memoize
    ... def foo()
    ...     return 1
    ...
    >>> foo()
    1
    >>> foo.cache_clear()
    >>>
    """
    results = {}

    @functools.wraps(fun)
    def wrapper(*args, **kwargs):
        # Key on positionals plus an order-insensitive view of the kwargs.
        key = (args, frozenset(sorted(kwargs.items())))
        if key not in results:
            results[key] = fun(*args, **kwargs)
        return results[key]

    def cache_clear():
        """Clear cache."""
        results.clear()

    wrapper.cache_clear = cache_clear
    return wrapper
def memoize_when_activated(fun):
    """A memoize decorator which is disabled by default. It can be
    activated and deactivated on request.
    For efficiency reasons it can be used only against class methods
    accepting no arguments.

    >>> class Foo:
    ...     @memoize_when_activated
    ...     def foo(self):
    ...         print(1)
    ...
    >>> f = Foo()
    >>> # deactivated (default): every call executes the body
    >>> f.foo()
    1
    >>> f.foo()
    1
    >>>
    >>> # activated: only the first call executes the body
    >>> f.foo.cache_activate(f)
    >>> f.foo()
    1
    >>> f.foo()
    >>>
    """
    @functools.wraps(fun)
    def wrapper(self):
        try:
            # case 1: we previously entered oneshot() ctx
            ret = self._cache[fun]
        except AttributeError:
            # case 2: we never entered oneshot() ctx
            return fun(self)
        except KeyError:
            # case 3: we entered oneshot() ctx but there's no cache
            # for this entry yet
            ret = self._cache[fun] = fun(self)
        return ret

    def cache_activate(proc):
        """Activate cache. Expects a Process instance. Cache will be
        stored as a "_cache" instance attribute."""
        proc._cache = {}

    def cache_deactivate(proc):
        """Deactivate and clear cache."""
        try:
            del proc._cache
        except AttributeError:
            pass

    wrapper.cache_activate = cache_activate
    wrapper.cache_deactivate = cache_deactivate
    return wrapper
def isfile_strict(path):
    """Same as os.path.isfile() but propagates EPERM / EACCES instead of
    swallowing them, see:
    http://mail.python.org/pipermail/python-dev/2012-June/120787.html
    """
    try:
        mode = os.stat(path).st_mode
    except OSError as err:
        if err.errno not in (errno.EPERM, errno.EACCES):
            return False
        raise
    return stat.S_ISREG(mode)
def path_exists_strict(path):
    """Same as os.path.exists() but propagates EPERM / EACCES instead of
    swallowing them, see:
    http://mail.python.org/pipermail/python-dev/2012-June/120787.html
    """
    try:
        os.stat(path)
    except OSError as err:
        if err.errno in (errno.EPERM, errno.EACCES):
            raise
        return False
    else:
        return True
@memoize
def supports_ipv6():
    """Return True if IPv6 is supported on this platform.

    Checks both build-time support (socket.has_ipv6) and runtime support by
    attempting to bind a socket to ::1. Memoized: probed once per run.
    """
    if not socket.has_ipv6 or AF_INET6 is None:
        return False
    try:
        sock = socket.socket(AF_INET6, socket.SOCK_STREAM)
        with contextlib.closing(sock):
            sock.bind(("::1", 0))
        return True
    except socket.error:
        return False
def parse_environ_block(data):
    """Parse a C environ block of environment variables into a dictionary."""
    # The block is usually raw data from the target process. It might
    # contain trailing garbage and entries that are not assignments.
    env = {}
    cursor = 0
    # localize the global to speed up access in the loop
    on_windows = WINDOWS
    while True:
        entry_end = data.find("\0", cursor)
        # a nul byte at the current position (or a double nul) ends the block
        if entry_end <= cursor:
            break
        sep = data.find("=", cursor, entry_end)
        # entries without an equals sign (or starting with one) are skipped
        if sep > cursor:
            name = data[cursor:sep]
            if on_windows:
                # Windows expects environment variable names in uppercase
                name = name.upper()
            env[name] = data[sep + 1:entry_end]
        cursor = entry_end + 1
    return env
def sockfam_to_enum(num):
    """Convert a numeric socket family value to an IntEnum member.

    Unknown values are passed through unchanged.
    """
    if enum is None:  # Python < 3.4: no enum support
        return num
    try:  # pragma: no cover
        member = socket.AddressFamily(num)
    except ValueError:
        return num
    return member
def socktype_to_enum(num):
    """Convert a numeric socket type value to an IntEnum member.

    Unknown values are passed through unchanged.
    """
    if enum is None:  # Python < 3.4: no enum support
        return num
    try:  # pragma: no cover
        kind = socket.SocketKind(num)
    except ValueError:
        return num
    return kind
def conn_to_ntuple(fd, fam, type_, laddr, raddr, status, status_map, pid=None):
    """Convert a raw connection tuple to a proper ntuple.

    Returns a `pconn` when `pid` is None (per-process API), else an
    `sconn` including the pid (system-wide API).
    """
    if fam in (socket.AF_INET, AF_INET6):
        # wrap raw (ip, port) pairs in the `addr` namedtuple
        if laddr:
            laddr = addr(*laddr)
        if raddr:
            raddr = addr(*raddr)
    if type_ == socket.SOCK_STREAM and fam in (AF_INET, AF_INET6):
        # only TCP/IP connections have a meaningful status string
        status = status_map.get(status, CONN_NONE)
    else:
        status = CONN_NONE  # ignore whatever C returned to us
    fam = sockfam_to_enum(fam)
    type_ = socktype_to_enum(type_)
    if pid is None:
        return pconn(fd, fam, type_, laddr, raddr, status)
    else:
        return sconn(fd, fam, type_, laddr, raddr, status, pid)
def deprecated_method(replacement):
    """Decorator marking a method as deprecated.

    Calling the decorated method emits a DeprecationWarning and forwards
    the call to the method named by `replacement` on the same instance.
    """
    def decorator(fun):
        warning_text = (
            "%s() is deprecated and will be removed; use %s() instead"
            % (fun.__name__, replacement))
        if fun.__doc__ is None:
            fun.__doc__ = warning_text

        @functools.wraps(fun)
        def forwarder(self, *args, **kwargs):
            warnings.warn(warning_text, category=DeprecationWarning,
                          stacklevel=2)
            return getattr(self, replacement)(*args, **kwargs)

        return forwarder
    return decorator
class _WrapNumbers:
    """Watches numbers so that they don't overflow and wrap
    (reset to zero).

    Per function `name` it keeps:
      * cache: the last raw dict the function returned
      * reminders: accumulated offsets to add once a counter wraps
      * reminder_keys: which (key, index) reminders belong to each dict key,
        so they can be dropped when that key disappears
    """

    def __init__(self):
        self.lock = threading.Lock()
        self.cache = {}
        self.reminders = {}
        self.reminder_keys = {}

    def _add_dict(self, input_dict, name):
        # First observation of `name`: just remember the raw numbers.
        assert name not in self.cache
        assert name not in self.reminders
        assert name not in self.reminder_keys
        self.cache[name] = input_dict
        self.reminders[name] = defaultdict(int)
        self.reminder_keys[name] = defaultdict(set)

    def _remove_dead_reminders(self, input_dict, name):
        """In case the number of keys changed between calls (e.g. a
        disk disappears) this removes the entry from self.reminders.
        """
        old_dict = self.cache[name]
        gone_keys = set(old_dict.keys()) - set(input_dict.keys())
        for gone_key in gone_keys:
            for remkey in self.reminder_keys[name][gone_key]:
                del self.reminders[name][remkey]
            del self.reminder_keys[name][gone_key]

    def run(self, input_dict, name):
        """Cache dict and sum numbers which overflow and wrap.
        Return an updated copy of `input_dict`.
        """
        if name not in self.cache:
            # This was the first call.
            self._add_dict(input_dict, name)
            return input_dict

        self._remove_dead_reminders(input_dict, name)

        old_dict = self.cache[name]
        new_dict = {}
        for key in input_dict.keys():
            input_tuple = input_dict[key]
            try:
                old_tuple = old_dict[key]
            except KeyError:
                # The input dict has a new key (e.g. a new disk or NIC)
                # which didn't exist in the previous call.
                new_dict[key] = input_tuple
                continue

            bits = []
            for i in range(len(input_tuple)):
                input_value = input_tuple[i]
                old_value = old_tuple[i]
                remkey = (key, i)
                if input_value < old_value:
                    # it wrapped! remember the old value so future reads
                    # keep increasing monotonically
                    self.reminders[name][remkey] += old_value
                    self.reminder_keys[name][key].add(remkey)
                bits.append(input_value + self.reminders[name][remkey])

            new_dict[key] = tuple(bits)

        self.cache[name] = input_dict
        return new_dict

    def cache_clear(self, name=None):
        """Clear the internal cache, optionally only for function 'name'."""
        with self.lock:
            if name is None:
                self.cache.clear()
                self.reminders.clear()
                self.reminder_keys.clear()
            else:
                self.cache.pop(name, None)
                self.reminders.pop(name, None)
                self.reminder_keys.pop(name, None)

    def cache_info(self):
        """Return internal cache dicts as a tuple of 3 elements."""
        with self.lock:
            return (self.cache, self.reminders, self.reminder_keys)
def wrap_numbers(input_dict, name):
    """Given an `input_dict` and a function `name`, adjust the numbers
    which "wrap" (restart from zero) across different calls by adding
    "old value" to "new value" and return an updated dict.
    """
    with _wn.lock:
        return _wn.run(input_dict, name)


# Module-level singleton; its cache controls are exposed as attributes of
# the public wrap_numbers() function.
_wn = _WrapNumbers()
wrap_numbers.cache_clear = _wn.cache_clear
wrap_numbers.cache_info = _wn.cache_info
def open_binary(fname, **kwargs):
    """Open *fname* for reading in binary mode."""
    return open(fname, "rb", **kwargs)
def open_text(fname, **kwargs):
    """On Python 3 opens a file in text mode by using fs encoding and
    a proper en/decoding errors handler.
    On Python 2 this is just an alias for open(name, 'rt').
    """
    if PY3:
        # See:
        # https://github.com/giampaolo/psutil/issues/675
        # https://github.com/giampaolo/psutil/pull/733
        kwargs.setdefault('encoding', ENCODING)
        kwargs.setdefault('errors', ENCODING_ERRS)
    return open(fname, "rt", **kwargs)
def bytes2human(n, format="%(value).1f%(symbol)s"):
    """Convert a byte count into a short human-readable string.
    See: http://goo.gl/zeJZl

    >>> bytes2human(10000)
    '9.8K'
    >>> bytes2human(100001221)
    '95.4M'
    """
    units = ('B', 'K', 'M', 'G', 'T', 'P', 'E', 'Z', 'Y')
    # Walk units from largest to smallest; the first threshold <= n wins.
    for power in range(len(units) - 1, 0, -1):
        threshold = 1 << (power * 10)
        if n >= threshold:
            return format % {"symbol": units[power],
                             "value": float(n) / threshold}
    return format % {"symbol": units[0], "value": n}
def get_procfs_path():
    """Return updated psutil.PROCFS_PATH constant.

    Read from the live psutil module so user reassignment is honoured.
    """
    return sys.modules['psutil'].PROCFS_PATH
if PY3:
    def decode(s):
        # bytes -> str using the filesystem encoding and error handler
        return s.decode(encoding=ENCODING, errors=ENCODING_ERRS)
else:
    def decode(s):
        # Python 2 strings are already bytes; nothing to do
        return s
# =====================================================================
# --- shell utils
# =====================================================================
@memoize
def term_supports_colors(file=sys.stdout):  # pragma: no cover
    """Return True if the terminal attached to `file` can render ANSI
    colors (always assumed True on Windows). Memoized per `file`."""
    if os.name == 'nt':
        return True
    try:
        import curses
        assert file.isatty()
        curses.setupterm()
        assert curses.tigetnum("colors") > 0
    except Exception:
        return False
    else:
        return True
def hilite(s, color=None, bold=False):  # pragma: no cover
    """Return `s` wrapped in ANSI escape codes for `color` (optionally
    bold), or `s` unchanged when the terminal doesn't support colors.

    Raises ValueError for an unknown color name.
    """
    if not term_supports_colors():
        return s
    attr = []
    colors = dict(green='32', red='91', brown='33', yellow='93', blue='34',
                  violet='35', lightblue='36', grey='37', darkgrey='30')
    colors[None] = '29'
    try:
        color = colors[color]
    except KeyError:
        # BUGFIX: the format string has two placeholders but was previously
        # given a single argument, so this line raised
        # "TypeError: not enough arguments for format string" instead of
        # the intended ValueError.
        raise ValueError("invalid color %r; choose between %s" % (
            color, list(colors.keys())))
    attr.append(color)
    if bold:
        attr.append('1')
    return '\x1b[%sm%s\x1b[0m' % (';'.join(attr), s)
def print_color(
        s, color=None, bold=False, file=sys.stdout):  # pragma: no cover
    """Print a colorized version of string."""
    if not term_supports_colors():
        print(s, file=file)  # NOQA
    elif POSIX:
        # POSIX terminals: ANSI escapes via hilite()
        print(hilite(s, color, bold), file=file)  # NOQA
    else:
        # Windows: set the console text attribute around the print call.
        import ctypes

        DEFAULT_COLOR = 7
        GetStdHandle = ctypes.windll.Kernel32.GetStdHandle
        SetConsoleTextAttribute = \
            ctypes.windll.Kernel32.SetConsoleTextAttribute

        colors = dict(green=2, red=4, brown=6, yellow=6)
        colors[None] = DEFAULT_COLOR
        try:
            color = colors[color]
        except KeyError:
            raise ValueError("invalid color %r; choose between %r" % (
                color, list(colors.keys())))
        if bold and color <= 7:
            color += 8

        # -11 / -12 are the STD_OUTPUT_HANDLE / STD_ERROR_HANDLE ids
        handle_id = -12 if file is sys.stderr else -11
        GetStdHandle.restype = ctypes.c_ulong
        handle = GetStdHandle(handle_id)
        SetConsoleTextAttribute(handle, color)
        try:
            print(s, file=file)  # NOQA
        finally:
            # always restore the default console color
            SetConsoleTextAttribute(handle, DEFAULT_COLOR)
if bool(os.getenv('PSUTIL_DEBUG', 0)):
    import inspect

    def debug(msg):
        """If PSUTIL_DEBUG env var is set, print a debug message to stderr."""
        # Report the caller's file/line, not this helper's.
        fname, lineno, func_name, lines, index = inspect.getframeinfo(
            inspect.currentframe().f_back)
        print("psutil-debug [%s:%s]> %s" % (fname, lineno, msg),  # NOQA
              file=sys.stderr)
else:
    def debug(msg):
        # no-op when debugging is disabled
        pass
| [
"[email protected]"
] |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.