blob_id (string, len 40) | directory_id (string, len 40) | path (string, len 3-616) | content_id (string, len 40) | detected_licenses (sequence, len 0-112) | license_type (string, 2 classes) | repo_name (string, len 5-115) | snapshot_id (string, len 40) | revision_id (string, len 40) | branch_name (string, 777 classes) | visit_date (timestamp[us], 2015-08-06 10:31:46 to 2023-09-06 10:44:38) | revision_date (timestamp[us], 1970-01-01 02:38:32 to 2037-05-03 13:00:00) | committer_date (timestamp[us], 1970-01-01 02:38:32 to 2023-09-06 01:08:06) | github_id (int64, 4.92k to 681M, nullable) | star_events_count (int64, 0 to 209k) | fork_events_count (int64, 0 to 110k) | gha_license_id (string, 22 classes) | gha_event_created_at (timestamp[us], 2012-06-04 01:52:49 to 2023-09-14 21:59:50, nullable) | gha_created_at (timestamp[us], 2008-05-22 07:58:19 to 2023-08-21 12:35:19, nullable) | gha_language (string, 149 classes) | src_encoding (string, 26 classes) | language (string, 1 class) | is_vendor (bool) | is_generated (bool) | length_bytes (int64, 3 to 10.2M) | extension (string, 188 classes) | content (string, len 3-10.2M) | authors (sequence, len 1) | author_id (string, len 1-132)
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
7f2ff1e453a578c8abf2536555d54b86452d42a3 | be0f3dfbaa2fa3d8bbe59229aef3212d032e7dd1 | /Gauss_v45r8/Gen/DecFiles/options/13134040.py | 2692f3e9b3f0d0a78d49e95833ebb37e89575d13 | [] | no_license | Sally27/backup_cmtuser_full | 34782102ed23c6335c48650a6eaa901137355d00 | 8924bebb935b96d438ce85b384cfc132d9af90f6 | refs/heads/master | 2020-05-21T09:27:04.370765 | 2018-12-12T14:41:07 | 2018-12-12T14:41:07 | 185,989,173 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,853 | py | # file /home/hep/ss4314/cmtuser/Gauss_v45r8/Gen/DecFiles/options/13134040.py generated: Fri, 27 Mar 2015 15:48:00
#
# Event Type: 13134040
#
# ASCII decay Descriptor: {[[B_s0]nos -> (J/psi(1S) -> p+ p~-) (f_0(980) -> pi+ pi-)]cc, [[B_s0]os -> (J/psi(1S) -> p+ p~-) (f_0(980) -> pi- pi+)]cc}
#
from Configurables import Generation
Generation().EventType = 13134040
Generation().SampleGenerationTool = "SignalRepeatedHadronization"
from Configurables import SignalRepeatedHadronization
Generation().addTool( SignalRepeatedHadronization )
Generation().SignalRepeatedHadronization.ProductionTool = "PythiaProduction"
from Configurables import ToolSvc
from Configurables import EvtGenDecay
ToolSvc().addTool( EvtGenDecay )
ToolSvc().EvtGenDecay.UserDecayFile = "$DECFILESROOT/dkfiles/Bs_Jpsif0,pp=DecProdCut.dec"
Generation().SignalRepeatedHadronization.CutTool = "DaughtersInLHCb"
Generation().SignalRepeatedHadronization.SignalPIDList = [ 531,-531 ]
# Ad-hoc particle gun code
from Configurables import ParticleGun
pgun = ParticleGun("ParticleGun")
pgun.SignalPdgCode = 531
pgun.DecayTool = "EvtGenDecay"
pgun.GenCutTool = "DaughtersInLHCb"
from Configurables import FlatNParticles
pgun.NumberOfParticlesTool = "FlatNParticles"
pgun.addTool( FlatNParticles , name = "FlatNParticles" )
from Configurables import MomentumSpectrum
pgun.ParticleGunTool = "MomentumSpectrum"
pgun.addTool( MomentumSpectrum , name = "MomentumSpectrum" )
pgun.MomentumSpectrum.PdgCodes = [ 531,-531 ]
pgun.MomentumSpectrum.InputFile = "$PGUNSDATAROOT/data/Ebeam4000GeV/MomentumSpectrum_531.root"
pgun.MomentumSpectrum.BinningVariables = "pteta"
pgun.MomentumSpectrum.HistogramPath = "h_pteta"
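# The gun draws the B_s0 momentum from the 2D (pt, eta) histogram "h_pteta"
# stored in the spectrum file configured above.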
from Configurables import BeamSpotSmearVertex
pgun.addTool(BeamSpotSmearVertex, name="BeamSpotSmearVertex")
pgun.VertexSmearingTool = "BeamSpotSmearVertex"
pgun.EventType = 13134040
| [
"[email protected]"
] | |
b762af412c6c4bda55851994e6fb8833f90b16e2 | 45c12e6703e621d32ec46137d3c5c65d02d0a2c2 | /08. On Time for the Exam.py | fcea6f82c81e51c2b1c97cc66a937263149929de | [] | no_license | antondelchev/Conditional-Statements-Advanced---Exercise | ce6511e7ad6a06ecebeddc6c758c4dcaa76bf062 | e74a3ee6353d37035955686eb2d334f423758fef | refs/heads/main | 2023-02-22T12:10:18.628103 | 2021-01-25T20:33:25 | 2021-01-25T20:33:25 | 332,012,060 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,603 | py | exam_hour = int(input())
exam_minutes = int(input())
arrival_hour = int(input())
arrival_minutes = int(input())
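# Convert both times to minutes past midnight so they can be compared with plain integer arithmetic.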
converted_mins_exam = exam_hour * 60 + exam_minutes
converted_mins_arrival = arrival_hour * 60 + arrival_minutes
if converted_mins_exam < converted_mins_arrival:
total_mins_difference = converted_mins_arrival - converted_mins_exam
hours_over = total_mins_difference // 60
mins_over = total_mins_difference % 60
print("Late")
if total_mins_difference <= 59:
print(f"{mins_over} minutes after the start")
else:
print(f"{hours_over}:{mins_over:02d} hours after the start")
elif converted_mins_exam == converted_mins_arrival or 0 < (converted_mins_exam - converted_mins_arrival) <= 30:
total_mins_difference = converted_mins_exam - converted_mins_arrival
hours_under = total_mins_difference // 60
mins_under = total_mins_difference % 60
print("On time")
if 0 < total_mins_difference <= 59:
print(f"{mins_under} minutes before the start")
elif total_mins_difference > 59:
print(f"{hours_under}:{mins_under:02d} hours before the start")
elif converted_mins_exam - converted_mins_arrival > 30:
total_mins_difference = converted_mins_exam - converted_mins_arrival
hours_under = total_mins_difference // 60
mins_under = total_mins_difference % 60
print("Early")
if 0 < total_mins_difference <= 59:
print(f"{mins_under} minutes before the start")
elif total_mins_difference > 59:
print(f"{hours_under}:{mins_under:02d} hours before the start")
| [
"[email protected]"
] | |
a98ff93e061dffd4e07b14ff57b7b8f5c6ffe2fa | 92aeff7cf4b42beac59131e6f7cef0f96a3ad12e | /pubmedpy/tests/test_names.py | a556dd7e62afc77f6ad3748119244132af686360 | [
"BlueOak-1.0.0"
] | permissive | ben-heil/pubmedpy | f6a6714ec9452e4a730e48cf1158e325c2c9ac99 | 9d716768f5ab798ec448154588e4fd99afd7584a | refs/heads/main | 2023-04-03T04:16:42.852244 | 2021-04-09T14:22:10 | 2021-04-09T14:22:10 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,213 | py | import pytest
from ..names import simplify_fore_name, simplify_last_name
@pytest.mark.parametrize(
("fore_name", "expected"),
[
(" Daniel ", "Daniel"),
("AB Chow", "Chow"),
("A.B. Chow", "Chow"),
("Mc-Winters", "Mc-Winters"),
("LE", None),
("Le", "Le"),
(None, None),
("", None),
(" ", None),
("-", None),
("-Rafeel!", "Rafeel"),
],
)
def test_simplify_fore_name(fore_name, expected):
assert simplify_fore_name(fore_name) == expected
@pytest.mark.parametrize(
("fore_name", "expected"),
[
(" Daniel ", "daniel"),
("Mc-Winters", "mc-winters"),
("LE", None),
("", None),
(" ", None),
("-", None),
],
)
def test_simplify_fore_name_lower(fore_name, expected):
assert simplify_fore_name(fore_name, lower=True) == expected
@pytest.mark.parametrize(
("last_name", "expected"),
[
(" Heavenstone .", "Heavenstone"),
("Heavenstone", "Heavenstone"),
("", None),
(" ", None),
(None, None),
],
)
def test_simplify_last_name(last_name, expected):
assert simplify_last_name(last_name) == expected
| [
"[email protected]"
] | |
9c4fab802d9025afaed146b90a8ac64f54f6080b | da5bc6efaebc9ff015938d207b25c7804bc03b33 | /12_file/ex03/ex03.py | e38c6d46c75404f638dff5a618058e8ca63df2df | [] | no_license | codud0954/megait_python_20201116 | b0f68f50a1e0d41c3c35535e718d5a236a7b1a98 | a71f57d4332027406953599612cd014de2d26713 | refs/heads/master | 2023-01-31T11:14:27.611468 | 2020-12-18T09:03:11 | 2020-12-18T09:03:11 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 715 | py | # Writing/reading a CSV file
# Create the CSV file
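# The ="..." wrapper keeps the leading zero of the phone number when the file is
# opened in a spreadsheet; the block is commented out, presumably after
# member.csv was generated once.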
# f = open('member.csv', 'w', encoding='ms949')
# f.write("유재석,49,=\"01011112222\"\n")
# f.write("이효리,30,=\"01012346666\"\n")
# f.write("강호동,55,=\"01014563555\"\n")
# f.close()
# Open the CSV file
f = open('member.csv', 'r', encoding='ms949')
while True:
line = f.readline()
if not line:
break
line = line.replace("\n", "") # 줄바꿈 제거
line = line.replace("=", "") # = 제거
line = line.replace('"', "") # " 제거
#print(line)
name, age, phone = line.split(',')
print("이름은 %s이고 나이는 %d, 번호는 %s이다." % (name, int(age), phone))
f.close()
| [
"[email protected]"
] | |
daf3148101435d5d8870c7b72995f4b085a2419a | 70cc02f7ef2ce7e0985ff175f1947bd48452c6b9 | /Leetcode/array/remove_duplicates_sorted_array2.py | 6b0b76920227a4642663af30beb9783d80df90ad | [] | no_license | Harshala-Gaikwad/Programming | 9784c050d9a8d72afefe1836a5493b30194f0a2a | 826a06499fbde4c2310fb9dad1a6fee84bc32f83 | refs/heads/main | 2023-07-04T20:22:27.753976 | 2021-08-19T17:34:23 | 2021-08-19T17:34:23 | 312,341,223 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 284 | py | class Solution:
def removeDuplicates(self, nums: List[int]) -> int:
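        # Build a helper list that keeps at most two occurrences of each value,
        # then copy it back into nums so the change happens in place; the
        # returned value is the length of the deduplicated prefix.
        # list.count makes this O(n^2) - simple rather than optimal.
        # (Outside the LeetCode harness this signature needs: from typing import List)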
l = []
for i in nums:
if l.count(i)<2:
l.append(i)
nums.clear()
for i in l:
nums.append(i)
return len(nums)
| [
"[email protected]"
] | |
9d4d2b81cf1cca80ec29f799099cf8544f48d103 | 97080682a7cabb3fc770de47c97ee757575837d9 | /modern_users/forms.py | 993c3a4cb0a79cd1d010b74e74e9e1dd0b8b51f8 | [] | no_license | SlikNik/modern_village | fe77f210bb2b7eea27a149d7b78634937dfbd733 | ca26e553caa0fe36758b228ca55229a8373c569e | refs/heads/master | 2023-01-02T23:29:16.996471 | 2020-10-20T03:27:12 | 2020-10-20T03:27:12 | 301,758,821 | 2 | 0 | null | 2020-10-19T23:13:17 | 2020-10-06T14:42:43 | Python | UTF-8 | Python | false | false | 1,029 | py | from django import forms
from modern_users.models import ModernUsers
class SignUpForm(forms.ModelForm):
password = forms.CharField(widget=forms.PasswordInput)
    confirm_password = forms.CharField(widget=forms.PasswordInput())
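    # confirm_password is a plain (non-model) form field; it only exists so
    # clean() below can compare it against password.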
class Meta:
model = ModernUsers
fields = ('first_name', 'last_name', 'email', 'address', 'city', 'zipcode', 'age', 'birthday', 'facebook', 'instagram', 'twitter', 'username', 'password', 'user_pic',)
def clean(self):
cleaned_data = super(SignUpForm, self).clean()
password = cleaned_data.get("password")
confirm_password = cleaned_data.get("confirm_password")
        if password != confirm_password:
            raise forms.ValidationError(
                "password and confirm_password do not match"
            )
        return cleaned_data
class EditProfileForm(forms.ModelForm):
class Meta:
model = ModernUsers
fields = ('first_name', 'last_name', 'address', 'city', 'zipcode', 'age', 'birthday', 'facebook', 'instagram', 'twitter', 'user_pic',)
| [
"[email protected]"
] | |
faa36c8bcb8ff93361f4d6bcb847f541945fa7b1 | 1e71542814768fb570dadda69f3ceac57d6f7a35 | /untitled/frame1.py | c2c176a928e83d39319469d7c56e9aadf5bd5cfd | [] | no_license | apabhishek178/website_work | 7a621bb5983e68c971db0ae2117c946a9484a8a7 | 97910ca2541ea612f715d2e19c5f659694941122 | refs/heads/master | 2021-07-21T07:52:21.901795 | 2017-10-29T05:59:35 | 2017-10-29T05:59:35 | 108,672,122 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 458 | py | from tkinter import *
root = Tk()
topframe=Frame(root)
topframe.pack()
bottomframe=Frame(root)
bottomframe.pack(side=RIGHT)
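# Buttons 1-3 go in the top frame; button 4 sits in the frame packed on the right.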
button1 = Button(topframe, text="1", fg="red", font=28)
button2 = Button(topframe, text="2", fg="green", font=26)
button3 = Button(topframe, text="3", fg="blue", font=24)
button4 = Button(bottomframe, text="4", fg="purple", font=22)
button1.pack(side=TOP)
button2.pack(side=LEFT)
button3.pack(side=LEFT)
button4.pack(side=LEFT)
root.mainloop() | [
"[email protected]"
] | |
4ec5569bc9090ac9aaf0a82c8b87e18a9fdd11b6 | a8d771af415aa5f0e23952f8599441f36f3c4733 | /setup.py | c7fbeeacbee493d16d1cd876cf4ff8b36d5146e0 | [
"LicenseRef-scancode-public-domain",
"CC0-1.0"
] | permissive | biomodels/MODEL1302010019 | dda481b65b15cfbedcd70be79c182be4da292cea | 4c746971883b61b7ba85937ee360b8148e9074ea | refs/heads/master | 2021-01-22T05:01:13.045263 | 2014-10-16T05:58:26 | 2014-10-16T05:58:26 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 377 | py | from setuptools import setup, find_packages
setup(name='MODEL1302010019',
version=20140916,
description='MODEL1302010019 from BioModels',
url='http://www.ebi.ac.uk/biomodels-main/MODEL1302010019',
maintainer='Stanley Gu',
      maintainer_email='[email protected]',
packages=find_packages(),
package_data={'': ['*.xml', 'README.md']},
) | [
"[email protected]"
] | |
ec6d550379c5e9ea31b8ebb48e0e9e8f9b1695f4 | a474040f7315e485282e926db751793d66e8718d | /vendor/jx_python/meta.py | 63d69a3007e8fac46897b4212b3edaccf326404c | [] | no_license | klahnakoski/tuid_experiment | f6b14d899428e30e30b07e432d0974882438c068 | 854bd7d87ed4df00c15d58f3999e4ad7eebbeabc | refs/heads/clogger | 2020-03-22T20:33:02.515299 | 2018-07-11T17:37:42 | 2018-07-11T17:37:42 | 140,608,533 | 0 | 0 | null | 2018-07-11T17:40:24 | 2018-07-11T17:31:06 | Python | UTF-8 | Python | false | false | 15,390 | py | # encoding: utf-8
#
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this file,
# You can obtain one at http:# mozilla.org/MPL/2.0/.
#
# Author: Kyle Lahnakoski ([email protected])
#
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from collections import Mapping
from datetime import date
from datetime import datetime
import jx_base
from jx_base import python_type_to_json_type
from jx_base import STRUCT, Column, Table
from jx_base.schema import Schema
from jx_python import jx
from mo_collections import UniqueIndex
from mo_dots import Data, concat_field, get_attr, listwrap, unwraplist, NullType, FlatList, set_default, split_field, join_field, ROOT_PATH, wrap, coalesce
from mo_future import none_type, text_type, long, PY2
from mo_json.typed_encoder import untype_path, unnest_path
from mo_logs import Log
from mo_threads import Lock
from mo_times.dates import Date
singlton = None
class ColumnList(Table):
"""
OPTIMIZED FOR THE PARTICULAR ACCESS PATTERNS USED
"""
def __init__(self):
Table.__init__(self, "meta.columns")
self.data = {} # MAP FROM ES_INDEX TO (abs_column_name to COLUMNS)
self.locker = Lock()
self._schema = None
self.extend(METADATA_COLUMNS)
def find(self, es_index, abs_column_name):
with self.locker:
if es_index.startswith("meta."):
self._update_meta()
if not abs_column_name:
return [c for cs in self.data.get(es_index, {}).values() for c in cs]
else:
return self.data.get(es_index, {}).get(abs_column_name, [])
def extend(self, columns):
self.dirty = True
with self.locker:
for column in columns:
self._add(column)
def add(self, column):
self.dirty = True
with self.locker:
return self._add(column)
def _add(self, column):
columns_for_table = self.data.setdefault(column.es_index, {})
existing_columns = columns_for_table.setdefault(column.names["."], [])
for canonical in existing_columns:
if canonical is column:
return canonical
if canonical.es_type == column.es_type:
set_default(column.names, canonical.names)
for key in Column.__slots__:
canonical[key] = column[key]
return canonical
existing_columns.append(column)
return column
def _update_meta(self):
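        # Recompute the stats (count, cardinality, partitions, multi) stored on
        # the meta.columns columns by scanning every column registered in self.data.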
if not self.dirty:
return
for mcl in self.data.get("meta.columns").values():
for mc in mcl:
count = 0
values = set()
objects = 0
multi = 1
for t, cs in self.data.items():
for c, css in cs.items():
for column in css:
value = column[mc.names["."]]
if value == None:
pass
else:
count += 1
if isinstance(value, list):
multi = max(multi, len(value))
try:
values |= set(value)
except Exception:
objects += len(value)
elif isinstance(value, Mapping):
objects += 1
else:
values.add(value)
mc.count = count
mc.cardinality = len(values) + objects
mc.partitions = jx.sort(values)
mc.multi = multi
mc.last_updated = Date.now()
self.dirty = False
def __iter__(self):
self._update_meta()
for t, cs in self.data.items():
for c, css in cs.items():
for column in css:
yield column
def __len__(self):
return self.data['meta.columns']['es_index'].count
def update(self, command):
self.dirty = True
try:
command = wrap(command)
eq = command.where.eq
if eq.es_index:
columns = self.find(eq.es_index, eq.name)
columns = [
c
for c in columns
if all(get_attr(c, k) == v for k, v in eq.items())
]
else:
with self.locker:
columns = list(self)
columns = jx.filter(columns, command.where)
with self.locker:
for col in list(columns):
for k in command["clear"]:
if k == ".":
columns.remove(col)
else:
col[k] = None
for k, v in command.set.items():
col[k] = v
except Exception as e:
Log.error("should not happen", cause=e)
def query(self, query):
with self.locker:
self._update_meta()
query.frum = self.__iter__()
output = jx.run(query)
return output
def groupby(self, keys):
with self.locker:
self._update_meta()
return jx.groupby(self.__iter__(), keys)
@property
def schema(self):
if not self._schema:
with self.locker:
self._update_meta()
self._schema = Schema(".", [c for cs in self.data["meta.columns"].values() for c in cs])
return self._schema
@property
def namespace(self):
return self
def denormalized(self):
"""
THE INTERNAL STRUCTURE FOR THE COLUMN METADATA IS VERY DIFFERENT FROM
THE DENORMALIZED PERSPECITVE. THIS PROVIDES THAT PERSPECTIVE FOR QUERIES
"""
with self.locker:
self._update_meta()
output = [
{
"table": concat_field(c.es_index, untype_path(table)),
"name": untype_path(name),
"cardinality": c.cardinality,
"es_column": c.es_column,
"es_index": c.es_index,
"last_updated": c.last_updated,
"count": c.count,
"nested_path": [unnest_path(n) for n in c.nested_path],
"es_type": c.es_type,
"type": c.jx_type
}
for tname, css in self.data.items()
for cname, cs in css.items()
for c in cs
if c.jx_type not in STRUCT # and c.es_column != "_id"
for table, name in c.names.items()
]
from jx_python.containers.list_usingPythonList import ListContainer
return ListContainer(
self.name,
data=output,
schema=jx_base.Schema(
"meta.columns",
SIMPLE_METADATA_COLUMNS
)
)
def get_schema_from_list(table_name, frum):
"""
SCAN THE LIST FOR COLUMN TYPES
"""
columns = UniqueIndex(keys=("names.\\.",))
_get_schema_from_list(frum, ".", parent=".", nested_path=ROOT_PATH, columns=columns)
return Schema(table_name=table_name, columns=list(columns))
def _get_schema_from_list(frum, table_name, parent, nested_path, columns):
"""
:param frum: The list
:param table_name: Name of the table this list holds records for
    :param parent: parent path
:param nested_path: each nested array, in reverse order
:param columns: map from full name to column definition
:return:
"""
for d in frum:
row_type = _type_to_name[d.__class__]
if row_type != "object":
full_name = parent
column = columns[full_name]
if not column:
column = Column(
names={table_name: full_name},
es_column=full_name,
es_index=".",
jx_type=python_type_to_json_type[d.__class__],
es_type=row_type,
nested_path=nested_path
)
columns.add(column)
column.es_type = _merge_type[column.es_type][row_type]
column.jx_type = _merge_type[coalesce(column.jx_type, "undefined")][row_type]
else:
for name, value in d.items():
full_name = concat_field(parent, name)
column = columns[full_name]
if not column:
column = Column(
names={table_name: full_name},
es_column=full_name,
es_index=".",
es_type="undefined",
nested_path=nested_path
)
columns.add(column)
if isinstance(value, (list, set)): # GET TYPE OF MULTIVALUE
v = list(value)
if len(v) == 0:
this_type = "undefined"
elif len(v) == 1:
this_type = _type_to_name[v[0].__class__]
else:
this_type = _type_to_name[v[0].__class__]
if this_type == "object":
this_type = "nested"
else:
this_type = _type_to_name[value.__class__]
new_type = _merge_type[column.es_type][this_type]
column.es_type = new_type
if this_type == "object":
_get_schema_from_list([value], table_name, full_name, nested_path, columns)
elif this_type == "nested":
np = listwrap(nested_path)
newpath = unwraplist([join_field(split_field(np[0]) + [name])] + np)
_get_schema_from_list(value, table_name, full_name, newpath, columns)
METADATA_COLUMNS = (
[
Column(
names={".": c},
es_index="meta.columns",
es_column=c,
es_type="string",
nested_path=ROOT_PATH
)
for c in ["es_type", "jx_type", "nested_path", "es_column", "es_index"]
] + [
Column(
es_index="meta.columns",
names={".": c},
es_column=c,
es_type="object",
nested_path=ROOT_PATH
)
for c in ["names", "partitions"]
] + [
Column(
names={".": c},
es_index="meta.columns",
es_column=c,
es_type="long",
nested_path=ROOT_PATH
)
for c in ["count", "cardinality", "multi"]
] + [
Column(
names={".": "last_updated"},
es_index="meta.columns",
es_column="last_updated",
es_type="time",
nested_path=ROOT_PATH
)
]
)
SIMPLE_METADATA_COLUMNS = (
[
Column(
names={".": c},
es_index="meta.columns",
es_column=c,
es_type="string",
nested_path=ROOT_PATH
)
for c in ["table", "name", "type", "nested_path"]
] + [
Column(
names={".": c},
es_index="meta.columns",
es_column=c,
es_type="long",
nested_path=ROOT_PATH
)
for c in ["count", "cardinality", "multi"]
] + [
Column(
names={".": "last_updated"},
es_index="meta.columns",
es_column="last_updated",
es_type="time",
nested_path=ROOT_PATH
)
]
)
_type_to_name = {
none_type: "undefined",
NullType: "undefined",
bool: "boolean",
str: "string",
text_type: "string",
int: "integer",
float: "double",
Data: "object",
dict: "object",
set: "nested",
list: "nested",
FlatList: "nested",
Date: "double",
datetime: "double",
date: "double"
}
if PY2:
_type_to_name[long] = "integer"
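# Pairwise type-promotion table: _merge_type[existing][incoming] yields the
# merged type name; None marks combinations that cannot be reconciled.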
_merge_type = {
"undefined": {
"undefined": "undefined",
"boolean": "boolean",
"integer": "integer",
"long": "long",
"float": "float",
"double": "double",
"number": "number",
"string": "string",
"object": "object",
"nested": "nested"
},
"boolean": {
"undefined": "boolean",
"boolean": "boolean",
"integer": "integer",
"long": "long",
"float": "float",
"double": "double",
"number": "number",
"string": "string",
"object": None,
"nested": None
},
"integer": {
"undefined": "integer",
"boolean": "integer",
"integer": "integer",
"long": "long",
"float": "float",
"double": "double",
"number": "number",
"string": "string",
"object": None,
"nested": None
},
"long": {
"undefined": "long",
"boolean": "long",
"integer": "long",
"long": "long",
"float": "double",
"double": "double",
"number": "number",
"string": "string",
"object": None,
"nested": None
},
"float": {
"undefined": "float",
"boolean": "float",
"integer": "float",
"long": "double",
"float": "float",
"double": "double",
"number": "number",
"string": "string",
"object": None,
"nested": None
},
"double": {
"undefined": "double",
"boolean": "double",
"integer": "double",
"long": "double",
"float": "double",
"double": "double",
"number": "number",
"string": "string",
"object": None,
"nested": None
},
"number": {
"undefined": "number",
"boolean": "number",
"integer": "number",
"long": "number",
"float": "number",
"double": "number",
"number": "number",
"string": "string",
"object": None,
"nested": None
},
"string": {
"undefined": "string",
"boolean": "string",
"integer": "string",
"long": "string",
"float": "string",
"double": "string",
"number": "string",
"string": "string",
"object": None,
"nested": None
},
"object": {
"undefined": "object",
"boolean": None,
"integer": None,
"long": None,
"float": None,
"double": None,
"number": None,
"string": None,
"object": "object",
"nested": "nested"
},
"nested": {
"undefined": "nested",
"boolean": None,
"integer": None,
"long": None,
"float": None,
"double": None,
"number": None,
"string": None,
"object": "nested",
"nested": "nested"
}
}
| [
"[email protected]"
] | |
d8143fe470c7d97ff2001d71a1ec498b37680e06 | 4bd555bc662b8182a2e7644976bfdb00ed5e1ebe | /PythonistaAppTemplate/PythonistaKit.framework/pylib/encodings/ptcp154.py | 670112ca0f9772b1f225c6906ef4f20da39f0624 | [] | no_license | fhelmli/homeNOWG2 | a103df1ef97194dec9501dbda87ec1f7c111fb4a | e794fd87b296544542fd9dc7ac94c981c6312419 | refs/heads/master | 2020-04-04T13:40:20.417769 | 2019-01-30T21:41:04 | 2019-01-30T21:41:04 | 155,970,686 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 16,855 | py | #\input texinfo
""" Python Character Mapping Codec generated from 'PTCP154.txt' with gencodec.py.
Written by Marc-Andre Lemburg ([email protected]).
(c) Copyright CNRI, All Rights Reserved. NO WARRANTY.
(c) Copyright 2000 Guido van Rossum.
"""#"
import codecs
### Codec APIs
class Codec(codecs.Codec):
def encode(self,input,errors='strict'):
return codecs.charmap_encode(input,errors,encoding_map)
def decode(self,input,errors='strict'):
return codecs.charmap_decode(input,errors,decoding_map)
class IncrementalEncoder(codecs.IncrementalEncoder):
def encode(self, input, final=False):
return codecs.charmap_encode(input,self.errors,encoding_map)[0]
class IncrementalDecoder(codecs.IncrementalDecoder):
def decode(self, input, final=False):
return codecs.charmap_decode(input,self.errors,decoding_map)[0]
class StreamWriter(Codec,codecs.StreamWriter):
pass
class StreamReader(Codec,codecs.StreamReader):
pass
### encodings module API
def getregentry():
return codecs.CodecInfo(
name='ptcp154',
encode=Codec().encode,
decode=Codec().decode,
incrementalencoder=IncrementalEncoder,
incrementaldecoder=IncrementalDecoder,
streamreader=StreamReader,
streamwriter=StreamWriter,
)
### Decoding Map
decoding_map = codecs.make_identity_dict(range(256))
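# Start from the identity mapping for bytes 0x00-0xFF, then override only the
# positions where PTCP154 differs from it.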
decoding_map.update({
0x0080: 0x0496, # CYRILLIC CAPITAL LETTER ZHE WITH DESCENDER
0x0081: 0x0492, # CYRILLIC CAPITAL LETTER GHE WITH STROKE
0x0082: 0x04ee, # CYRILLIC CAPITAL LETTER U WITH MACRON
0x0083: 0x0493, # CYRILLIC SMALL LETTER GHE WITH STROKE
0x0084: 0x201e, # DOUBLE LOW-9 QUOTATION MARK
0x0085: 0x2026, # HORIZONTAL ELLIPSIS
0x0086: 0x04b6, # CYRILLIC CAPITAL LETTER CHE WITH DESCENDER
0x0087: 0x04ae, # CYRILLIC CAPITAL LETTER STRAIGHT U
0x0088: 0x04b2, # CYRILLIC CAPITAL LETTER HA WITH DESCENDER
0x0089: 0x04af, # CYRILLIC SMALL LETTER STRAIGHT U
0x008a: 0x04a0, # CYRILLIC CAPITAL LETTER BASHKIR KA
0x008b: 0x04e2, # CYRILLIC CAPITAL LETTER I WITH MACRON
0x008c: 0x04a2, # CYRILLIC CAPITAL LETTER EN WITH DESCENDER
0x008d: 0x049a, # CYRILLIC CAPITAL LETTER KA WITH DESCENDER
0x008e: 0x04ba, # CYRILLIC CAPITAL LETTER SHHA
0x008f: 0x04b8, # CYRILLIC CAPITAL LETTER CHE WITH VERTICAL STROKE
0x0090: 0x0497, # CYRILLIC SMALL LETTER ZHE WITH DESCENDER
0x0091: 0x2018, # LEFT SINGLE QUOTATION MARK
0x0092: 0x2019, # RIGHT SINGLE QUOTATION MARK
0x0093: 0x201c, # LEFT DOUBLE QUOTATION MARK
0x0094: 0x201d, # RIGHT DOUBLE QUOTATION MARK
0x0095: 0x2022, # BULLET
0x0096: 0x2013, # EN DASH
0x0097: 0x2014, # EM DASH
0x0098: 0x04b3, # CYRILLIC SMALL LETTER HA WITH DESCENDER
0x0099: 0x04b7, # CYRILLIC SMALL LETTER CHE WITH DESCENDER
0x009a: 0x04a1, # CYRILLIC SMALL LETTER BASHKIR KA
0x009b: 0x04e3, # CYRILLIC SMALL LETTER I WITH MACRON
0x009c: 0x04a3, # CYRILLIC SMALL LETTER EN WITH DESCENDER
0x009d: 0x049b, # CYRILLIC SMALL LETTER KA WITH DESCENDER
0x009e: 0x04bb, # CYRILLIC SMALL LETTER SHHA
0x009f: 0x04b9, # CYRILLIC SMALL LETTER CHE WITH VERTICAL STROKE
0x00a1: 0x040e, # CYRILLIC CAPITAL LETTER SHORT U (Byelorussian)
0x00a2: 0x045e, # CYRILLIC SMALL LETTER SHORT U (Byelorussian)
0x00a3: 0x0408, # CYRILLIC CAPITAL LETTER JE
0x00a4: 0x04e8, # CYRILLIC CAPITAL LETTER BARRED O
0x00a5: 0x0498, # CYRILLIC CAPITAL LETTER ZE WITH DESCENDER
0x00a6: 0x04b0, # CYRILLIC CAPITAL LETTER STRAIGHT U WITH STROKE
0x00a8: 0x0401, # CYRILLIC CAPITAL LETTER IO
0x00aa: 0x04d8, # CYRILLIC CAPITAL LETTER SCHWA
0x00ad: 0x04ef, # CYRILLIC SMALL LETTER U WITH MACRON
0x00af: 0x049c, # CYRILLIC CAPITAL LETTER KA WITH VERTICAL STROKE
0x00b1: 0x04b1, # CYRILLIC SMALL LETTER STRAIGHT U WITH STROKE
0x00b2: 0x0406, # CYRILLIC CAPITAL LETTER BYELORUSSIAN-UKRAINIAN I
0x00b3: 0x0456, # CYRILLIC SMALL LETTER BYELORUSSIAN-UKRAINIAN I
0x00b4: 0x0499, # CYRILLIC SMALL LETTER ZE WITH DESCENDER
0x00b5: 0x04e9, # CYRILLIC SMALL LETTER BARRED O
0x00b8: 0x0451, # CYRILLIC SMALL LETTER IO
0x00b9: 0x2116, # NUMERO SIGN
0x00ba: 0x04d9, # CYRILLIC SMALL LETTER SCHWA
0x00bc: 0x0458, # CYRILLIC SMALL LETTER JE
0x00bd: 0x04aa, # CYRILLIC CAPITAL LETTER ES WITH DESCENDER
0x00be: 0x04ab, # CYRILLIC SMALL LETTER ES WITH DESCENDER
0x00bf: 0x049d, # CYRILLIC SMALL LETTER KA WITH VERTICAL STROKE
0x00c0: 0x0410, # CYRILLIC CAPITAL LETTER A
0x00c1: 0x0411, # CYRILLIC CAPITAL LETTER BE
0x00c2: 0x0412, # CYRILLIC CAPITAL LETTER VE
0x00c3: 0x0413, # CYRILLIC CAPITAL LETTER GHE
0x00c4: 0x0414, # CYRILLIC CAPITAL LETTER DE
0x00c5: 0x0415, # CYRILLIC CAPITAL LETTER IE
0x00c6: 0x0416, # CYRILLIC CAPITAL LETTER ZHE
0x00c7: 0x0417, # CYRILLIC CAPITAL LETTER ZE
0x00c8: 0x0418, # CYRILLIC CAPITAL LETTER I
0x00c9: 0x0419, # CYRILLIC CAPITAL LETTER SHORT I
0x00ca: 0x041a, # CYRILLIC CAPITAL LETTER KA
0x00cb: 0x041b, # CYRILLIC CAPITAL LETTER EL
0x00cc: 0x041c, # CYRILLIC CAPITAL LETTER EM
0x00cd: 0x041d, # CYRILLIC CAPITAL LETTER EN
0x00ce: 0x041e, # CYRILLIC CAPITAL LETTER O
0x00cf: 0x041f, # CYRILLIC CAPITAL LETTER PE
0x00d0: 0x0420, # CYRILLIC CAPITAL LETTER ER
0x00d1: 0x0421, # CYRILLIC CAPITAL LETTER ES
0x00d2: 0x0422, # CYRILLIC CAPITAL LETTER TE
0x00d3: 0x0423, # CYRILLIC CAPITAL LETTER U
0x00d4: 0x0424, # CYRILLIC CAPITAL LETTER EF
0x00d5: 0x0425, # CYRILLIC CAPITAL LETTER HA
0x00d6: 0x0426, # CYRILLIC CAPITAL LETTER TSE
0x00d7: 0x0427, # CYRILLIC CAPITAL LETTER CHE
0x00d8: 0x0428, # CYRILLIC CAPITAL LETTER SHA
0x00d9: 0x0429, # CYRILLIC CAPITAL LETTER SHCHA
0x00da: 0x042a, # CYRILLIC CAPITAL LETTER HARD SIGN
0x00db: 0x042b, # CYRILLIC CAPITAL LETTER YERU
0x00dc: 0x042c, # CYRILLIC CAPITAL LETTER SOFT SIGN
0x00dd: 0x042d, # CYRILLIC CAPITAL LETTER E
0x00de: 0x042e, # CYRILLIC CAPITAL LETTER YU
0x00df: 0x042f, # CYRILLIC CAPITAL LETTER YA
0x00e0: 0x0430, # CYRILLIC SMALL LETTER A
0x00e1: 0x0431, # CYRILLIC SMALL LETTER BE
0x00e2: 0x0432, # CYRILLIC SMALL LETTER VE
0x00e3: 0x0433, # CYRILLIC SMALL LETTER GHE
0x00e4: 0x0434, # CYRILLIC SMALL LETTER DE
0x00e5: 0x0435, # CYRILLIC SMALL LETTER IE
0x00e6: 0x0436, # CYRILLIC SMALL LETTER ZHE
0x00e7: 0x0437, # CYRILLIC SMALL LETTER ZE
0x00e8: 0x0438, # CYRILLIC SMALL LETTER I
0x00e9: 0x0439, # CYRILLIC SMALL LETTER SHORT I
0x00ea: 0x043a, # CYRILLIC SMALL LETTER KA
0x00eb: 0x043b, # CYRILLIC SMALL LETTER EL
0x00ec: 0x043c, # CYRILLIC SMALL LETTER EM
0x00ed: 0x043d, # CYRILLIC SMALL LETTER EN
0x00ee: 0x043e, # CYRILLIC SMALL LETTER O
0x00ef: 0x043f, # CYRILLIC SMALL LETTER PE
0x00f0: 0x0440, # CYRILLIC SMALL LETTER ER
0x00f1: 0x0441, # CYRILLIC SMALL LETTER ES
0x00f2: 0x0442, # CYRILLIC SMALL LETTER TE
0x00f3: 0x0443, # CYRILLIC SMALL LETTER U
0x00f4: 0x0444, # CYRILLIC SMALL LETTER EF
0x00f5: 0x0445, # CYRILLIC SMALL LETTER HA
0x00f6: 0x0446, # CYRILLIC SMALL LETTER TSE
0x00f7: 0x0447, # CYRILLIC SMALL LETTER CHE
0x00f8: 0x0448, # CYRILLIC SMALL LETTER SHA
0x00f9: 0x0449, # CYRILLIC SMALL LETTER SHCHA
0x00fa: 0x044a, # CYRILLIC SMALL LETTER HARD SIGN
0x00fb: 0x044b, # CYRILLIC SMALL LETTER YERU
0x00fc: 0x044c, # CYRILLIC SMALL LETTER SOFT SIGN
0x00fd: 0x044d, # CYRILLIC SMALL LETTER E
0x00fe: 0x044e, # CYRILLIC SMALL LETTER YU
0x00ff: 0x044f, # CYRILLIC SMALL LETTER YA
})
### Encoding Map
encoding_map = codecs.make_encoding_map(decoding_map)
| [
"[email protected]"
] | |
6515b86f3b9cb06fd00e71f39ec0409ca301402f | 12258001571bd504223fbf4587870960fa93a46d | /mud/django-haystack-2.3.2/test_haystack/test_indexes.py | 11dabc7bf51aa1600dfa66a75598c1a9e51880b0 | [
"BSD-3-Clause",
"MIT"
] | permissive | Nik0las1984/mud-obj | 0bd71e71855a9b0f0d3244dec2c877bd212cdbd2 | 5d74280724ff6c6ac1b2d3a7c86b382e512ecf4d | refs/heads/master | 2023-01-07T04:12:33.472377 | 2019-10-11T09:10:14 | 2019-10-11T09:10:14 | 69,223,190 | 2 | 0 | null | 2022-12-26T20:15:20 | 2016-09-26T07:11:49 | Python | UTF-8 | Python | false | false | 26,599 | py | import datetime
from threading import Thread
import time
from django.test import TestCase
from django.utils.six.moves import queue
from haystack import connections, connection_router
from haystack.exceptions import SearchFieldError
from haystack import indexes
from haystack.utils.loading import UnifiedIndex
from test_haystack.core.models import MockModel, AThirdMockModel, AFifthMockModel
class BadSearchIndex1(indexes.SearchIndex, indexes.Indexable):
author = indexes.CharField(model_attr='author')
pub_date = indexes.DateTimeField(model_attr='pub_date')
def get_model(self):
return MockModel
class BadSearchIndex2(indexes.SearchIndex, indexes.Indexable):
text = indexes.CharField(document=True, use_template=True)
content2 = indexes.CharField(document=True, use_template=True)
author = indexes.CharField(model_attr='author')
pub_date = indexes.DateTimeField(model_attr='pub_date')
def get_model(self):
return MockModel
class GoodMockSearchIndex(indexes.SearchIndex, indexes.Indexable):
text = indexes.CharField(document=True, use_template=True)
author = indexes.CharField(model_attr='author')
pub_date = indexes.DateTimeField(model_attr='pub_date')
extra = indexes.CharField(indexed=False, use_template=True)
def get_model(self):
return MockModel
# For testing inheritance...
class AltGoodMockSearchIndex(GoodMockSearchIndex, indexes.Indexable):
additional = indexes.CharField(model_attr='author')
def get_model(self):
return MockModel
class GoodCustomMockSearchIndex(indexes.SearchIndex, indexes.Indexable):
text = indexes.CharField(document=True, use_template=True)
author = indexes.CharField(model_attr='author', faceted=True)
pub_date = indexes.DateTimeField(model_attr='pub_date', faceted=True)
extra = indexes.CharField(indexed=False, use_template=True)
hello = indexes.CharField(model_attr='hello')
def prepare(self, obj):
super(GoodCustomMockSearchIndex, self).prepare(obj)
self.prepared_data['whee'] = 'Custom preparation.'
return self.prepared_data
def prepare_author(self, obj):
return "Hi, I'm %s" % self.prepared_data['author']
def load_all_queryset(self):
return self.get_model()._default_manager.filter(id__gt=1)
def get_model(self):
return MockModel
def index_queryset(self, using=None):
return MockModel.objects.all()
def read_queryset(self, using=None):
return MockModel.objects.filter(author__in=['daniel1', 'daniel3'])
def build_queryset(self, start_date=None, end_date=None):
return MockModel.objects.filter(author__in=['daniel1', 'daniel3'])
class GoodNullableMockSearchIndex(indexes.SearchIndex, indexes.Indexable):
text = indexes.CharField(document=True, use_template=True)
author = indexes.CharField(model_attr='author', null=True, faceted=True)
def get_model(self):
return MockModel
class GoodOverriddenFieldNameMockSearchIndex(indexes.SearchIndex, indexes.Indexable):
text = indexes.CharField(document=True, use_template=True, index_fieldname='more_content')
author = indexes.CharField(model_attr='author', index_fieldname='name_s')
hello = indexes.CharField(model_attr='hello')
def get_model(self):
return MockModel
class GoodFacetedMockSearchIndex(indexes.SearchIndex, indexes.Indexable):
text = indexes.CharField(document=True, use_template=True)
author = indexes.CharField(model_attr='author')
author_foo = indexes.FacetCharField(facet_for='author')
pub_date = indexes.DateTimeField(model_attr='pub_date')
pub_date_exact = indexes.FacetDateTimeField(facet_for='pub_date')
def get_model(self):
return MockModel
def prepare_author(self, obj):
return "Hi, I'm %s" % self.prepared_data['author']
def prepare_pub_date_exact(self, obj):
return "2010-10-26T01:54:32"
class MROFieldsSearchIndexA(indexes.SearchIndex, indexes.Indexable):
text = indexes.CharField(document=True, model_attr='test_a')
def get_model(self):
return MockModel
class MROFieldsSearchIndexB(indexes.SearchIndex, indexes.Indexable):
text = indexes.CharField(document=True, model_attr='test_b')
def get_model(self):
return MockModel
class MROFieldsSearchChild(MROFieldsSearchIndexA, MROFieldsSearchIndexB):
pass
class SearchIndexTestCase(TestCase):
def setUp(self):
super(SearchIndexTestCase, self).setUp()
self.sb = connections['default'].get_backend()
self.mi = GoodMockSearchIndex()
self.cmi = GoodCustomMockSearchIndex()
self.cnmi = GoodNullableMockSearchIndex()
self.gfmsi = GoodFacetedMockSearchIndex()
# Fake the unified index.
self.old_unified_index = connections['default']._index
self.ui = UnifiedIndex()
self.ui.build(indexes=[self.mi])
connections['default']._index = self.ui
self.sample_docs = {
u'core.mockmodel.1': {
'text': u'Indexed!\n1',
'django_id': u'1',
'django_ct': u'core.mockmodel',
'extra': u'Stored!\n1',
'author': u'daniel1',
'pub_date': datetime.datetime(2009, 3, 17, 6, 0),
'id': u'core.mockmodel.1'
},
u'core.mockmodel.2': {
'text': u'Indexed!\n2',
'django_id': u'2',
'django_ct': u'core.mockmodel',
'extra': u'Stored!\n2',
'author': u'daniel2',
'pub_date': datetime.datetime(2009, 3, 17, 7, 0),
'id': u'core.mockmodel.2'
},
u'core.mockmodel.3': {
'text': u'Indexed!\n3',
'django_id': u'3',
'django_ct': u'core.mockmodel',
'extra': u'Stored!\n3',
'author': u'daniel3',
'pub_date': datetime.datetime(2009, 3, 17, 8, 0),
'id': u'core.mockmodel.3'
}
}
def tearDown(self):
connections['default']._index = self.old_unified_index
super(SearchIndexTestCase, self).tearDown()
def test_no_contentfield_present(self):
self.assertRaises(SearchFieldError, BadSearchIndex1)
def test_too_many_contentfields_present(self):
self.assertRaises(SearchFieldError, BadSearchIndex2)
def test_contentfield_present(self):
try:
mi = GoodMockSearchIndex()
except:
self.fail()
def test_proper_fields(self):
self.assertEqual(len(self.mi.fields), 4)
self.assertTrue('text' in self.mi.fields)
self.assertTrue(isinstance(self.mi.fields['text'], indexes.CharField))
self.assertTrue('author' in self.mi.fields)
self.assertTrue(isinstance(self.mi.fields['author'], indexes.CharField))
self.assertTrue('pub_date' in self.mi.fields)
self.assertTrue(isinstance(self.mi.fields['pub_date'], indexes.DateTimeField))
self.assertTrue('extra' in self.mi.fields)
self.assertTrue(isinstance(self.mi.fields['extra'], indexes.CharField))
self.assertEqual(len(self.cmi.fields), 7)
self.assertTrue('text' in self.cmi.fields)
self.assertTrue(isinstance(self.cmi.fields['text'], indexes.CharField))
self.assertTrue('author' in self.cmi.fields)
self.assertTrue(isinstance(self.cmi.fields['author'], indexes.CharField))
self.assertTrue('author_exact' in self.cmi.fields)
self.assertTrue(isinstance(self.cmi.fields['author_exact'], indexes.FacetCharField))
self.assertTrue('pub_date' in self.cmi.fields)
self.assertTrue(isinstance(self.cmi.fields['pub_date'], indexes.DateTimeField))
self.assertTrue('pub_date_exact' in self.cmi.fields)
self.assertTrue(isinstance(self.cmi.fields['pub_date_exact'], indexes.FacetDateTimeField))
self.assertTrue('extra' in self.cmi.fields)
self.assertTrue(isinstance(self.cmi.fields['extra'], indexes.CharField))
self.assertTrue('hello' in self.cmi.fields)
self.assertTrue(isinstance(self.cmi.fields['extra'], indexes.CharField))
def test_index_queryset(self):
self.assertEqual(len(self.cmi.index_queryset()), 3)
def test_read_queryset(self):
self.assertEqual(len(self.cmi.read_queryset()), 2)
def test_build_queryset(self):
# The custom SearchIndex.build_queryset returns the same records as
# the read_queryset
self.assertEqual(len(self.cmi.build_queryset()), 2)
# Store a reference to the original method
old_guf = self.mi.__class__.get_updated_field
self.mi.__class__.get_updated_field = lambda self: 'pub_date'
# With an updated field, we should get have filtered results
sd = datetime.datetime(2009, 3, 17, 7, 0)
self.assertEqual(len(self.mi.build_queryset(start_date=sd)), 2)
ed = datetime.datetime(2009, 3, 17, 7, 59)
self.assertEqual(len(self.mi.build_queryset(end_date=ed)), 2)
sd = datetime.datetime(2009, 3, 17, 6, 0)
ed = datetime.datetime(2009, 3, 17, 6, 59)
self.assertEqual(len(self.mi.build_queryset(start_date=sd,
end_date=ed)), 1)
# Remove the updated field for the next test
del self.mi.__class__.get_updated_field
# The default should return all 3 even if we specify a start date
# because there is no updated field specified
self.assertEqual(len(self.mi.build_queryset(start_date=sd)), 3)
# Restore the original attribute
self.mi.__class__.get_updated_field = old_guf
def test_prepare(self):
mock = MockModel()
mock.pk = 20
mock.author = 'daniel%s' % mock.id
mock.pub_date = datetime.datetime(2009, 1, 31, 4, 19, 0)
self.assertEqual(len(self.mi.prepare(mock)), 7)
self.assertEqual(sorted(self.mi.prepare(mock).keys()), ['author', 'django_ct', 'django_id', 'extra', 'id', 'pub_date', 'text'])
def test_custom_prepare(self):
mock = MockModel()
mock.pk = 20
mock.author = 'daniel%s' % mock.id
mock.pub_date = datetime.datetime(2009, 1, 31, 4, 19, 0)
self.assertEqual(len(self.cmi.prepare(mock)), 11)
self.assertEqual(sorted(self.cmi.prepare(mock).keys()), ['author', 'author_exact', 'django_ct', 'django_id', 'extra', 'hello', 'id', 'pub_date', 'pub_date_exact', 'text', 'whee'])
self.assertEqual(len(self.cmi.full_prepare(mock)), 11)
self.assertEqual(sorted(self.cmi.full_prepare(mock).keys()), ['author', 'author_exact', 'django_ct', 'django_id', 'extra', 'hello', 'id', 'pub_date', 'pub_date_exact', 'text', 'whee'])
def test_thread_safety(self):
# This is a regression. ``SearchIndex`` used to write to
# ``self.prepared_data``, which would leak between threads if things
# went too fast.
exceptions = []
def threaded_prepare(index_queue, index, model):
try:
index.queue = index_queue
prepped = index.prepare(model)
except Exception as e:
exceptions.append(e)
raise
class ThreadedSearchIndex(GoodMockSearchIndex):
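            # Stagger the sleeps so the two threads' prepare() calls interleave;
            # with shared state, the faster thread's data leaked into the slower one.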
def prepare_author(self, obj):
if obj.pk == 20:
time.sleep(0.1)
else:
time.sleep(0.5)
index_queue.put(self.prepared_data['author'])
return self.prepared_data['author']
tmi = ThreadedSearchIndex()
index_queue = queue.Queue()
mock_1 = MockModel()
mock_1.pk = 20
mock_1.author = 'foo'
mock_1.pub_date = datetime.datetime(2009, 1, 31, 4, 19, 0)
mock_2 = MockModel()
mock_2.pk = 21
mock_2.author = 'daniel%s' % mock_2.id
mock_2.pub_date = datetime.datetime(2009, 1, 31, 4, 19, 0)
th1 = Thread(target=threaded_prepare, args=(index_queue, tmi, mock_1))
th2 = Thread(target=threaded_prepare, args=(index_queue, tmi, mock_2))
th1.start()
th2.start()
th1.join()
th2.join()
mock_1_result = index_queue.get()
mock_2_result = index_queue.get()
self.assertEqual(mock_1_result, u'foo')
self.assertEqual(mock_2_result, u'daniel21')
def test_custom_prepare_author(self):
mock = MockModel()
mock.pk = 20
mock.author = 'daniel%s' % mock.id
mock.pub_date = datetime.datetime(2009, 1, 31, 4, 19, 0)
self.assertEqual(len(self.cmi.prepare(mock)), 11)
self.assertEqual(sorted(self.cmi.prepare(mock).keys()), ['author', 'author_exact', 'django_ct', 'django_id', 'extra', 'hello', 'id', 'pub_date', 'pub_date_exact', 'text', 'whee'])
self.assertEqual(len(self.cmi.full_prepare(mock)), 11)
self.assertEqual(sorted(self.cmi.full_prepare(mock).keys()), ['author', 'author_exact', 'django_ct', 'django_id', 'extra', 'hello', 'id', 'pub_date', 'pub_date_exact', 'text', 'whee'])
self.assertEqual(self.cmi.prepared_data['author'], "Hi, I'm daniel20")
self.assertEqual(self.cmi.prepared_data['author_exact'], "Hi, I'm daniel20")
def test_custom_model_attr(self):
mock = MockModel()
mock.pk = 20
mock.author = 'daniel%s' % mock.id
mock.pub_date = datetime.datetime(2009, 1, 31, 4, 19, 0)
self.assertEqual(len(self.cmi.prepare(mock)), 11)
self.assertEqual(sorted(self.cmi.prepare(mock).keys()), ['author', 'author_exact', 'django_ct', 'django_id', 'extra', 'hello', 'id', 'pub_date', 'pub_date_exact', 'text', 'whee'])
self.assertEqual(len(self.cmi.full_prepare(mock)), 11)
self.assertEqual(sorted(self.cmi.full_prepare(mock).keys()), ['author', 'author_exact', 'django_ct', 'django_id', 'extra', 'hello', 'id', 'pub_date', 'pub_date_exact', 'text', 'whee'])
self.assertEqual(self.cmi.prepared_data['hello'], u'World!')
def test_custom_index_fieldname(self):
mock = MockModel()
mock.pk = 20
mock.author = 'daniel%s' % mock.id
mock.pub_date = datetime.datetime(2009, 1, 31, 4, 19, 0)
cofnmi = GoodOverriddenFieldNameMockSearchIndex()
self.assertEqual(len(cofnmi.prepare(mock)), 6)
self.assertEqual(sorted(cofnmi.prepare(mock).keys()), ['django_ct', 'django_id', 'hello', 'id', 'more_content', 'name_s'])
self.assertEqual(cofnmi.prepared_data['name_s'], u'daniel20')
self.assertEqual(cofnmi.get_content_field(), 'more_content')
def test_get_content_field(self):
self.assertEqual(self.mi.get_content_field(), 'text')
def test_update(self):
self.sb.clear()
self.assertEqual(self.sb.search('*')['hits'], 0)
self.mi.update()
self.assertEqual(self.sb.search('*')['hits'], 3)
self.sb.clear()
def test_update_object(self):
self.sb.clear()
self.assertEqual(self.sb.search('*')['hits'], 0)
mock = MockModel()
mock.pk = 20
mock.author = 'daniel%s' % mock.id
mock.pub_date = datetime.datetime(2009, 1, 31, 4, 19, 0)
self.mi.update_object(mock)
self.assertEqual([(res.content_type(), res.pk) for res in self.sb.search('*')['results']], [(u'core.mockmodel', u'20')])
self.sb.clear()
def test_remove_object(self):
self.mi.update()
self.assertEqual(self.sb.search('*')['hits'], 3)
mock = MockModel()
mock.pk = 20
mock.author = 'daniel%s' % mock.id
mock.pub_date = datetime.datetime(2009, 1, 31, 4, 19, 0)
self.mi.update_object(mock)
self.assertEqual(self.sb.search('*')['hits'], 4)
self.mi.remove_object(mock)
self.assertEqual([(res.content_type(), res.pk) for res in self.sb.search('*')['results']], [(u'core.mockmodel', u'1'), (u'core.mockmodel', u'2'), (u'core.mockmodel', u'3')])
# Put it back so we can test passing kwargs.
mock = MockModel()
mock.pk = 20
mock.author = 'daniel%s' % mock.id
mock.pub_date = datetime.datetime(2009, 1, 31, 4, 19, 0)
self.mi.update_object(mock)
self.assertEqual(self.sb.search('*')['hits'], 4)
self.mi.remove_object(mock, commit=False)
self.assertEqual([(res.content_type(), res.pk) for res in self.sb.search('*')['results']], [(u'core.mockmodel', u'1'), (u'core.mockmodel', u'2'), (u'core.mockmodel', u'3'), (u'core.mockmodel', u'20')])
self.sb.clear()
def test_clear(self):
self.mi.update()
self.assertGreater(self.sb.search('*')['hits'], 0)
self.mi.clear()
self.assertEqual(self.sb.search('*')['hits'], 0)
def test_reindex(self):
self.mi.reindex()
self.assertEqual([(res.content_type(), res.pk) for res in self.sb.search('*')['results']], [(u'core.mockmodel', u'1'), (u'core.mockmodel', u'2'), (u'core.mockmodel', u'3')])
self.sb.clear()
def test_inheritance(self):
try:
agmi = AltGoodMockSearchIndex()
except:
self.fail()
self.assertEqual(len(agmi.fields), 5)
self.assertTrue('text' in agmi.fields)
self.assertTrue(isinstance(agmi.fields['text'], indexes.CharField))
self.assertTrue('author' in agmi.fields)
self.assertTrue(isinstance(agmi.fields['author'], indexes.CharField))
self.assertTrue('pub_date' in agmi.fields)
self.assertTrue(isinstance(agmi.fields['pub_date'], indexes.DateTimeField))
self.assertTrue('extra' in agmi.fields)
self.assertTrue(isinstance(agmi.fields['extra'], indexes.CharField))
self.assertTrue('additional' in agmi.fields)
self.assertTrue(isinstance(agmi.fields['additional'], indexes.CharField))
def test_proper_field_resolution(self):
mrofsc = MROFieldsSearchChild()
mock = MockModel()
mock.pk = 20
mock.author = 'daniel%s' % mock.id
mock.pub_date = datetime.datetime(2009, 1, 31, 4, 19, 0)
mock.test_a = 'This is A'
mock.test_b = 'This is B'
self.assertEqual(len(mrofsc.fields), 1)
prepped_data = mrofsc.prepare(mock)
self.assertEqual(len(prepped_data), 4)
self.assertEqual(prepped_data['text'], 'This is A')
def test_load_all_queryset(self):
self.assertEqual([obj.id for obj in self.cmi.load_all_queryset()], [2, 3])
def test_nullable(self):
mock = MockModel()
mock.pk = 20
mock.author = None
mock.pub_date = datetime.datetime(2009, 1, 31, 4, 19, 0)
prepared_data = self.cnmi.prepare(mock)
self.assertEqual(len(prepared_data), 6)
self.assertEqual(sorted(prepared_data.keys()), ['author', 'author_exact', 'django_ct', 'django_id', 'id', 'text'])
prepared_data = self.cnmi.full_prepare(mock)
self.assertEqual(len(prepared_data), 4)
self.assertEqual(sorted(prepared_data.keys()), ['django_ct', 'django_id', 'id', 'text'])
def test_custom_facet_fields(self):
mock = MockModel()
mock.pk = 20
mock.author = 'daniel'
mock.pub_date = datetime.datetime(2009, 1, 31, 4, 19, 0)
prepared_data = self.gfmsi.prepare(mock)
self.assertEqual(len(prepared_data), 8)
self.assertEqual(sorted(prepared_data.keys()), ['author', 'author_foo', 'django_ct', 'django_id', 'id', 'pub_date', 'pub_date_exact', 'text'])
prepared_data = self.gfmsi.full_prepare(mock)
self.assertEqual(len(prepared_data), 8)
self.assertEqual(sorted(prepared_data.keys()), ['author', 'author_foo', 'django_ct', 'django_id', 'id', 'pub_date', 'pub_date_exact', 'text'])
self.assertEqual(prepared_data['author_foo'], u"Hi, I'm daniel")
self.assertEqual(prepared_data['pub_date_exact'], '2010-10-26T01:54:32')
class BasicModelSearchIndex(indexes.ModelSearchIndex, indexes.Indexable):
class Meta:
model = MockModel
class FieldsModelSearchIndex(indexes.ModelSearchIndex, indexes.Indexable):
class Meta:
model = MockModel
fields = ['author', 'pub_date']
class ExcludesModelSearchIndex(indexes.ModelSearchIndex, indexes.Indexable):
class Meta:
model = MockModel
excludes = ['author', 'foo']
class FieldsWithOverrideModelSearchIndex(indexes.ModelSearchIndex, indexes.Indexable):
foo = indexes.IntegerField(model_attr='foo')
class Meta:
model = MockModel
fields = ['author', 'foo']
def get_index_fieldname(self, f):
if f.name == 'author':
return 'author_bar'
else:
return f.name
class YetAnotherBasicModelSearchIndex(indexes.ModelSearchIndex, indexes.Indexable):
text = indexes.CharField(document=True)
class Meta:
model = AThirdMockModel
class GhettoAFifthMockModelSearchIndex(indexes.SearchIndex, indexes.Indexable):
text = indexes.CharField(document=True)
def get_model(self):
return AFifthMockModel
def index_queryset(self, using=None):
        # Index everything.
return self.get_model().objects.complete_set()
def read_queryset(self, using=None):
return self.get_model().objects.all()
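# Haystack consults index_queryset() when choosing which objects to index
# and read_queryset() when search results are loaded back; the fixture
# classes below exercise that split with different managers.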
class ReadQuerySetTestSearchIndex(indexes.SearchIndex, indexes.Indexable):
author = indexes.CharField(model_attr='author', document=True)
def get_model(self):
return AFifthMockModel
def read_queryset(self, using=None):
return self.get_model().objects.complete_set()
class TextReadQuerySetTestSearchIndex(indexes.SearchIndex, indexes.Indexable):
text = indexes.CharField(model_attr='author', document=True)
def get_model(self):
return AFifthMockModel
def read_queryset(self, using=None):
return self.get_model().objects.complete_set()
class ModelSearchIndexTestCase(TestCase):
def setUp(self):
super(ModelSearchIndexTestCase, self).setUp()
self.sb = connections['default'].get_backend()
self.bmsi = BasicModelSearchIndex()
self.fmsi = FieldsModelSearchIndex()
self.emsi = ExcludesModelSearchIndex()
self.fwomsi = FieldsWithOverrideModelSearchIndex()
self.yabmsi = YetAnotherBasicModelSearchIndex()
def test_basic(self):
self.assertEqual(len(self.bmsi.fields), 4)
self.assertTrue('foo' in self.bmsi.fields)
self.assertTrue(isinstance(self.bmsi.fields['foo'], indexes.CharField))
self.assertEqual(self.bmsi.fields['foo'].null, False)
self.assertEqual(self.bmsi.fields['foo'].index_fieldname, 'foo')
self.assertTrue('author' in self.bmsi.fields)
self.assertTrue(isinstance(self.bmsi.fields['author'], indexes.CharField))
self.assertEqual(self.bmsi.fields['author'].null, False)
self.assertTrue('pub_date' in self.bmsi.fields)
self.assertTrue(isinstance(self.bmsi.fields['pub_date'], indexes.DateTimeField))
self.assertTrue(isinstance(self.bmsi.fields['pub_date'].default, datetime.datetime))
self.assertTrue('text' in self.bmsi.fields)
self.assertTrue(isinstance(self.bmsi.fields['text'], indexes.CharField))
self.assertEqual(self.bmsi.fields['text'].document, True)
self.assertEqual(self.bmsi.fields['text'].use_template, True)
def test_fields(self):
self.assertEqual(len(self.fmsi.fields), 3)
self.assertTrue('author' in self.fmsi.fields)
self.assertTrue(isinstance(self.fmsi.fields['author'], indexes.CharField))
self.assertTrue('pub_date' in self.fmsi.fields)
self.assertTrue(isinstance(self.fmsi.fields['pub_date'], indexes.DateTimeField))
self.assertTrue('text' in self.fmsi.fields)
self.assertTrue(isinstance(self.fmsi.fields['text'], indexes.CharField))
def test_excludes(self):
self.assertEqual(len(self.emsi.fields), 2)
self.assertTrue('pub_date' in self.emsi.fields)
self.assertTrue(isinstance(self.emsi.fields['pub_date'], indexes.DateTimeField))
self.assertTrue('text' in self.emsi.fields)
self.assertTrue(isinstance(self.emsi.fields['text'], indexes.CharField))
def test_fields_with_override(self):
self.assertEqual(len(self.fwomsi.fields), 3)
self.assertTrue('author' in self.fwomsi.fields)
self.assertTrue(isinstance(self.fwomsi.fields['author'], indexes.CharField))
self.assertTrue('foo' in self.fwomsi.fields)
self.assertTrue(isinstance(self.fwomsi.fields['foo'], indexes.IntegerField))
self.assertTrue('text' in self.fwomsi.fields)
self.assertTrue(isinstance(self.fwomsi.fields['text'], indexes.CharField))
def test_overriding_field_name_with_get_index_fieldname(self):
        self.assertEqual(self.fwomsi.fields['foo'].index_fieldname, 'foo')
        self.assertEqual(self.fwomsi.fields['author'].index_fieldname, 'author_bar')
def test_float_integer_fields(self):
self.assertEqual(len(self.yabmsi.fields), 5)
self.assertEqual(sorted(self.yabmsi.fields.keys()), ['author', 'average_delay', 'pub_date', 'text', 'view_count'])
self.assertTrue('author' in self.yabmsi.fields)
self.assertTrue(isinstance(self.yabmsi.fields['author'], indexes.CharField))
self.assertEqual(self.yabmsi.fields['author'].null, False)
self.assertTrue('pub_date' in self.yabmsi.fields)
self.assertTrue(isinstance(self.yabmsi.fields['pub_date'], indexes.DateTimeField))
self.assertTrue(isinstance(self.yabmsi.fields['pub_date'].default, datetime.datetime))
self.assertTrue('text' in self.yabmsi.fields)
self.assertTrue(isinstance(self.yabmsi.fields['text'], indexes.CharField))
self.assertEqual(self.yabmsi.fields['text'].document, True)
self.assertEqual(self.yabmsi.fields['text'].use_template, False)
self.assertTrue('view_count' in self.yabmsi.fields)
self.assertTrue(isinstance(self.yabmsi.fields['view_count'], indexes.IntegerField))
self.assertEqual(self.yabmsi.fields['view_count'].null, False)
self.assertEqual(self.yabmsi.fields['view_count'].index_fieldname, 'view_count')
self.assertTrue('average_delay' in self.yabmsi.fields)
self.assertTrue(isinstance(self.yabmsi.fields['average_delay'], indexes.FloatField))
self.assertEqual(self.yabmsi.fields['average_delay'].null, False)
self.assertEqual(self.yabmsi.fields['average_delay'].index_fieldname, 'average_delay')
| [ "[email protected]" ] | |
4b17f4f3c8e4448fa2da50d78b68a1111cc8b288 | 1c74a2e075793e1d35c441518e2e138e14e26ea5 | /DynamicProgramming/139. 单词拆分.py | c91f575176f9e720791ba0590cb1319d478959d0 | [] | no_license | Dawinia/LeetCode | 1a385bfadbc4869c46dc1e9b8ca7656b77d746a0 | e1dcc71ca657b42eb8eb15116697e852ef4a475a | refs/heads/master | 2021-07-20T00:56:01.058471 | 2020-07-22T14:07:04 | 2020-07-22T14:07:04 | 197,305,126 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 409 | py | class Solution:
    def wordBreak(self, s: str, wordDict: List[str]) -> bool:
        # dp[i] is True when the prefix s[:i] can be segmented into words
        # from wordDict.
        if not s: return False
        dp = [False for _ in range(len(s) + 1)]
        dp[0] = True  # the empty prefix is trivially segmentable
        wordDict = set(wordDict)  # set membership makes the inner test O(1)
        for i in range(len(s) + 1):
            for j in range(i):
                # s[:i] is segmentable if some segmentable prefix s[:j]
                # is followed by a dictionary word s[j:i].
                if dp[j] and s[j: i] in wordDict:
                    dp[i] = True
                    break
        return dp[-1] | [ "[email protected]" ] | |
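A minimal usage sketch for the Solution class above. The inputs are the standard LeetCode 139 examples and the driver is an illustrative assumption; note the file also needs `from typing import List` at the top to run standalone, since LeetCode's own harness normally provides it:

from typing import List  # required by the wordBreak annotation when standalone
solution = Solution()
print(solution.wordBreak("leetcode", ["leet", "code"]))  # expected: True
print(solution.wordBreak("catsandog", ["cats", "dog", "sand", "and", "cat"]))  # expected: False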
96aa05d3ed0dc1b7ff4de70729ea4aee4bb5d044 | 5063faf298a36466cdb90f1cbd0a4f4e855b5d3b | /test/test_projects_api.py | 0f0c3cd8d6d5647894b277fdf3303c40f5c0006d | [] | no_license | pollination/python-sdk | d4eb4efbcbe3a76cc170cf8e71ad5bc6ca6c3011 | e4a94b236534658b150961795256224fe8dd93c2 | refs/heads/master | 2023-08-04T15:19:29.126613 | 2022-03-06T10:43:21 | 2022-03-06T10:51:08 | 224,588,062 | 3 | 1 | null | 2023-09-05T20:52:35 | 2019-11-28T06:48:40 | Python | UTF-8 | Python | false | false | 2,637 | py | # coding: utf-8
"""
pollination-server
Pollination Server OpenAPI Definition # noqa: E501
The version of the OpenAPI document: 0.27.0
Contact: [email protected]
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import unittest
import pollination_sdk
from pollination_sdk.api.projects_api import ProjectsApi # noqa: E501
from pollination_sdk.rest import ApiException
class TestProjectsApi(unittest.TestCase):
"""ProjectsApi unit test stubs"""
def setUp(self):
self.api = pollination_sdk.api.projects_api.ProjectsApi() # noqa: E501
def tearDown(self):
pass
def test_create_project(self):
"""Test case for create_project
Create a Project # noqa: E501
"""
pass
def test_create_project_recipe_filter(self):
"""Test case for create_project_recipe_filter
Upsert a recipe filter to a project # noqa: E501
"""
pass
def test_delete_project(self):
"""Test case for delete_project
Delete a Project # noqa: E501
"""
pass
def test_delete_project_org_permission(self):
"""Test case for delete_project_org_permission
Remove a Project permissions # noqa: E501
"""
pass
def test_delete_project_recipe_filter(self):
"""Test case for delete_project_recipe_filter
Remove a Project recipe filter # noqa: E501
"""
pass
def test_get_project(self):
"""Test case for get_project
Get a project # noqa: E501
"""
pass
def test_get_project_access_permissions(self):
"""Test case for get_project_access_permissions
Get project access permissions # noqa: E501
"""
pass
def test_get_project_recipe_filters(self):
"""Test case for get_project_recipe_filters
Get project recipe filters # noqa: E501
"""
pass
def test_get_project_recipes(self):
"""Test case for get_project_recipes
Get project recipes # noqa: E501
"""
pass
def test_list_projects(self):
"""Test case for list_projects
List Projects # noqa: E501
"""
pass
def test_update(self):
"""Test case for update
Update a Project # noqa: E501
"""
pass
def test_upsert_project_permission(self):
"""Test case for upsert_project_permission
Upsert a new permission to a project # noqa: E501
"""
pass
if __name__ == '__main__':
unittest.main()
| [ "[email protected]" ] | |
bf13f2f9b888f7442c436a5fa5a5a310061ebfa4 | f608dbe94b6e05f63d9bfa030c8ca87725957b93 | /tests/unit/test.py | 65b30979d98919ac06c53a99435ae0dfde61fd60 | [] | no_license | ProjectMHQ/projectm | 3336c82cbd1e330e065cb178d476c72d552fbfaf | adcb42722354ea4929300e9a4597e734b431c6e5 | refs/heads/master | 2023-04-22T18:41:48.091889 | 2021-01-30T11:28:28 | 2021-01-30T11:28:28 | 216,660,020 | 0 | 0 | null | 2021-05-06T20:33:28 | 2019-10-21T20:32:21 | Python | UTF-8 | Python | false | false | 105 | py | from unittest import TestCase
class Test(TestCase):
def setUp(self):
self.assertTrue(True)
| [ "[email protected]" ] | |
542f651000d9847a3a1e8b6fd63bd0714affc2da | 551b75f52d28c0b5c8944d808a361470e2602654 | /huaweicloud-sdk-kms/huaweicloudsdkkms/v1/model/api_version_detail.py | 2fb3fceb4eb332ac3df439c22622271584e00bd6 | [ "Apache-2.0" ] | permissive | wuchen-huawei/huaweicloud-sdk-python-v3 | 9d6597ce8ab666a9a297b3d936aeb85c55cf5877 | 3683d703f4320edb2b8516f36f16d485cff08fc2 | refs/heads/master | 2023-05-08T21:32:31.920300 | 2021-05-26T08:54:18 | 2021-05-26T08:54:18 | 370,898,764 | 0 | 0 | NOASSERTION | 2021-05-26T03:50:07 | 2021-05-26T03:50:07 | null | UTF-8 | Python | false | false | 7,101 | py | # coding: utf-8
import pprint
import re
import six
class ApiVersionDetail:
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
sensitive_list = []
openapi_types = {
'id': 'str',
'links': 'list[ApiLink]',
'version': 'str',
'status': 'str',
'updated': 'str',
'min_version': 'str'
}
attribute_map = {
'id': 'id',
'links': 'links',
'version': 'version',
'status': 'status',
'updated': 'updated',
'min_version': 'min_version'
}
def __init__(self, id=None, links=None, version=None, status=None, updated=None, min_version=None):
"""ApiVersionDetail - a model defined in huaweicloud sdk"""
self._id = None
self._links = None
self._version = None
self._status = None
self._updated = None
self._min_version = None
self.discriminator = None
if id is not None:
self.id = id
if links is not None:
self.links = links
if version is not None:
self.version = version
if status is not None:
self.status = status
if updated is not None:
self.updated = updated
if min_version is not None:
self.min_version = min_version
@property
def id(self):
"""Gets the id of this ApiVersionDetail.
        Version ID (version number), e.g. "v1.0".
:return: The id of this ApiVersionDetail.
:rtype: str
"""
return self._id
@id.setter
def id(self, id):
"""Sets the id of this ApiVersionDetail.
        Version ID (version number), e.g. "v1.0".
:param id: The id of this ApiVersionDetail.
:type: str
"""
self._id = id
@property
def links(self):
"""Gets the links of this ApiVersionDetail.
        A JSON object; see the data-structure description of the links field for details.
:return: The links of this ApiVersionDetail.
:rtype: list[ApiLink]
"""
return self._links
@links.setter
def links(self, links):
"""Sets the links of this ApiVersionDetail.
        A JSON object; see the data-structure description of the links field for details.
:param links: The links of this ApiVersionDetail.
:type: list[ApiLink]
"""
self._links = links
@property
def version(self):
"""Gets the version of this ApiVersionDetail.
        If this API version supports microversions, the maximum supported microversion; an empty string is returned if microversions are not supported.
:return: The version of this ApiVersionDetail.
:rtype: str
"""
return self._version
@version.setter
def version(self, version):
"""Sets the version of this ApiVersionDetail.
        If this API version supports microversions, the maximum supported microversion; an empty string is returned if microversions are not supported.
:param version: The version of this ApiVersionDetail.
:type: str
"""
self._version = version
@property
def status(self):
"""Gets the status of this ApiVersionDetail.
        Version status, one of the following three values: - CURRENT: the primary, recommended version. - SUPPORTED: an older version that is still supported. - DEPRECATED: a deprecated version that may be removed in the future.
:return: The status of this ApiVersionDetail.
:rtype: str
"""
return self._status
@status.setter
def status(self, status):
"""Sets the status of this ApiVersionDetail.
        Version status, one of the following three values: - CURRENT: the primary, recommended version. - SUPPORTED: an older version that is still supported. - DEPRECATED: a deprecated version that may be removed in the future.
:param status: The status of this ApiVersionDetail.
:type: str
"""
self._status = status
@property
def updated(self):
"""Gets the updated of this ApiVersionDetail.
        Version release time, expressed in UTC; for example, v1 was released at 2014-06-28T12:20:21Z.
:return: The updated of this ApiVersionDetail.
:rtype: str
"""
return self._updated
@updated.setter
def updated(self, updated):
"""Sets the updated of this ApiVersionDetail.
        Version release time, expressed in UTC; for example, v1 was released at 2014-06-28T12:20:21Z.
:param updated: The updated of this ApiVersionDetail.
:type: str
"""
self._updated = updated
@property
def min_version(self):
"""Gets the min_version of this ApiVersionDetail.
        If this API version supports microversions, the minimum supported microversion; an empty string is returned if microversions are not supported.
:return: The min_version of this ApiVersionDetail.
:rtype: str
"""
return self._min_version
@min_version.setter
def min_version(self, min_version):
"""Sets the min_version of this ApiVersionDetail.
        If this API version supports microversions, the minimum supported microversion; an empty string is returned if microversions are not supported.
:param min_version: The min_version of this ApiVersionDetail.
:type: str
"""
self._min_version = min_version
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
if attr in self.sensitive_list:
result[attr] = "****"
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, ApiVersionDetail):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
| [ "[email protected]" ] | |
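A brief usage sketch for the ApiVersionDetail model above; the field values are illustrative assumptions drawn from the docstrings, not from SDK documentation:

detail = ApiVersionDetail(id="v1.0", status="CURRENT",
                          updated="2014-06-28T12:20:21Z", min_version="")
print(detail.to_dict())  # plain dict, suitable for JSON serialization
print(detail == ApiVersionDetail(id="v1.0"))  # False: the other attributes differ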
e8b10831544dd015ee2d3f1a85a00b24b27c14ec | be0f3dfbaa2fa3d8bbe59229aef3212d032e7dd1 | /Gauss_v45r8/Gen/DecFiles/options/25113000.py | 00acb6e700d0a179d725b0d6740b0388b780f7d4 | [] | no_license | Sally27/backup_cmtuser_full | 34782102ed23c6335c48650a6eaa901137355d00 | 8924bebb935b96d438ce85b384cfc132d9af90f6 | refs/heads/master | 2020-05-21T09:27:04.370765 | 2018-12-12T14:41:07 | 2018-12-12T14:41:07 | 185,989,173 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 756 | py | # file /home/hep/ss4314/cmtuser/Gauss_v45r8/Gen/DecFiles/options/25113000.py generated: Fri, 27 Mar 2015 15:48:05
#
# Event Type: 25113000
#
# ASCII decay Descriptor: [Lambda_c+ -> p+ mu- mu+]cc
#
from Configurables import Generation
Generation().EventType = 25113000
Generation().SampleGenerationTool = "SignalPlain"
from Configurables import SignalPlain
Generation().addTool( SignalPlain )
Generation().SignalPlain.ProductionTool = "PythiaProduction"
from Configurables import ToolSvc
from Configurables import EvtGenDecay
ToolSvc().addTool( EvtGenDecay )
ToolSvc().EvtGenDecay.UserDecayFile = "$DECFILESROOT/dkfiles/Lc_pmumu=OS,DecProdCut.dec"
Generation().SignalPlain.CutTool = "DaughtersInLHCb"
Generation().SignalPlain.SignalPIDList = [ 4122,-4122 ]
| [ "[email protected]" ] | |
83f6ae6e2ca3950d4b58b8575c1d4bbdaa0baa58 | ec931947aa3e06ce565637e7ee1cb707f56375a2 | /aoc2015/day24/day24.py | d4455fd57993b221ad974a8240dfe528ac07f407 | [] | no_license | chrisglencross/advent-of-code | 5f16ed7e2265d27ce15f502ce2a1c2f11fc99fc0 | 21623d4aa01a9e20285a0233c50f8f56c4099af5 | refs/heads/master | 2023-01-24T22:01:30.829679 | 2023-01-12T23:03:03 | 2023-01-12T23:03:03 | 224,833,969 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,031 | py | #!/usr/bin/python3
# Advent of code 2015 day 24
# See https://adventofcode.com/2015/day/24
import functools
import itertools
def quantum_entanglement(packages):
return functools.reduce(lambda x, y: x * y, packages)
def get_passenger_compartment_quantum_entanglement(packages, compartments):
total_weight = sum(packages)
compartment_weight = total_weight // compartments
    # Try group sizes in increasing order: the first size that yields any
    # valid group minimizes the number of passenger-compartment packages.
    # (This assumes the remaining packages can always be split evenly among
    # the other compartments, which holds for typical puzzle inputs.)
    candidates = []
    for i in range(0, len(packages)):
if candidates:
break
for group in itertools.combinations(packages, i):
if sum(group) == compartment_weight:
candidates.append(group)
passenger_compartment_packages = min(candidates, key=quantum_entanglement)
return quantum_entanglement(passenger_compartment_packages)
with open("input.txt") as f:
all_packages = [int(line) for line in f.readlines()]
print("Part 1:", get_passenger_compartment_quantum_entanglement(all_packages, 3))
print("Part 2:", get_passenger_compartment_quantum_entanglement(all_packages, 4))
| [ "[email protected]" ] | |
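A quick worked check of the helpers above, using the example weights from the 2015 day 24 puzzle statement (the expected quantum entanglement of 99 comes from that statement, not from this repository):

example = [1, 2, 3, 4, 5, 7, 8, 9, 10, 11]
print(get_passenger_compartment_quantum_entanglement(example, 3))  # expected: 99 (9 * 11)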
cd87b660cb646404a973d34f0eac4fd6a9701f37 | 183d51188e6aaf077023a7ab2f9e35a681707e4e | /Well_of_Mimir/local/bin/pilprint.py | 93c7698b0101acd9e5fe33fcfd2e63812d92026f | [] | no_license | hanwei2008/Virtual_Environment | 6541d8dd608d620f76fcbc84f1c5bf2581a3b49e | 5df207171d27333d3f7cf45447a558f4f97e1c10 | refs/heads/master | 2016-09-14T07:11:57.086452 | 2016-04-22T01:21:43 | 2016-04-22T01:21:43 | 56,732,689 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,421 | py | #!/home/caiyuantao/Virtual_Environment/Well_of_Mimir/bin/python
#
# The Python Imaging Library.
# $Id$
#
# print image files to postscript printer
#
# History:
# 0.1 1996-04-20 fl Created
# 0.2 1996-10-04 fl Use draft mode when converting.
# 0.3 2003-05-06 fl Fixed a typo or two.
#
from __future__ import print_function
VERSION = "pilprint 0.3/2003-05-05"
from PIL import Image
from PIL import PSDraw
letter = ( 1.0*72, 1.0*72, 7.5*72, 10.0*72 )
def description(file, image):
import os
title = os.path.splitext(os.path.split(file)[1])[0]
format = " (%dx%d "
if image.format:
format = " (" + image.format + " %dx%d "
return title + format % image.size + image.mode + ")"
import getopt, os, sys
if len(sys.argv) == 1:
print("PIL Print 0.2a1/96-10-04 -- print image files")
print("Usage: pilprint files...")
print("Options:")
print(" -c colour printer (default is monochrome)")
print(" -p print via lpr (default is stdout)")
print(" -P <printer> same as -p but use given printer")
sys.exit(1)
try:
opt, argv = getopt.getopt(sys.argv[1:], "cdpP:")
except getopt.error as v:
print(v)
sys.exit(1)
printer = None # print to stdout
monochrome = 1 # reduce file size for most common case
for o, a in opt:
if o == "-d":
# debug: show available drivers
Image.init()
print(Image.ID)
sys.exit(1)
elif o == "-c":
# colour printer
monochrome = 0
elif o == "-p":
# default printer channel
printer = "lpr"
elif o == "-P":
# printer channel
printer = "lpr -P%s" % a
for file in argv:
try:
im = Image.open(file)
title = description(file, im)
if monochrome and im.mode not in ["1", "L"]:
im.draft("L", im.size)
im = im.convert("L")
if printer:
fp = os.popen(printer, "w")
else:
fp = sys.stdout
ps = PSDraw.PSDraw(fp)
ps.begin_document()
ps.setfont("Helvetica-Narrow-Bold", 18)
ps.text((letter[0], letter[3]+24), title)
ps.setfont("Helvetica-Narrow-Bold", 8)
ps.text((letter[0], letter[1]-30), VERSION)
ps.image(letter, im)
ps.end_document()
    except Exception:
print("cannot print image", end=' ')
print("(%s:%s)" % (sys.exc_info()[0], sys.exc_info()[1]))
| [ "[email protected]" ] | |
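An example invocation of the script above (the printer name and image file are assumptions; the flags come from the script's own usage text):

    python pilprint.py -c -P office_lw photo.jpg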
264c720f7c4b2f8ab8780a4d71622bd4e1a65372 | 5d58a7d712702680337154b1851296ee4eb6a5ef | /TEST/GUI/00120_page_pixsel/log.py | 80cb4a3e4fe61ea79c39aed82a6a979b2a410f38 | [
"LicenseRef-scancode-warranty-disclaimer"
] | no_license | creuzige/OOF2 | f04b6767a4f5ef0afefe419e8d19a19920cc3bbf | 2e815592a55cd7a15c95b8e63efc3d9cde684299 | refs/heads/master | 2021-05-20T23:02:52.727468 | 2019-11-06T22:02:48 | 2019-11-06T22:02:48 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 57,503 | py | checkpoint toplevel widget mapped OOF2 Activity Viewer
# -*- python -*-
# This software was produced by NIST, an agency of the U.S. government,
# and by statute is not subject to copyright in the United States.
# Recipients of this software assume all responsibilities associated
# with its operation, modification and maintenance. However, to
# facilitate maintenance we ask that before distributing modified
# versions of this software, you first contact the authors at
# [email protected].
import tests
setComboBox(findWidget('OOF2:Navigation:PageMenu'), 'Pixel Selection')
checkpoint page installed Pixel Selection
findWidget('OOF2:Pixel Selection Page:Pane').set_position(281)
checkpoint pixel page updated
checkpoint pixel page sensitized
assert tests.sensitization0()
assert tests.pixelSelectionPageNoMSCheck()
setComboBox(findWidget('OOF2:Navigation:PageMenu'), 'Microstructure')
checkpoint page installed Microstructure
findWidget('OOF2:Microstructure Page:Pane').set_position(150)
findWidget('OOF2:Microstructure Page:NewFromFile').clicked()
checkpoint toplevel widget mapped Dialog-Load Image and create Microstructure
findWidget('Dialog-Load Image and create Microstructure').resize(342, 144)
checkpoint meshable button set
checkpoint microstructure page sensitized
checkpoint meshable button set
checkpoint microstructure page sensitized
findWidget('Dialog-Load Image and create Microstructure:filename').set_text('e')
findWidget('Dialog-Load Image and create Microstructure:filename').set_text('ex')
findWidget('Dialog-Load Image and create Microstructure:filename').set_text('exa')
findWidget('Dialog-Load Image and create Microstructure:filename').set_text('exam')
findWidget('Dialog-Load Image and create Microstructure:filename').set_text('examp')
findWidget('Dialog-Load Image and create Microstructure:filename').set_text('exampl')
findWidget('Dialog-Load Image and create Microstructure:filename').set_text('example')
findWidget('Dialog-Load Image and create Microstructure:filename').set_text('examples')
findWidget('Dialog-Load Image and create Microstructure:filename').set_text('examples/')
findWidget('Dialog-Load Image and create Microstructure:filename').set_text('examples/c')
findWidget('Dialog-Load Image and create Microstructure:filename').set_text('examples/co')
findWidget('Dialog-Load Image and create Microstructure:filename').set_text('examples/com')
findWidget('Dialog-Load Image and create Microstructure:filename').set_text('examples/comp')
findWidget('Dialog-Load Image and create Microstructure:filename').set_text('examples/compo')
findWidget('Dialog-Load Image and create Microstructure:filename').set_text('examples/compos')
findWidget('Dialog-Load Image and create Microstructure:filename').set_text('examples/composi')
findWidget('Dialog-Load Image and create Microstructure:filename').set_text('examples/composit')
findWidget('Dialog-Load Image and create Microstructure:filename').set_text('examples/compositi')
findWidget('Dialog-Load Image and create Microstructure:filename').set_text('examples/compositio')
findWidget('Dialog-Load Image and create Microstructure:filename').set_text('examples/composition')
findWidget('Dialog-Load Image and create Microstructure:filename').set_text('examples/composition.')
findWidget('Dialog-Load Image and create Microstructure:filename').set_text('examples/composition.p')
findWidget('Dialog-Load Image and create Microstructure:filename').set_text('examples/composition.pn')
findWidget('Dialog-Load Image and create Microstructure:filename').set_text('examples/composition.png')
findWidget('Dialog-Load Image and create Microstructure:width:Auto').clicked()
findWidget('Dialog-Load Image and create Microstructure:width:Text').set_text('1')
findWidget('Dialog-Load Image and create Microstructure:width:Text').set_text('1.')
findWidget('Dialog-Load Image and create Microstructure:width:Text').set_text('1.0')
findWidget('Dialog-Load Image and create Microstructure:gtk-ok').clicked()
findWidget('OOF2:Microstructure Page:Pane').set_position(153)
checkpoint meshable button set
checkpoint meshable button set
checkpoint microstructure page sensitized
checkpoint microstructure page sensitized
checkpoint pixel page updated
checkpoint pixel page sensitized
checkpoint active area status updated
checkpoint mesh bdy page updated
checkpoint Field page sensitized
checkpoint Materials page updated
checkpoint mesh page subproblems sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page sensitized
checkpoint boundary page updated
checkpoint skeleton selection page grouplist
checkpoint skeleton selection page selection sensitized
checkpoint skeleton selection page groups sensitized
checkpoint skeleton selection page updated
checkpoint Solver page sensitized
# checkpoint interface page updated
checkpoint microstructure page sensitized
checkpoint OOF.Microstructure.Create_From_ImageFile
findMenu(findWidget('OOF2:MenuBar'), 'Windows:Graphics:New').activate()
findWidget('OOF2 Graphics 1:Pane0:Pane1:Pane2').set_position(250)
checkpoint Move Node toolbox info updated
checkpoint toplevel widget mapped OOF2 Graphics 1
checkpoint selection info updated
checkpoint selection info updated
checkpoint selection info updated
checkpoint selection info updated
findWidget('OOF2 Graphics 1:Pane0:Pane1').set_position(693)
findWidget('OOF2 Graphics 1:Pane0').set_position(278)
checkpoint contourmap info updated for Graphics_1
checkpoint OOF.Windows.Graphics.New
findWidget('OOF2 Graphics 1:Pane0:Pane1:Pane2').set_position(250)
findWidget('OOF2 Graphics 1:Pane0:Pane1').set_position(693)
findWidget('OOF2 Graphics 1:Pane0').set_position(278)
findWidget('OOF2 Graphics 1').resize(800, 400)
findWidget('OOF2 Graphics 1:Pane0:Pane1:Pane2').set_position(250)
findWidget('OOF2 Graphics 1:Pane0:Pane1').set_position(693)
findWidget('OOF2 Graphics 1:Pane0').set_position(278)
findWidget('OOF2 Graphics 1:Pane0:Pane1:Pane2').set_position(259)
findWidget('OOF2 Graphics 1:Pane0:Pane1').set_position(717)
findWidget('OOF2 Graphics 1:Pane0').set_position(278)
findWidget('OOF2 Graphics 1').resize(800, 400)
setComboBox(findWidget('OOF2:Navigation:PageMenu'), 'Pixel Selection')
checkpoint page installed Pixel Selection
checkpoint pixel page updated
checkpoint pixel page sensitized
assert tests.pixelSelectionPageStatusCheck(0, 10000)
assert tests.pixelSelectionSizeCheck('composition.png', 0)
assert tests.sensitization1()
findWidget('OOF2:Pixel Selection Page:Pane').set_position(281)
findWidget('OOF2 Graphics 1:Pane0:Pane1:Pane2').set_position(259)
findWidget('OOF2 Graphics 1:Pane0:Pane1').set_position(717)
findWidget('OOF2 Graphics 1:Pane0').set_position(278)
setComboBox(findWidget('OOF2 Graphics 1:Pane0:Pane1:Pane2:TBChooser'), 'Pixel Selection')
findWidget('OOF2 Graphics 1:Pane0:Pane1:Pane2').set_position(259)
checkpoint selection info updated
checkpoint Graphics_1 Pixel Selection sensitized
findWidget('OOF2 Graphics 1:Pane0:Pane1').set_position(717)
findWidget('OOF2 Graphics 1:Pane0').set_position(278)
findWidget('OOF2 Graphics 1:Pane0:Pane1:Pane2').set_position(259)
findWidget('OOF2 Graphics 1:Pane0:Pane1').set_position(717)
findWidget('OOF2 Graphics 1:Pane0').set_position(278)
setComboBox(findWidget('OOF2 Graphics 1:Pane0:Pane1:Pane2:TBScroll:Pixel Selection:Method:Chooser'), 'Burn')
findWidget('OOF2 Graphics 1:Pane0:Pane1:Pane2').set_position(259)
findWidget('OOF2 Graphics 1:Pane0:Pane1').set_position(717)
findWidget('OOF2 Graphics 1:Pane0').set_position(278)
findWidget('OOF2 Graphics 1:Pane0:Pane1:Pane2').set_position(259)
findWidget('OOF2 Graphics 1:Pane0:Pane1').set_position(717)
findWidget('OOF2 Graphics 1:Pane0').set_position(278)
canvasobj = findCanvasRoot(findWidget('OOF2 Graphics 1:Pane0:Pane1:Pane2:Canvas'), windowname='Graphics_1')
canvasobj.emit('event', event(gtk.gdk.BUTTON_PRESS,x= 6.3043478260870e-02,y=-2.2391304347826e-01,state=0,window=findCanvasGdkWindow('Graphics_1')))
canvasobj = findCanvasRoot(findWidget('OOF2 Graphics 1:Pane0:Pane1:Pane2:Canvas'), windowname='Graphics_1')
canvasobj.emit('event', event(gtk.gdk.BUTTON_RELEASE,x= 6.3043478260870e-02,y=-2.2391304347826e-01,state=256,window=findCanvasGdkWindow('Graphics_1')))
findWidget('OOF2 Messages 1').resize(548, 200)
findWidget('OOF2:Pixel Selection Page:Pane').set_position(281)
findWidget('OOF2 Graphics 1:Pane0').set_position(278)
checkpoint microstructure page sensitized
checkpoint pixel page updated
checkpoint selection info updated
checkpoint pixel page sensitized
checkpoint Graphics_1 Pixel Selection sensitized
checkpoint Graphics_1 Pixel Selection sensitized
checkpoint contourmap info updated for Graphics_1
checkpoint OOF.Graphics_1.Toolbox.Pixel_Select.Burn
assert tests.pixelSelectionPageStatusCheck(1938, 10000)
assert tests.pixelSelectionSizeCheck('composition.png', 1938)
assert tests.sensitization2()
findWidget('OOF2:Navigation:Prev').clicked()
checkpoint page installed Image
findWidget('OOF2').resize(593, 350)
findWidget('OOF2:Image Page:Pane').set_position(380)
findWidget('OOF2:Navigation:Prev').clicked()
checkpoint page installed Microstructure
findWidget('OOF2:Microstructure Page:Pane').set_position(165)
findWidget('OOF2:Microstructure Page:Pane:PixelGroups:New').clicked()
checkpoint toplevel widget mapped Dialog-Create new pixel group
findWidget('Dialog-Create new pixel group').resize(249, 72)
findWidget('Dialog-Create new pixel group:name:Auto').clicked()
checkpoint meshable button set
checkpoint microstructure page sensitized
checkpoint meshable button set
checkpoint microstructure page sensitized
findWidget('Dialog-Create new pixel group:name:Text').set_text('l')
findWidget('Dialog-Create new pixel group:name:Text').set_text('lo')
findWidget('Dialog-Create new pixel group:name:Text').set_text('low')
findWidget('Dialog-Create new pixel group:name:Text').set_text('lowe')
findWidget('Dialog-Create new pixel group:name:Text').set_text('lower')
findWidget('Dialog-Create new pixel group:name:Text').set_text('lowerl')
findWidget('Dialog-Create new pixel group:name:Text').set_text('lowerle')
findWidget('Dialog-Create new pixel group:name:Text').set_text('lowerlef')
findWidget('Dialog-Create new pixel group:name:Text').set_text('lowerleft')
findWidget('Dialog-Create new pixel group:gtk-ok').clicked()
checkpoint meshable button set
checkpoint meshable button set
checkpoint microstructure page sensitized
checkpoint skeleton selection page groups sensitized
checkpoint OOF.PixelGroup.New
checkpoint microstructure page sensitized
checkpoint meshable button set
findWidget('OOF2:Microstructure Page:Pane').set_position(212)
findWidget('OOF2:Microstructure Page:Pane:PixelGroups:Add').clicked()
findWidget('OOF2 Graphics 1:Pane0').set_position(278)
checkpoint meshable button set
checkpoint microstructure page sensitized
checkpoint contourmap info updated for Graphics_1
checkpoint OOF.PixelGroup.AddSelection
setComboBox(findWidget('OOF2:Navigation:PageMenu'), 'Pixel Selection')
checkpoint page installed Pixel Selection
findWidget('OOF2:Pixel Selection Page:Pane').set_position(324)
findWidget('OOF2:Pixel Selection Page:Pane:SelectionModification:OK').clicked()
checkpoint pixel page updated
checkpoint pixel page sensitized
findWidget('OOF2 Graphics 1:Pane0').set_position(278)
checkpoint microstructure page sensitized
checkpoint pixel page updated
checkpoint selection info updated
checkpoint Graphics_1 Pixel Selection sensitized
checkpoint pixel page sensitized
checkpoint contourmap info updated for Graphics_1
checkpoint OOF.PixelSelection.Invert
assert tests.pixelSelectionPageStatusCheck(8062, 10000)
assert tests.pixelSelectionSizeCheck('composition.png', 8062)
assert tests.sensitization2()
findWidget('OOF2:Pixel Selection Page:Pane:SelectionModification:Undo').clicked()
findWidget('OOF2 Graphics 1:Pane0').set_position(278)
checkpoint microstructure page sensitized
checkpoint Graphics_1 Pixel Selection sensitized
checkpoint pixel page sensitized
checkpoint pixel page updated
checkpoint selection info updated
checkpoint contourmap info updated for Graphics_1
checkpoint OOF.PixelSelection.Undo
assert tests.pixelSelectionPageStatusCheck(1938, 10000)
assert tests.pixelSelectionSizeCheck('composition.png', 1938)
assert tests.sensitization3()
findWidget('OOF2:Pixel Selection Page:Pane:SelectionModification:Clear').clicked()
findWidget('OOF2:Pixel Selection Page:Pane').set_position(324)
findWidget('OOF2 Graphics 1:Pane0').set_position(278)
checkpoint microstructure page sensitized
checkpoint pixel page updated
checkpoint Graphics_1 Pixel Selection sensitized
checkpoint selection info updated
checkpoint pixel page sensitized
checkpoint contourmap info updated for Graphics_1
checkpoint OOF.PixelSelection.Clear
assert tests.pixelSelectionPageStatusCheck(0, 10000)
assert tests.pixelSelectionSizeCheck('composition.png', 0)
assert tests.sensitization4()
setComboBox(findWidget('OOF2:Pixel Selection Page:Pane:SelectionModification:Method:Chooser'), 'Select Group')
findWidget('OOF2:Pixel Selection Page:Pane:SelectionModification:OK').clicked()
findWidget('OOF2:Pixel Selection Page:Pane').set_position(324)
findWidget('OOF2 Graphics 1:Pane0').set_position(278)
checkpoint microstructure page sensitized
checkpoint pixel page updated
checkpoint selection info updated
checkpoint Graphics_1 Pixel Selection sensitized
checkpoint pixel page sensitized
checkpoint contourmap info updated for Graphics_1
checkpoint OOF.PixelSelection.Select_Group
assert tests.pixelSelectionPageStatusCheck(1938, 10000)
assert tests.pixelSelectionSizeCheck('composition.png', 1938)
canvasobj = findCanvasRoot(findWidget('OOF2 Graphics 1:Pane0:Pane1:Pane2:Canvas'), windowname='Graphics_1')
canvasobj.emit('event', event(gtk.gdk.BUTTON_PRESS,x= 1.6739130434783e-01,y=-9.2826086956522e-01,state=0,window=findCanvasGdkWindow('Graphics_1')))
canvasobj = findCanvasRoot(findWidget('OOF2 Graphics 1:Pane0:Pane1:Pane2:Canvas'), windowname='Graphics_1')
canvasobj.emit('event', event(gtk.gdk.BUTTON_RELEASE,x= 1.6739130434783e-01,y=-9.2826086956522e-01,state=256,window=findCanvasGdkWindow('Graphics_1')))
findWidget('OOF2 Graphics 1:Pane0').set_position(278)
checkpoint microstructure page sensitized
checkpoint pixel page updated
checkpoint Graphics_1 Pixel Selection sensitized
checkpoint selection info updated
checkpoint pixel page sensitized
checkpoint Graphics_1 Pixel Selection sensitized
checkpoint contourmap info updated for Graphics_1
checkpoint OOF.Graphics_1.Toolbox.Pixel_Select.Burn
setComboBox(findWidget('OOF2:Navigation:PageMenu'), 'Microstructure')
checkpoint page installed Microstructure
findWidget('OOF2:Microstructure Page:Pane').set_position(212)
findWidget('OOF2:Microstructure Page:Pane:PixelGroups:New').clicked()
checkpoint toplevel widget mapped Dialog-Create new pixel group
findWidget('Dialog-Create new pixel group').resize(249, 72)
findWidget('Dialog-Create new pixel group:name:Text').set_text('')
findWidget('Dialog-Create new pixel group:name:Text').set_text('u')
findWidget('Dialog-Create new pixel group:name:Text').set_text('up')
findWidget('Dialog-Create new pixel group:name:Text').set_text('upp')
checkpoint meshable button set
checkpoint microstructure page sensitized
findWidget('Dialog-Create new pixel group:name:Text').set_text('uppe')
findWidget('Dialog-Create new pixel group:name:Text').set_text('upper')
findWidget('Dialog-Create new pixel group:name:Text').set_text('upperl')
findWidget('Dialog-Create new pixel group:name:Text').set_text('upperle')
findWidget('Dialog-Create new pixel group:name:Text').set_text('upperlef')
findWidget('Dialog-Create new pixel group:name:Text').set_text('upperleft')
findWidget('Dialog-Create new pixel group:gtk-ok').clicked()
checkpoint meshable button set
checkpoint meshable button set
checkpoint microstructure page sensitized
checkpoint skeleton selection page groups sensitized
checkpoint OOF.PixelGroup.New
findWidget('OOF2:Microstructure Page:Pane:PixelGroups:Add').clicked()
findWidget('OOF2 Graphics 1:Pane0').set_position(278)
checkpoint meshable button set
checkpoint microstructure page sensitized
checkpoint contourmap info updated for Graphics_1
checkpoint OOF.PixelGroup.AddSelection
setComboBox(findWidget('OOF2:Navigation:PageMenu'), 'Pixel Selection')
checkpoint page installed Pixel Selection
findWidget('OOF2:Pixel Selection Page:Pane').set_position(324)
findWidget('OOF2:Pixel Selection Page:Pane:SelectionModification:OK').clicked()
checkpoint pixel page updated
checkpoint pixel page sensitized
checkpoint microstructure page sensitized
checkpoint pixel page updated
checkpoint Graphics_1 Pixel Selection sensitized
checkpoint pixel page sensitized
checkpoint selection info updated
checkpoint contourmap info updated for Graphics_1
checkpoint OOF.PixelSelection.Select_Group
findWidget('OOF2 Graphics 1:Pane0').set_position(278)
assert tests.pixelSelectionPageStatusCheck(1938, 10000)
setComboBox(findWidget('OOF2:Pixel Selection Page:Pane:SelectionModification:Method:Chooser'), 'Add Group')
setComboBox(findWidget('OOF2:Pixel Selection Page:Pane:SelectionModification:Method:Add Group:group'), 'upperleft')
findWidget('OOF2:Pixel Selection Page:Pane:SelectionModification:OK').clicked()
findWidget('OOF2 Graphics 1:Pane0').set_position(278)
checkpoint microstructure page sensitized
checkpoint pixel page updated
checkpoint Graphics_1 Pixel Selection sensitized
checkpoint selection info updated
checkpoint pixel page sensitized
checkpoint contourmap info updated for Graphics_1
checkpoint OOF.PixelSelection.Add_Group
assert tests.pixelSelectionPageStatusCheck(3060, 10000)
assert tests.pixelSelectionSizeCheck('composition.png', 3060)
setComboBox(findWidget('OOF2:Pixel Selection Page:Pane:SelectionModification:Method:Chooser'), 'Unselect Group')
findWidget('OOF2:Pixel Selection Page:Pane:SelectionModification:OK').clicked()
findWidget('OOF2 Graphics 1:Pane0').set_position(278)
checkpoint microstructure page sensitized
checkpoint pixel page updated
checkpoint selection info updated
checkpoint Graphics_1 Pixel Selection sensitized
checkpoint pixel page sensitized
checkpoint contourmap info updated for Graphics_1
checkpoint OOF.PixelSelection.Unselect_Group
assert tests.pixelSelectionPageStatusCheck(1122, 10000)
assert tests.pixelSelectionSizeCheck('composition.png', 1122)
setComboBox(findWidget('OOF2 Graphics 1:Pane0:Pane1:Pane2:TBScroll:Pixel Selection:Method:Chooser'), 'Rectangle')
findWidget('OOF2 Graphics 1:Pane0:Pane1:Pane2').set_position(259)
findWidget('OOF2 Graphics 1:Pane0:Pane1').set_position(717)
findWidget('OOF2 Graphics 1:Pane0').set_position(278)
findWidget('OOF2 Graphics 1:Pane0:Pane1:Pane2').set_position(259)
findWidget('OOF2 Graphics 1:Pane0:Pane1').set_position(717)
findWidget('OOF2 Graphics 1:Pane0').set_position(278)
canvasobj = findCanvasRoot(findWidget('OOF2 Graphics 1:Pane0:Pane1:Pane2:Canvas'), windowname='Graphics_1')
canvasobj.emit('event', event(gtk.gdk.BUTTON_PRESS,x= 1.9782608695652e-01,y=-5.8043478260870e-01,state=0,window=findCanvasGdkWindow('Graphics_1')))
canvasobj = findCanvasRoot(findWidget('OOF2 Graphics 1:Pane0:Pane1:Pane2:Canvas'), windowname='Graphics_1')
canvasobj.emit('event', event(gtk.gdk.MOTION_NOTIFY,x= 2.0217391304348e-01,y=-5.6304347826087e-01,state=256,window=findCanvasGdkWindow('Graphics_1')))
canvasobj = findCanvasRoot(findWidget('OOF2 Graphics 1:Pane0:Pane1:Pane2:Canvas'), windowname='Graphics_1')
canvasobj.emit('event', event(gtk.gdk.MOTION_NOTIFY,x= 2.0652173913043e-01,y=-5.3260869565217e-01,state=256,window=findCanvasGdkWindow('Graphics_1')))
canvasobj = findCanvasRoot(findWidget('OOF2 Graphics 1:Pane0:Pane1:Pane2:Canvas'), windowname='Graphics_1')
canvasobj.emit('event', event(gtk.gdk.MOTION_NOTIFY,x= 2.1086956521739e-01,y=-5.1086956521739e-01,state=256,window=findCanvasGdkWindow('Graphics_1')))
canvasobj = findCanvasRoot(findWidget('OOF2 Graphics 1:Pane0:Pane1:Pane2:Canvas'), windowname='Graphics_1')
canvasobj.emit('event', event(gtk.gdk.MOTION_NOTIFY,x= 2.1956521739130e-01,y=-4.8478260869565e-01,state=256,window=findCanvasGdkWindow('Graphics_1')))
canvasobj = findCanvasRoot(findWidget('OOF2 Graphics 1:Pane0:Pane1:Pane2:Canvas'), windowname='Graphics_1')
canvasobj.emit('event', event(gtk.gdk.MOTION_NOTIFY,x= 2.2391304347826e-01,y=-4.4565217391304e-01,state=256,window=findCanvasGdkWindow('Graphics_1')))
canvasobj = findCanvasRoot(findWidget('OOF2 Graphics 1:Pane0:Pane1:Pane2:Canvas'), windowname='Graphics_1')
canvasobj.emit('event', event(gtk.gdk.MOTION_NOTIFY,x= 2.5000000000000e-01,y=-4.0217391304348e-01,state=256,window=findCanvasGdkWindow('Graphics_1')))
canvasobj = findCanvasRoot(findWidget('OOF2 Graphics 1:Pane0:Pane1:Pane2:Canvas'), windowname='Graphics_1')
canvasobj.emit('event', event(gtk.gdk.MOTION_NOTIFY,x= 2.7608695652174e-01,y=-3.5869565217391e-01,state=256,window=findCanvasGdkWindow('Graphics_1')))
canvasobj = findCanvasRoot(findWidget('OOF2 Graphics 1:Pane0:Pane1:Pane2:Canvas'), windowname='Graphics_1')
canvasobj.emit('event', event(gtk.gdk.MOTION_NOTIFY,x= 3.0217391304348e-01,y=-3.2826086956522e-01,state=256,window=findCanvasGdkWindow('Graphics_1')))
canvasobj = findCanvasRoot(findWidget('OOF2 Graphics 1:Pane0:Pane1:Pane2:Canvas'), windowname='Graphics_1')
canvasobj.emit('event', event(gtk.gdk.MOTION_NOTIFY,x= 3.2826086956522e-01,y=-2.9347826086957e-01,state=256,window=findCanvasGdkWindow('Graphics_1')))
canvasobj = findCanvasRoot(findWidget('OOF2 Graphics 1:Pane0:Pane1:Pane2:Canvas'), windowname='Graphics_1')
canvasobj.emit('event', event(gtk.gdk.MOTION_NOTIFY,x= 3.5000000000000e-01,y=-2.6739130434783e-01,state=256,window=findCanvasGdkWindow('Graphics_1')))
canvasobj = findCanvasRoot(findWidget('OOF2 Graphics 1:Pane0:Pane1:Pane2:Canvas'), windowname='Graphics_1')
canvasobj.emit('event', event(gtk.gdk.MOTION_NOTIFY,x= 3.6304347826087e-01,y=-2.5869565217391e-01,state=256,window=findCanvasGdkWindow('Graphics_1')))
canvasobj = findCanvasRoot(findWidget('OOF2 Graphics 1:Pane0:Pane1:Pane2:Canvas'), windowname='Graphics_1')
canvasobj.emit('event', event(gtk.gdk.MOTION_NOTIFY,x= 3.7173913043478e-01,y=-2.5000000000000e-01,state=256,window=findCanvasGdkWindow('Graphics_1')))
canvasobj = findCanvasRoot(findWidget('OOF2 Graphics 1:Pane0:Pane1:Pane2:Canvas'), windowname='Graphics_1')
canvasobj.emit('event', event(gtk.gdk.MOTION_NOTIFY,x= 3.8043478260870e-01,y=-2.5000000000000e-01,state=256,window=findCanvasGdkWindow('Graphics_1')))
canvasobj = findCanvasRoot(findWidget('OOF2 Graphics 1:Pane0:Pane1:Pane2:Canvas'), windowname='Graphics_1')
canvasobj.emit('event', event(gtk.gdk.MOTION_NOTIFY,x= 3.9782608695652e-01,y=-2.4565217391304e-01,state=256,window=findCanvasGdkWindow('Graphics_1')))
canvasobj = findCanvasRoot(findWidget('OOF2 Graphics 1:Pane0:Pane1:Pane2:Canvas'), windowname='Graphics_1')
canvasobj.emit('event', event(gtk.gdk.MOTION_NOTIFY,x= 4.1521739130435e-01,y=-2.4565217391304e-01,state=256,window=findCanvasGdkWindow('Graphics_1')))
canvasobj = findCanvasRoot(findWidget('OOF2 Graphics 1:Pane0:Pane1:Pane2:Canvas'), windowname='Graphics_1')
canvasobj.emit('event', event(gtk.gdk.MOTION_NOTIFY,x= 4.2826086956522e-01,y=-2.4565217391304e-01,state=256,window=findCanvasGdkWindow('Graphics_1')))
canvasobj = findCanvasRoot(findWidget('OOF2 Graphics 1:Pane0:Pane1:Pane2:Canvas'), windowname='Graphics_1')
canvasobj.emit('event', event(gtk.gdk.MOTION_NOTIFY,x= 4.5000000000000e-01,y=-2.5000000000000e-01,state=256,window=findCanvasGdkWindow('Graphics_1')))
canvasobj = findCanvasRoot(findWidget('OOF2 Graphics 1:Pane0:Pane1:Pane2:Canvas'), windowname='Graphics_1')
canvasobj.emit('event', event(gtk.gdk.MOTION_NOTIFY,x= 4.7173913043478e-01,y=-2.5434782608696e-01,state=256,window=findCanvasGdkWindow('Graphics_1')))
canvasobj = findCanvasRoot(findWidget('OOF2 Graphics 1:Pane0:Pane1:Pane2:Canvas'), windowname='Graphics_1')
canvasobj.emit('event', event(gtk.gdk.MOTION_NOTIFY,x= 4.9347826086957e-01,y=-2.5869565217391e-01,state=256,window=findCanvasGdkWindow('Graphics_1')))
canvasobj = findCanvasRoot(findWidget('OOF2 Graphics 1:Pane0:Pane1:Pane2:Canvas'), windowname='Graphics_1')
canvasobj.emit('event', event(gtk.gdk.MOTION_NOTIFY,x= 5.1086956521739e-01,y=-2.5869565217391e-01,state=256,window=findCanvasGdkWindow('Graphics_1')))
canvasobj = findCanvasRoot(findWidget('OOF2 Graphics 1:Pane0:Pane1:Pane2:Canvas'), windowname='Graphics_1')
canvasobj.emit('event', event(gtk.gdk.MOTION_NOTIFY,x= 5.3260869565217e-01,y=-2.5869565217391e-01,state=256,window=findCanvasGdkWindow('Graphics_1')))
canvasobj = findCanvasRoot(findWidget('OOF2 Graphics 1:Pane0:Pane1:Pane2:Canvas'), windowname='Graphics_1')
canvasobj.emit('event', event(gtk.gdk.MOTION_NOTIFY,x= 5.4565217391304e-01,y=-2.5869565217391e-01,state=256,window=findCanvasGdkWindow('Graphics_1')))
canvasobj = findCanvasRoot(findWidget('OOF2 Graphics 1:Pane0:Pane1:Pane2:Canvas'), windowname='Graphics_1')
canvasobj.emit('event', event(gtk.gdk.MOTION_NOTIFY,x= 5.8478260869565e-01,y=-2.5869565217391e-01,state=256,window=findCanvasGdkWindow('Graphics_1')))
canvasobj = findCanvasRoot(findWidget('OOF2 Graphics 1:Pane0:Pane1:Pane2:Canvas'), windowname='Graphics_1')
canvasobj.emit('event', event(gtk.gdk.MOTION_NOTIFY,x= 6.1521739130435e-01,y=-2.5869565217391e-01,state=256,window=findCanvasGdkWindow('Graphics_1')))
canvasobj = findCanvasRoot(findWidget('OOF2 Graphics 1:Pane0:Pane1:Pane2:Canvas'), windowname='Graphics_1')
canvasobj.emit('event', event(gtk.gdk.MOTION_NOTIFY,x= 6.1956521739130e-01,y=-2.5869565217391e-01,state=256,window=findCanvasGdkWindow('Graphics_1')))
canvasobj = findCanvasRoot(findWidget('OOF2 Graphics 1:Pane0:Pane1:Pane2:Canvas'), windowname='Graphics_1')
canvasobj.emit('event', event(gtk.gdk.MOTION_NOTIFY,x= 6.4130434782609e-01,y=-2.5869565217391e-01,state=256,window=findCanvasGdkWindow('Graphics_1')))
canvasobj = findCanvasRoot(findWidget('OOF2 Graphics 1:Pane0:Pane1:Pane2:Canvas'), windowname='Graphics_1')
canvasobj.emit('event', event(gtk.gdk.MOTION_NOTIFY,x= 6.5000000000000e-01,y=-2.5869565217391e-01,state=256,window=findCanvasGdkWindow('Graphics_1')))
canvasobj = findCanvasRoot(findWidget('OOF2 Graphics 1:Pane0:Pane1:Pane2:Canvas'), windowname='Graphics_1')
canvasobj.emit('event', event(gtk.gdk.MOTION_NOTIFY,x= 6.5434782608696e-01,y=-2.5869565217391e-01,state=256,window=findCanvasGdkWindow('Graphics_1')))
canvasobj = findCanvasRoot(findWidget('OOF2 Graphics 1:Pane0:Pane1:Pane2:Canvas'), windowname='Graphics_1')
canvasobj.emit('event', event(gtk.gdk.MOTION_NOTIFY,x= 6.6304347826087e-01,y=-2.5869565217391e-01,state=256,window=findCanvasGdkWindow('Graphics_1')))
canvasobj = findCanvasRoot(findWidget('OOF2 Graphics 1:Pane0:Pane1:Pane2:Canvas'), windowname='Graphics_1')
canvasobj.emit('event', event(gtk.gdk.MOTION_NOTIFY,x= 6.7608695652174e-01,y=-2.5869565217391e-01,state=256,window=findCanvasGdkWindow('Graphics_1')))
canvasobj = findCanvasRoot(findWidget('OOF2 Graphics 1:Pane0:Pane1:Pane2:Canvas'), windowname='Graphics_1')
canvasobj.emit('event', event(gtk.gdk.MOTION_NOTIFY,x= 6.8478260869565e-01,y=-2.5869565217391e-01,state=256,window=findCanvasGdkWindow('Graphics_1')))
canvasobj = findCanvasRoot(findWidget('OOF2 Graphics 1:Pane0:Pane1:Pane2:Canvas'), windowname='Graphics_1')
canvasobj.emit('event', event(gtk.gdk.MOTION_NOTIFY,x= 6.9347826086957e-01,y=-2.5869565217391e-01,state=256,window=findCanvasGdkWindow('Graphics_1')))
canvasobj = findCanvasRoot(findWidget('OOF2 Graphics 1:Pane0:Pane1:Pane2:Canvas'), windowname='Graphics_1')
canvasobj.emit('event', event(gtk.gdk.MOTION_NOTIFY,x= 6.9782608695652e-01,y=-2.5869565217391e-01,state=256,window=findCanvasGdkWindow('Graphics_1')))
canvasobj = findCanvasRoot(findWidget('OOF2 Graphics 1:Pane0:Pane1:Pane2:Canvas'), windowname='Graphics_1')
canvasobj.emit('event', event(gtk.gdk.MOTION_NOTIFY,x= 7.0217391304348e-01,y=-2.5869565217391e-01,state=256,window=findCanvasGdkWindow('Graphics_1')))
canvasobj = findCanvasRoot(findWidget('OOF2 Graphics 1:Pane0:Pane1:Pane2:Canvas'), windowname='Graphics_1')
canvasobj.emit('event', event(gtk.gdk.MOTION_NOTIFY,x= 7.0652173913043e-01,y=-2.5869565217391e-01,state=256,window=findCanvasGdkWindow('Graphics_1')))
canvasobj = findCanvasRoot(findWidget('OOF2 Graphics 1:Pane0:Pane1:Pane2:Canvas'), windowname='Graphics_1')
canvasobj.emit('event', event(gtk.gdk.MOTION_NOTIFY,x= 7.1086956521739e-01,y=-2.5869565217391e-01,state=256,window=findCanvasGdkWindow('Graphics_1')))
canvasobj = findCanvasRoot(findWidget('OOF2 Graphics 1:Pane0:Pane1:Pane2:Canvas'), windowname='Graphics_1')
canvasobj.emit('event', event(gtk.gdk.MOTION_NOTIFY,x= 7.1521739130435e-01,y=-2.5869565217391e-01,state=256,window=findCanvasGdkWindow('Graphics_1')))
canvasobj = findCanvasRoot(findWidget('OOF2 Graphics 1:Pane0:Pane1:Pane2:Canvas'), windowname='Graphics_1')
canvasobj.emit('event', event(gtk.gdk.MOTION_NOTIFY,x= 7.1956521739130e-01,y=-2.5869565217391e-01,state=256,window=findCanvasGdkWindow('Graphics_1')))
canvasobj = findCanvasRoot(findWidget('OOF2 Graphics 1:Pane0:Pane1:Pane2:Canvas'), windowname='Graphics_1')
canvasobj.emit('event', event(gtk.gdk.MOTION_NOTIFY,x= 7.2391304347826e-01,y=-2.5869565217391e-01,state=256,window=findCanvasGdkWindow('Graphics_1')))
canvasobj = findCanvasRoot(findWidget('OOF2 Graphics 1:Pane0:Pane1:Pane2:Canvas'), windowname='Graphics_1')
canvasobj.emit('event', event(gtk.gdk.MOTION_NOTIFY,x= 7.2826086956522e-01,y=-2.5869565217391e-01,state=256,window=findCanvasGdkWindow('Graphics_1')))
canvasobj = findCanvasRoot(findWidget('OOF2 Graphics 1:Pane0:Pane1:Pane2:Canvas'), windowname='Graphics_1')
canvasobj.emit('event', event(gtk.gdk.MOTION_NOTIFY,x= 7.3260869565217e-01,y=-2.5434782608696e-01,state=256,window=findCanvasGdkWindow('Graphics_1')))
canvasobj = findCanvasRoot(findWidget('OOF2 Graphics 1:Pane0:Pane1:Pane2:Canvas'), windowname='Graphics_1')
canvasobj.emit('event', event(gtk.gdk.MOTION_NOTIFY,x= 7.3260869565217e-01,y=-2.6304347826087e-01,state=256,window=findCanvasGdkWindow('Graphics_1')))
canvasobj = findCanvasRoot(findWidget('OOF2 Graphics 1:Pane0:Pane1:Pane2:Canvas'), windowname='Graphics_1')
canvasobj.emit('event', event(gtk.gdk.BUTTON_RELEASE,x= 7.3260869565217e-01,y=-2.6304347826087e-01,state=256,window=findCanvasGdkWindow('Graphics_1')))
findWidget('OOF2:Pixel Selection Page:Pane').set_position(324)
findWidget('OOF2 Graphics 1:Pane0').set_position(278)
checkpoint microstructure page sensitized
checkpoint pixel page updated
checkpoint selection info updated
checkpoint pixel page sensitized
checkpoint Graphics_1 Pixel Selection sensitized
checkpoint Graphics_1 Pixel Selection sensitized
checkpoint contourmap info updated for Graphics_1
checkpoint OOF.Graphics_1.Toolbox.Pixel_Select.Rectangle
assert tests.pixelSelectionSizeCheck('composition.png', 1815)
assert tests.pixelSelectionPageStatusCheck(1815, 10000)
setComboBox(findWidget('OOF2:Pixel Selection Page:Pane:SelectionModification:Method:Chooser'), 'Intersect Group')
findWidget('OOF2:Pixel Selection Page:Pane:SelectionModification:OK').clicked()
findWidget('OOF2:Pixel Selection Page:Pane').set_position(324)
findWidget('OOF2 Graphics 1:Pane0').set_position(278)
checkpoint microstructure page sensitized
checkpoint Graphics_1 Pixel Selection sensitized
checkpoint selection info updated
checkpoint pixel page updated
checkpoint pixel page sensitized
checkpoint contourmap info updated for Graphics_1
checkpoint OOF.PixelSelection.Intersect_Group
assert tests.pixelSelectionPageStatusCheck(384, 10000)
assert tests.pixelSelectionSizeCheck('composition.png', 384)
assert tests.sensitization5()
setComboBox(findWidget('OOF2:Pixel Selection Page:Pane:SelectionModification:Method:Chooser'), 'Despeckle')
setComboBox(findWidget('OOF2:Pixel Selection Page:Pane:SelectionModification:Method:Chooser'), 'Expand')
findWidget('OOF2:Pixel Selection Page:Pane:SelectionModification:OK').clicked()
findWidget('OOF2 Graphics 1:Pane0').set_position(278)
checkpoint microstructure page sensitized
checkpoint selection info updated
checkpoint pixel page updated
checkpoint pixel page sensitized
checkpoint Graphics_1 Pixel Selection sensitized
checkpoint contourmap info updated for Graphics_1
checkpoint OOF.PixelSelection.Expand
assert tests.pixelSelectionPageStatusCheck(472, 10000)
setComboBox(findWidget('OOF2:Pixel Selection Page:Pane:SelectionModification:Method:Chooser'), 'Shrink')
findWidget('OOF2:Pixel Selection Page:Pane:SelectionModification:Method:Shrink:radius').set_text('.0')
findWidget('OOF2:Pixel Selection Page:Pane:SelectionModification:Method:Shrink:radius').set_text('4.0')
findWidget('OOF2:Pixel Selection Page:Pane:SelectionModification:OK').clicked()
findWidget('OOF2 Graphics 1:Pane0').set_position(278)
checkpoint microstructure page sensitized
checkpoint selection info updated
checkpoint pixel page updated
checkpoint Graphics_1 Pixel Selection sensitized
checkpoint pixel page sensitized
checkpoint contourmap info updated for Graphics_1
checkpoint OOF.PixelSelection.Shrink
assert tests.pixelSelectionPageStatusCheck(156, 10000)
setComboBox(findWidget('OOF2:Pixel Selection Page:Pane:SelectionModification:Method:Chooser'), 'Color Range')
findWidget('OOF2').resize(593, 532)
findWidget('OOF2:Pixel Selection Page:Pane').set_position(221)
findWidget('OOF2:Pixel Selection Page:Pane:SelectionModification:Method:Color Range:reference:RGBColor:Blue:slider').get_adjustment().set_value( 1.5873015873016e-02)
findWidget('OOF2:Pixel Selection Page:Pane:SelectionModification:Method:Color Range:reference:RGBColor:Blue:slider').get_adjustment().set_value( 3.1746031746032e-02)
findWidget('OOF2:Pixel Selection Page:Pane:SelectionModification:Method:Color Range:reference:RGBColor:Blue:slider').get_adjustment().set_value( 6.8253968253968e-01)
findWidget('OOF2:Pixel Selection Page:Pane:SelectionModification:Method:Color Range:reference:RGBColor:Blue:slider').get_adjustment().set_value( 9.3650793650794e-01)
findWidget('OOF2:Pixel Selection Page:Pane:SelectionModification:Method:Color Range:reference:RGBColor:Blue:slider').get_adjustment().set_value( 9.8412698412698e-01)
findWidget('OOF2:Pixel Selection Page:Pane:SelectionModification:Method:Color Range:reference:RGBColor:Blue:slider').get_adjustment().set_value( 1.0000000000000e+00)
findWidget('OOF2:Pixel Selection Page:Pane:SelectionModification:Method:Color Range:range:DeltaRGB:delta_red:slider').get_adjustment().set_value( 1.0000000000000e-02)
findWidget('OOF2:Pixel Selection Page:Pane:SelectionModification:Method:Color Range:range:DeltaRGB:delta_green:slider').get_adjustment().set_value( 1.0000000000000e-02)
findWidget('OOF2:Pixel Selection Page:Pane:SelectionModification:Method:Color Range:range:DeltaRGB:delta_blue:slider').get_adjustment().set_value( 1.0000000000000e-02)
findWidget('OOF2:Pixel Selection Page:Pane:SelectionModification:OK').clicked()
findWidget('OOF2:Pixel Selection Page:Pane').set_position(221)
findWidget('OOF2 Graphics 1:Pane0').set_position(278)
checkpoint microstructure page sensitized
checkpoint Graphics_1 Pixel Selection sensitized
checkpoint pixel page updated
checkpoint pixel page sensitized
checkpoint selection info updated
checkpoint contourmap info updated for Graphics_1
checkpoint OOF.PixelSelection.Color_Range
assert tests.pixelSelectionPageStatusCheck(1428, 10000)
setComboBox(findWidget('OOF2:Pixel Selection Page:Pane:SelectionModification:Method:Chooser'), 'Copy')
findWidget('OOF2:Pixel Selection Page:Pane').set_position(324)
setComboBox(findWidget('OOF2:Navigation:PageMenu'), 'Microstructure')
checkpoint page installed Microstructure
findWidget('OOF2:Microstructure Page:Pane').set_position(212)
findWidget('OOF2:Microstructure Page:Copy').clicked()
checkpoint toplevel widget mapped Dialog-Copy microstructure
findWidget('Dialog-Copy microstructure').resize(249, 72)
findWidget('Dialog-Copy microstructure:gtk-ok').clicked()
findWidget('OOF2 Graphics 1:Pane0').set_position(278)
findWidget('OOF2:Microstructure Page:Pane').set_position(165)
checkpoint meshable button set
checkpoint microstructure page sensitized
checkpoint meshable button set
checkpoint meshable button set
checkpoint microstructure page sensitized
checkpoint microstructure page sensitized
checkpoint meshable button set
checkpoint Field page sensitized
checkpoint Materials page updated
checkpoint mesh page subproblems sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page sensitized
checkpoint boundary page updated
checkpoint skeleton selection page grouplist
checkpoint skeleton selection page selection sensitized
checkpoint skeleton selection page groups sensitized
checkpoint skeleton selection page updated
checkpoint microstructure page sensitized
checkpoint meshable button set
checkpoint meshable button set
checkpoint microstructure page sensitized
checkpoint skeleton selection page groups sensitized
checkpoint microstructure page sensitized
checkpoint meshable button set
checkpoint meshable button set
checkpoint microstructure page sensitized
checkpoint skeleton selection page groups sensitized
checkpoint OOF.Microstructure.Copy
findWidget('OOF2 Graphics 1:Pane0').set_position(278)
findWidget('OOF2:Microstructure Page:Pane').set_position(165)
findWidget('OOF2:Microstructure Page:Pane').set_position(212)
setComboBox(findWidget('OOF2:Navigation:PageMenu'), 'Pixel Selection')
checkpoint page installed Pixel Selection
checkpoint pixel page updated
checkpoint pixel page sensitized
assert tests.chooserCheck('OOF2:Pixel Selection Page:Microstructure', ['composition.png', 'microstructure'])
assert tests.chooserStateCheck('OOF2:Pixel Selection Page:Microstructure', 'composition.png')
assert tests.pixelSelectionPageStatusCheck(1428, 10000)
assert tests.pixelSelectionSizeCheck('microstructure', 0)
assert tests.pixelSelectionSizeCheck('composition.png', 1428)
findWidget('OOF2:Pixel Selection Page:Pane').set_position(324)
setComboBox(findWidget('OOF2:Pixel Selection Page:Microstructure'), 'microstructure')
checkpoint pixel page updated
checkpoint pixel page sensitized
assert tests.pixelSelectionPageStatusCheck(0, 10000)
findWidget('OOF2:Pixel Selection Page:Pane').set_position(324)
findWidget('OOF2:Pixel Selection Page:Pane:SelectionModification:OK').clicked()
findWidget('OOF2:Pixel Selection Page:Pane').set_position(324)
findWidget('OOF2 Graphics 1:Pane0').set_position(278)
checkpoint microstructure page sensitized
checkpoint pixel page updated
checkpoint selection info updated
checkpoint Graphics_1 Pixel Selection sensitized
checkpoint pixel page sensitized
checkpoint contourmap info updated for Graphics_1
checkpoint OOF.PixelSelection.Copy
assert tests.pixelSelectionPageStatusCheck(1428, 10000)
assert tests.pixelSelectionSizeCheck('microstructure', 1428)
setComboBox(findWidget('OOF2:Navigation:PageMenu'), 'Skeleton')
checkpoint page installed Skeleton
findWidget('OOF2:Skeleton Page:Pane').set_position(249)
checkpoint skeleton page sensitized
setComboBox(findWidget('OOF2:Skeleton Page:Microstructure'), 'composition.png')
findWidget('OOF2:Skeleton Page:New').clicked()
checkpoint toplevel widget mapped Dialog-New skeleton
checkpoint skeleton page sensitized
checkpoint skeleton page sensitized
findWidget('Dialog-New skeleton').resize(334, 152)
findWidget('Dialog-New skeleton:gtk-ok').clicked()
findWidget('OOF2 Graphics 1:Pane0').set_position(278)
findWidget('OOF2:Skeleton Page:Pane').set_position(249)
checkpoint skeleton selection page groups sensitized
checkpoint skeleton selection page groups sensitized
checkpoint skeleton selection page groups sensitized
checkpoint contourmap info updated for Graphics_1
checkpoint contourmap info updated for Graphics_1
checkpoint skeleton selection page selection sensitized
checkpoint skeleton selection page updated
checkpoint skeleton selection page groups sensitized
checkpoint contourmap info updated for Graphics_1
checkpoint skeleton page sensitized
checkpoint Move Node toolbox writable changed
checkpoint Move Node toolbox info updated
checkpoint Graphics_1 Move Nodes sensitized
checkpoint Graphics_1 Pixel Info updated
checkpoint selection info updated
checkpoint selection info updated
checkpoint Graphics_1 Pixel Selection sensitized
checkpoint selection info updated
checkpoint selection info updated
checkpoint mesh bdy page updated
checkpoint Field page sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page sensitized
checkpoint skeleton page sensitized
checkpoint Solver page sensitized
checkpoint contourmap info updated for Graphics_1
checkpoint OOF.Skeleton.New
setComboBox(findWidget('OOF2:Navigation:PageMenu'), 'Pixel Selection')
checkpoint page installed Pixel Selection
findWidget('OOF2:Pixel Selection Page:Pane').set_position(324)
checkpoint pixel page updated
checkpoint pixel page sensitized
setComboBox(findWidget('OOF2:Pixel Selection Page:Pane:SelectionModification:Method:Chooser'), 'Select Element Pixels')
findWidget('OOF2 Graphics 1:Pane0:Pane1:Pane2').set_position(259)
findWidget('OOF2 Graphics 1:Pane0:Pane1').set_position(717)
findWidget('OOF2 Graphics 1:Pane0').set_position(278)
setComboBox(findWidget('OOF2 Graphics 1:Pane0:Pane1:Pane2:TBChooser'), 'Skeleton Selection')
findWidget('OOF2 Graphics 1:Pane0:Pane1:Pane2').set_position(259)
checkpoint Graphics_1 Element sensitized
checkpoint selection info updated
checkpoint Graphics_1 Element sensitized
checkpoint selection info updated
findWidget('OOF2 Graphics 1:Pane0:Pane1').set_position(717)
findWidget('OOF2 Graphics 1:Pane0').set_position(278)
findWidget('OOF2 Graphics 1:Pane0:Pane1:Pane2').set_position(259)
findWidget('OOF2 Graphics 1:Pane0:Pane1').set_position(717)
findWidget('OOF2 Graphics 1:Pane0').set_position(278)
canvasobj = findCanvasRoot(findWidget('OOF2 Graphics 1:Pane0:Pane1:Pane2:Canvas'), windowname='Graphics_1')
canvasobj.emit('event', event(gtk.gdk.BUTTON_PRESS,x= 5.6304347826087e-01,y=-6.2826086956522e-01,state=0,window=findCanvasGdkWindow('Graphics_1')))
canvasobj = findCanvasRoot(findWidget('OOF2 Graphics 1:Pane0:Pane1:Pane2:Canvas'), windowname='Graphics_1')
canvasobj.emit('event', event(gtk.gdk.BUTTON_RELEASE,x= 5.6304347826087e-01,y=-6.2826086956522e-01,state=256,window=findCanvasGdkWindow('Graphics_1')))
findWidget('OOF2 Graphics 1:Pane0').set_position(278)
checkpoint Graphics_1 Element sensitized
checkpoint skeleton selection page selection sensitized
checkpoint skeleton selection page updated
checkpoint Graphics_1 Element sensitized
checkpoint skeleton selection page groups sensitized
checkpoint selection info updated
checkpoint Graphics_1 Element sensitized
checkpoint selection info updated
checkpoint contourmap info updated for Graphics_1
checkpoint OOF.Graphics_1.Toolbox.Select_Element.Single_Element
findWidget('OOF2:Pixel Selection Page:Pane:SelectionModification:OK').clicked()
findWidget('OOF2:Pixel Selection Page:Pane').set_position(324)
findWidget('OOF2 Graphics 1:Pane0').set_position(278)
checkpoint microstructure page sensitized
checkpoint pixel page updated
checkpoint pixel page sensitized
checkpoint contourmap info updated for Graphics_1
checkpoint OOF.PixelSelection.Select_Element_Pixels
findWidget('OOF2:Pixel Selection Page:Pane:SelectionModification:Clear').clicked()
findWidget('OOF2:Pixel Selection Page:Pane').set_position(324)
findWidget('OOF2 Graphics 1:Pane0').set_position(278)
checkpoint microstructure page sensitized
checkpoint pixel page updated
checkpoint pixel page sensitized
checkpoint contourmap info updated for Graphics_1
checkpoint OOF.PixelSelection.Clear
assert tests.pixelSelectionPageStatusCheck(0, 10000)
assert tests.pixelSelectionSizeCheck('microstructure', 0)
assert tests.pixelSelectionSizeCheck('composition.png', 1428)
findWidget('OOF2 Graphics 1').resize(800, 400)
setComboBox(findWidget('OOF2:Pixel Selection Page:Microstructure'), 'composition.png')
findWidget('OOF2:Pixel Selection Page:Pane').set_position(324)
findWidget('OOF2:Pixel Selection Page:Pane:SelectionModification:Clear').clicked()
checkpoint pixel page sensitized
checkpoint pixel page updated
checkpoint microstructure page sensitized
checkpoint pixel page updated
checkpoint pixel page sensitized
checkpoint contourmap info updated for Graphics_1
checkpoint OOF.PixelSelection.Clear
findWidget('OOF2:Pixel Selection Page:Pane').set_position(324)
findWidget('OOF2 Graphics 1:Pane0').set_position(278)
assert tests.pixelSelectionPageStatusCheck(0, 10000)
assert tests.pixelSelectionSizeCheck('composition.png', 0)
findWidget('OOF2:Pixel Selection Page:Pane:SelectionModification:OK').clicked()
findWidget('OOF2:Pixel Selection Page:Pane').set_position(324)
findWidget('OOF2 Graphics 1:Pane0').set_position(278)
checkpoint microstructure page sensitized
checkpoint pixel page updated
checkpoint pixel page sensitized
checkpoint contourmap info updated for Graphics_1
checkpoint OOF.PixelSelection.Select_Element_Pixels
assert tests.pixelSelectionPageStatusCheck(625, 10000)
assert tests.pixelSelectionSizeCheck('composition.png', 625)
findWidget('OOF2 Graphics 1:Pane0:Pane1:Pane2:TBScroll:Skeleton Selection:Element:Clear').clicked()
findWidget('OOF2 Graphics 1:Pane0').set_position(278)
checkpoint skeleton selection page selection sensitized
checkpoint skeleton selection page updated
checkpoint skeleton selection page groups sensitized
checkpoint selection info updated
checkpoint Graphics_1 Element sensitized
checkpoint Graphics_1 Element sensitized
checkpoint selection info updated
checkpoint contourmap info updated for Graphics_1
checkpoint OOF.Graphics_1.Toolbox.Select_Element.Clear
findWidget('OOF2 Graphics 1:Pane0:Pane1:Pane2:TBScroll:Skeleton Selection:Select:Element').clicked()
findWidget('OOF2 Graphics 1:Pane0:Pane1:Pane2:TBScroll:Skeleton Selection:Select:Segment').clicked()
checkpoint Graphics_1 Element sensitized
checkpoint selection info updated
checkpoint Graphics_1 Element sensitized
checkpoint Graphics_1 Segment sensitized
checkpoint selection info updated
checkpoint selection info updated
canvasobj = findCanvasRoot(findWidget('OOF2 Graphics 1:Pane0:Pane1:Pane2:Canvas'), windowname='Graphics_1')
canvasobj.emit('event', event(gtk.gdk.BUTTON_PRESS,x= 2.5000000000000e-01,y=-3.7173913043478e-01,state=1,window=findCanvasGdkWindow('Graphics_1')))
canvasobj = findCanvasRoot(findWidget('OOF2 Graphics 1:Pane0:Pane1:Pane2:Canvas'), windowname='Graphics_1')
canvasobj.emit('event', event(gtk.gdk.BUTTON_RELEASE,x= 2.5000000000000e-01,y=-3.7173913043478e-01,state=257,window=findCanvasGdkWindow('Graphics_1')))
findWidget('OOF2 Graphics 1:Pane0').set_position(278)
checkpoint Graphics_1 Segment sensitized
checkpoint selection info updated
checkpoint Graphics_1 Segment sensitized
checkpoint contourmap info updated for Graphics_1
checkpoint OOF.Graphics_1.Toolbox.Select_Segment.Single_Segment
canvasobj = findCanvasRoot(findWidget('OOF2 Graphics 1:Pane0:Pane1:Pane2:Canvas'), windowname='Graphics_1')
canvasobj.emit('event', event(gtk.gdk.BUTTON_PRESS,x= 3.0652173913043e-01,y=-2.5000000000000e-01,state=1,window=findCanvasGdkWindow('Graphics_1')))
canvasobj = findCanvasRoot(findWidget('OOF2 Graphics 1:Pane0:Pane1:Pane2:Canvas'), windowname='Graphics_1')
canvasobj.emit('event', event(gtk.gdk.BUTTON_RELEASE,x= 3.0652173913043e-01,y=-2.5000000000000e-01,state=257,window=findCanvasGdkWindow('Graphics_1')))
findWidget('OOF2 Graphics 1:Pane0').set_position(278)
checkpoint Graphics_1 Segment sensitized
checkpoint Graphics_1 Segment sensitized
checkpoint selection info updated
checkpoint contourmap info updated for Graphics_1
checkpoint OOF.Graphics_1.Toolbox.Select_Segment.Single_Segment
setComboBox(findWidget('OOF2:Pixel Selection Page:Pane:SelectionModification:Method:Chooser'), 'Select Segment Pixels')
findWidget('OOF2:Pixel Selection Page:Pane:SelectionModification:OK').clicked()
findWidget('OOF2 Graphics 1:Pane0').set_position(278)
checkpoint microstructure page sensitized
checkpoint pixel page updated
checkpoint pixel page sensitized
checkpoint contourmap info updated for Graphics_1
checkpoint OOF.PixelSelection.Select_Segment_Pixels
assert tests.pixelSelectionSizeCheck('composition.png', 675)
assert tests.pixelSelectionPageStatusCheck(675, 10000)
findWidget('OOF2:Pixel Selection Page:Pane:SelectionModification:Clear').clicked()
findWidget('OOF2:Pixel Selection Page:Pane').set_position(324)
findWidget('OOF2 Graphics 1:Pane0').set_position(278)
checkpoint microstructure page sensitized
checkpoint pixel page updated
checkpoint pixel page sensitized
checkpoint contourmap info updated for Graphics_1
checkpoint OOF.PixelSelection.Clear
assert tests.pixelSelectionSizeCheck('composition.png', 0)
assert tests.pixelSelectionPageStatusCheck(0, 10000)
findWidget('OOF2:Pixel Selection Page:Pane:SelectionModification:OK').clicked()
findWidget('OOF2:Pixel Selection Page:Pane').set_position(324)
findWidget('OOF2 Graphics 1:Pane0').set_position(278)
checkpoint microstructure page sensitized
checkpoint pixel page updated
checkpoint pixel page sensitized
checkpoint contourmap info updated for Graphics_1
checkpoint OOF.PixelSelection.Select_Segment_Pixels
assert tests.pixelSelectionSizeCheck('composition.png', 50)
assert tests.pixelSelectionPageStatusCheck(50, 10000)
findWidget('OOF2 Graphics 1:Pane0:Pane1:Pane2:TBScroll:Skeleton Selection:Segment:Clear').clicked()
findWidget('OOF2 Graphics 1:Pane0').set_position(278)
checkpoint selection info updated
checkpoint Graphics_1 Segment sensitized
checkpoint contourmap info updated for Graphics_1
checkpoint OOF.Graphics_1.Toolbox.Select_Segment.Clear
setComboBox(findWidget('OOF2:Navigation:PageMenu'), 'Microstructure')
checkpoint page installed Microstructure
findWidget('OOF2:Microstructure Page:Pane').set_position(212)
findWidget('OOF2:Microstructure Page:Delete').clicked()
checkpoint toplevel widget mapped Questioner
findWidget('Questioner').resize(209, 94)
findWidget('Questioner:gtk-yes').clicked()
checkpoint meshable button set
checkpoint microstructure page sensitized
checkpoint Graphics_1 Pixel Info updated
checkpoint selection info updated
checkpoint Graphics_1 Pixel Selection sensitized
checkpoint selection info updated
checkpoint selection info updated
checkpoint selection info updated
checkpoint Graphics_1 Element sensitized
checkpoint Graphics_1 Segment sensitized
checkpoint contourmap info updated for Graphics_1
checkpoint microstructure page sensitized
checkpoint Graphics_1 Pixel Info updated
checkpoint selection info updated
checkpoint Graphics_1 Pixel Selection sensitized
checkpoint selection info updated
checkpoint selection info updated
checkpoint selection info updated
checkpoint Graphics_1 Element sensitized
checkpoint Graphics_1 Segment sensitized
checkpoint contourmap info updated for Graphics_1
checkpoint Graphics_1 Pixel Info updated
checkpoint selection info updated
checkpoint Graphics_1 Pixel Selection sensitized
checkpoint selection info updated
checkpoint selection info updated
checkpoint selection info updated
checkpoint Graphics_1 Element sensitized
checkpoint Graphics_1 Segment sensitized
checkpoint contourmap info updated for Graphics_1
checkpoint Graphics_1 Pixel Info updated
checkpoint selection info updated
checkpoint selection info updated
checkpoint Graphics_1 Pixel Selection sensitized
checkpoint selection info updated
checkpoint selection info updated
checkpoint Graphics_1 Element sensitized
checkpoint Graphics_1 Segment sensitized
checkpoint contourmap info updated for Graphics_1
checkpoint Materials page updated
checkpoint boundary page updated
checkpoint skeleton selection page grouplist
checkpoint skeleton selection page selection sensitized
checkpoint skeleton selection page groups sensitized
checkpoint skeleton selection page updated
checkpoint meshable button set
checkpoint microstructure page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint OOF.Microstructure.Delete
findWidget('OOF2 Graphics 1:Pane0').set_position(278)
findWidget('OOF2:Microstructure Page:Pane').set_position(212)
findWidget('OOF2 Graphics 1:Pane0').set_position(278)
findWidget('OOF2:Microstructure Page:Delete').clicked()
checkpoint toplevel widget mapped Questioner
findWidget('Questioner').resize(209, 94)
findWidget('OOF2 Graphics 1:Pane0').set_position(278)
findWidget('OOF2:Microstructure Page:Pane').set_position(212)
findWidget('Questioner:gtk-yes').clicked()
findWidget('OOF2 Graphics 1:Pane0').set_position(278)
findWidget('OOF2:Microstructure Page:Pane').set_position(212)
findWidget('OOF2 Graphics 1:Pane0').set_position(278)
findWidget('OOF2:Microstructure Page:Pane').set_position(162)
checkpoint Graphics_1 Pixel Info updated
checkpoint selection info updated
checkpoint Graphics_1 Pixel Selection sensitized
checkpoint selection info updated
checkpoint selection info updated
checkpoint selection info updated
checkpoint Graphics_1 Element sensitized
checkpoint Graphics_1 Segment sensitized
checkpoint contourmap info updated for Graphics_1
checkpoint microstructure page sensitized
checkpoint Graphics_1 Pixel Info updated
checkpoint selection info updated
checkpoint Graphics_1 Pixel Selection sensitized
checkpoint selection info updated
checkpoint selection info updated
checkpoint selection info updated
checkpoint Graphics_1 Element sensitized
checkpoint Graphics_1 Segment sensitized
checkpoint contourmap info updated for Graphics_1
checkpoint Graphics_1 Pixel Info updated
checkpoint selection info updated
checkpoint Graphics_1 Pixel Selection sensitized
checkpoint selection info updated
checkpoint selection info updated
checkpoint selection info updated
checkpoint Graphics_1 Element sensitized
checkpoint Graphics_1 Segment sensitized
checkpoint contourmap info updated for Graphics_1
checkpoint skeleton page sensitized
checkpoint Graphics_1 Move Nodes sensitized
checkpoint Move Node toolbox writable changed
checkpoint Move Node toolbox writable changed
checkpoint Move Node toolbox info updated
checkpoint Graphics_1 Move Nodes sensitized
checkpoint Graphics_1 Pixel Info updated
checkpoint selection info updated
checkpoint Graphics_1 Pixel Selection sensitized
checkpoint selection info updated
checkpoint selection info updated
checkpoint selection info updated
checkpoint Graphics_1 Element sensitized
checkpoint Graphics_1 Segment sensitized
checkpoint contourmap info updated for Graphics_1
checkpoint mesh bdy page updated
checkpoint Field page sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page sensitized
checkpoint boundary page updated
checkpoint skeleton selection page grouplist
checkpoint skeleton selection page selection sensitized
checkpoint skeleton selection page updated
checkpoint Solver page sensitized
checkpoint skeleton selection page groups sensitized
checkpoint Field page sensitized
checkpoint pixel page sensitized
checkpoint Solver page sensitized
checkpoint Graphics_1 Pixel Info updated
checkpoint selection info updated
checkpoint selection info updated
checkpoint Graphics_1 Pixel Selection sensitized
checkpoint selection info updated
checkpoint selection info updated
checkpoint Graphics_1 Element sensitized
checkpoint Graphics_1 Segment sensitized
checkpoint contourmap info updated for Graphics_1
checkpoint pixel page updated
checkpoint active area status updated
checkpoint pixel page sensitized
checkpoint mesh bdy page updated
checkpoint Field page sensitized
checkpoint Materials page updated
checkpoint mesh page subproblems sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page sensitized
checkpoint mesh page subproblems sensitized
checkpoint mesh page sensitized
checkpoint boundary page updated
checkpoint skeleton selection page grouplist
checkpoint Solver page sensitized
checkpoint skeleton selection page updated
checkpoint skeleton selection page selection sensitized
checkpoint skeleton selection page groups sensitized
# checkpoint interface page updated
checkpoint meshable button set
checkpoint meshable button set
checkpoint microstructure page sensitized
checkpoint microstructure page sensitized
checkpoint Field page sensitized
checkpoint Solver page sensitized
checkpoint OOF.Microstructure.Delete
findMenu(findWidget('OOF2:MenuBar'), 'File:Save:Python_Log').activate()
checkpoint toplevel widget mapped Dialog-Python_Log
findWidget('Dialog-Python_Log').resize(194, 72)
findWidget('Dialog-Python_Log:filename').set_text('p')
findWidget('Dialog-Python_Log:filename').set_text('pi')
findWidget('Dialog-Python_Log:filename').set_text('pix')
findWidget('Dialog-Python_Log:filename').set_text('pixs')
findWidget('Dialog-Python_Log:filename').set_text('pixse')
findWidget('Dialog-Python_Log:filename').set_text('pixsel')
findWidget('Dialog-Python_Log:filename').set_text('pixsel.')
findWidget('Dialog-Python_Log:filename').set_text('pixsel.l')
findWidget('Dialog-Python_Log:filename').set_text('pixsel.lo')
findWidget('Dialog-Python_Log:filename').set_text('pixsel.log')
findWidget('Dialog-Python_Log:gtk-ok').clicked()
checkpoint OOF.File.Save.Python_Log
assert tests.filediff('pixsel.log')
findMenu(findWidget('OOF2:MenuBar'), 'File:Quit').activate()
checkpoint OOF.Graphics_1.File.Close
# file: CMSSW_tools/scripts/modules/SaveObjectsToFile.py (repo: soarnsoar/Gen_validation)
'''
void SaveObjectToFile(TString filepath, TObject *a){
TFile outputFile (filepath,"RECREATE");
a->Write();
outputFile.Write();
outputFile.Close();
}
'''
import ROOT
def SaveObjectsToFile(filepath, object_list):
    outputFile = ROOT.TFile(filepath, 'RECREATE')
for obj in object_list:
obj.Write()
outputFile.Write()
outputFile.Close()
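# A minimal usage sketch (assumes a working PyROOT installation; the histogram
# names below are illustrative and not part of the original module):
if __name__ == '__main__':
    h_pt = ROOT.TH1F('h_pt', 'pT;GeV;entries', 100, 0.0, 200.0)
    h_eta = ROOT.TH1F('h_eta', 'eta;;entries', 50, -2.5, 2.5)
    # Both objects land in a single ROOT file, mirroring the C++ helper above.
    SaveObjectsToFile('example_hists.root', [h_pt, h_eta])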
# file: text_data/gnosis_door_dict.py (repo: daskorod/RPG_project)
text = [
    {
        0: ('The door is locked. You knock and hear a voice from within: "Who are you? This is an elite house for elite people. There is no place here for common riffraff."',
            'next',
            '1. I am a good knight of the church; in the name of the Lord, let me in. 2. I am a simple man, my apologies (leave).'),
        10: ('Oh, we know your God. You are a servant of the evil Demiurge, not of God. Get out of here, you wretched somatic.',
             'end',
             'Dialogue finished. (You may go.)',
             ('go', 0)),
        20: ('Then get out of here, simple man.',
             'end',
             'Dialogue finished. (You may go.)',
             ('go', 0)),
    },
    {
        0: ('The door is locked. You knock and hear a voice from within: "Who are you? This is an elite house for elite people. There is no place here for common riffraff."',
            'passage',
            'Press E',
            ('go', 2)),
    },
    {
        0: ('You recall the book you have read. You feel that you hold some hidden knowledge and are elite and special enough to enter this society. Meanwhile the voice behind the door repeats: "So who are you?"',
            'next',
            '1. I am a pneumatic. 2. I am a simple man, my apologies (leave).'),
        10: ('Come in!',
             'open',
             'Press E'),
        20: ('Then get out of here, simple man.',
             'end',
             'Dialogue finished. (You may go.)',
             ('go', 0)),
    },
]
"[email protected]"
] | |
# file: problems/test9/2.py (repo: GH-Lim/AlgorithmPractice)
from heapq import heappop, heappush
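# calc_day flattens a (month, day) date into a running day count. Note that
# range(month) also adds a phantom 28-day "month 0"; that constant offset is
# shared by every timestamp, so orderings and differences are unaffected.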
def calc_day(month, day):
res = 0
for i in range(month):
if i in {1, 3, 5, 7, 8, 10, 12}:
res += 31
elif i in {4, 6, 9, 11}:
res += 30
else:
res += 28
return res + day
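# Greedy sweep over arrival times (timestamps that go backwards are pushed one
# year forward): `rest` holds (previous-finish-time, clerk-id) pairs for idle
# clerks, `work` holds (finish-time, clerk-id) for busy ones. Each customer is
# assigned to the idle clerk with the earliest previous finish time (ties by
# id); if nobody is idle, to the clerk who frees up earliest. `cnt` tallies
# how many customers each clerk serves.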
def solution(n, customers):
times = []
rest = []
work = []
cnt = [0] * (n + 1)
for i in range(1, n + 1):
heappush(rest, (0, i))
for customer in customers:
date, time, minute = customer.split()
MM, DD = map(int, date.split('/'))
hh, mm, ss = map(int, time.split(':'))
d = calc_day(MM, DD)
arrival = d * 24 * 3600 + hh * 3600 + mm * 60 + ss
if times and times[-1][0] > arrival:
arrival += 365 * 24 * 3600
times.append((arrival, int(minute) * 60))
for arr_time in times:
t, m = arr_time
while work:
fin_time, num = heappop(work)
if t >= fin_time:
heappush(rest, (fin_time, num))
else:
heappush(work, (fin_time, num))
break
if rest:
_, num = heappop(rest)
cnt[num] += 1
heappush(work, (t + m, num))
else:
fin_time, num = heappop(work)
cnt[num] += 1
heappush(work, (fin_time + m, num))
    return max(cnt)
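# A minimal, hypothetical invocation (input format inferred from the parsing
# above: "MM/DD hh:mm:ss service-minutes"); prints the busiest clerk's load.
if __name__ == '__main__':
    print(solution(2, ['05/01 09:00:00 30', '05/01 09:10:00 20']))  # -> 1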
# file: byte/module_using_sys.py (repo: saibi/python)
#!/usr/bin/env python
if operador == "-":
def resta(valor1=0, valor2=0):
return valor1 - valor2
return resta
elif operador == "*":
def multiplicacion(valor1=0, valor2=0):
return valor1 * valor2
return multiplicacion
elif operador == "/":
def division(valor1=0, valor2=0):
return valor1 / valor2
return division
funcion_resta = crear_funcion("-")
resultado = operacion(funcion_resta, 30, 10)
funcion_multiplicacion = crear_funcion("*")
resultado = operacion(funcion_multiplicacion, 30, 10)
funcion_division = crear_funcion("/")
resultado = operacion(funcion_division, 30, 10)
print(resultado)
| [
"[email protected]"
] | |
d8a88aace4b211c001c0a664802ffaaf72121697 | dcd840c1ef56db1cd4aa1ca170ab374d3a4c10b6 | /src/programy/clients/args.py | afa29cceefbe78854d412a5590ff0ba529b67c39 | [
"MIT"
] | permissive | zippyy/program-y | e1c9ce3be6cbbba2853842999c2277d574755eb3 | 9267a3dfcbb10ea109b187dbb3767d61ca4da841 | refs/heads/master | 2020-04-02T17:52:34.382681 | 2018-10-24T16:28:50 | 2018-10-24T16:28:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,264 | py | """
Copyright (c) 2016-2018 Keith Sterling http://www.keithsterling.com
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated
documentation files (the "Software"), to deal in the Software without restriction, including without limitation
the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software,
and to permit persons to whom the Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the
Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO
THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""
import logging
import argparse
class ClientArguments(object):
def __init__(self, client, parser=None):
self._bot_root = "."
self._logging = logging.DEBUG
self._config_name = "config.yaml"
self._config_format = "yaml"
self._no_loop = False
def parse_args(self, client):
pass
@property
def bot_root(self):
return self._bot_root
@bot_root.setter
def bot_root(self, root):
self._bot_root = root
@property
def logging(self):
return self._logging
@property
def config_filename(self):
return self._config_name
@property
def config_format(self):
return self._config_format
@property
def noloop(self):
return self._no_loop
class CommandLineClientArguments(ClientArguments):
def __init__(self, client, parser=None):
self.args = None
self._bot_root = None
self._logging = None
self._config_name = None
self._config_format = None
self._no_loop = False
ClientArguments.__init__(self, client)
if parser is None:
self.parser = argparse.ArgumentParser()
else:
self.parser = parser
self.parser.add_argument('--bot_root', dest='bot_root', help='root folder for all bot configuration data')
self.parser.add_argument('--config', dest='config', help='configuration file location')
self.parser.add_argument('--cformat', dest='cformat', help='configuration file format (yaml|json|ini)')
self.parser.add_argument('--logging', dest='logging', help='logging configuration file')
self.parser.add_argument('--noloop', dest='noloop', action='store_true', help='do not enter conversation loop')
client.add_client_arguments(self.parser)
def parse_args(self, client):
self.args = self.parser.parse_args()
self._bot_root = self.args.bot_root
self._logging = self.args.logging
self._config_name = self.args.config
self._config_format = self.args.cformat
self._no_loop = self.args.noloop
client.parse_args(self, self.args)
| [
"[email protected]"
] | |
ac88203f03fb256732a6ddee77bdc257f143d26a | a0d2a90c21ff3e05e0fd939698a6dfb7e54d16d9 | /GServer/mac_cmd/debuger.py | b571a240ddf3f8d694124c7a0afd34b03cc65be0 | [
"MIT"
] | permissive | soybean217/lora-python | 4a72407607d2201a91b5e0a7dcd115d7788b7e65 | 9c4324f81bae8b20f6c353447189f724a5cf54c6 | refs/heads/master | 2022-12-13T08:24:13.267783 | 2017-12-06T08:20:40 | 2017-12-06T08:20:40 | 102,331,484 | 0 | 0 | MIT | 2022-12-07T23:56:50 | 2017-09-04T07:24:49 | Python | UTF-8 | Python | false | false | 718 | py | from time import ctime,time
def timeStumpFunc(args):
def get_function(function):
def wrappedFunc(*nkw):
time_start = time()*1000
result = function(*nkw)
time_casted = time()*1000 - time_start
print('Function', args, 'cast %f ms' % time_casted)
return result
return wrappedFunc
return get_function
def debuger(args):
def get_function(function):
def wrapped_function(*nkw):
print(args, 'begin!')
# print('input type:',type(*nkw),'len:',len(*nkw))
result = function(*nkw)
print(args, 'done!')
return result
return wrapped_function
return get_function
| [
"[email protected]"
] | |
fb06704c09d4561b67183d828d13a1595f5c9985 | d2eb7bd335175edd844a3e6c1c633ee0dc2dbb25 | /contests_atcoder/abc176/abc176_d.py | 5d96807a125631d40ae1146b10f7bee768af4a30 | [
"BSD-2-Clause"
] | permissive | stdiorion/competitive-programming | 5020a12b85f1e691ceb0cacd021606a9dc58b72c | e7cf8ef923ccefad39a1727ca94c610d650fcb76 | refs/heads/main | 2023-03-27T01:13:42.691586 | 2021-03-08T08:05:53 | 2021-03-08T08:05:53 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,808 | py | from itertools import accumulate,chain,combinations,groupby,permutations,product
from collections import deque,Counter
from bisect import bisect_left,bisect_right
from math import gcd,sqrt,sin,cos,tan,degrees,radians
from fractions import Fraction
from decimal import Decimal
import sys
input = lambda: sys.stdin.readline().rstrip()
#from sys import setrecursionlimit
#setrecursionlimit(10**7)
MOD=10**9+7
INF=float('inf')
h, w = map(int, input().split())
ch, cw = map(lambda x: int(x) - 1, input().split())
dh, dw = map(lambda x: int(x) - 1, input().split())
field = [list(input()) for _ in range(h)]
warp_needed = [[INF] * w for _ in range(h)]
d = deque()
d.append((ch, cw))
warp_needed[ch][cw] = 0
d_afterwarp = deque()
def walk_from(p):
return [(p[0] - 1, p[1]), (p[0], p[1] - 1), (p[0] + 1, p[1]), (p[0], p[1] + 1)]
def warp_from(p):
ret = []
for i in range(-2, 3):
for j in range(-2, 3):
if abs(i) + abs(j) > 1:
ret.append((p[0] + i, p[1] + j))
return ret
warp_count = 0
while True:
if d:
now = d.popleft()
for dst in walk_from(now):
if 0 <= dst[0] < h and 0 <= dst[1] < w and field[dst[0]][dst[1]] != "#" and warp_needed[dst[0]][dst[1]] > warp_count:
warp_needed[dst[0]][dst[1]] = warp_count
d.append(dst)
for dst in warp_from(now):
if 0 <= dst[0] < h and 0 <= dst[1] < w and field[dst[0]][dst[1]] != "#" and warp_needed[dst[0]][dst[1]] > warp_count + 1:
warp_needed[dst[0]][dst[1]] = warp_count + 1
d_afterwarp.append(dst)
elif d_afterwarp:
d = d_afterwarp
d_afterwarp = deque()
warp_count += 1
else:
break
print(warp_needed[dh][dw] if warp_needed[dh][dw] != INF else -1) | [
"[email protected]"
] | |
e2ecca12b421baaa62443f60509bb4cab2d71d1c | df3853b41ed05d86f5bcd992fcc265f637c67784 | /1sem/Lab7/Lab7_Task6.py | d6c64f14b12411e13c504f482958f2a1aeaf6f10 | [] | no_license | KseniaMIPT/Adamasta | 6ab0121519581dbbbf6ae788d1da85f545f718d1 | e91c34c80834c3f4bf176bc4bf6bf790f9f72ca3 | refs/heads/master | 2021-01-10T16:48:31.141709 | 2016-11-23T21:02:25 | 2016-11-23T21:02:25 | 43,350,507 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 380 | py | import matplotlib.pyplot as plt
f = open('input.txt', 'r')
file_lines = f.readlines()
words_list = []
for i in range(len(file_lines)):
words_list += file_lines[i].split()
words_len = [len(word) for word in words_list]
number_of_words_with_length = [words_len.count(length) for length in range(min(words_len), max(words_len))]
plt.plot(number_of_words_with_length, )
plt.show() | [
"[email protected]"
] | |
7f2dcbafba43ae2baa347247eac3a5cde1f0b8f6 | 3431ace8cae7b804f977a631f231dd1a4cb4200e | /1 first draft/sudoku 4.py | 34e595e3afa22622880ee995df0aee86bdb256e5 | [] | no_license | rayyan-khan/7-sudoku | 6f9f0dbf735dc6c8c956db453efae956e9d8fd23 | e1407e5caa78fb215e6a3818da9b9c448f69f2ea | refs/heads/master | 2022-02-22T10:35:41.806166 | 2018-12-20T14:38:00 | 2018-12-20T14:38:00 | 161,864,475 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,752 | py | import sys, time
# need to fix bug
# input:
INPUT = open(sys.argv[1], 'r') if len(sys.argv) == 2 else open('puzzles.txt', 'r')
# set up global variables:
INP = '.'*81
def setGlobals(pzl):
global PZLSIZE, CSTRSIZE, SUBHEIGHT, SUBWIDTH, SYMSET, ROWCSTR, COLCSTR, SUBCSTR, CSTRS, NBRS
pzl = ''.join([n for n in pzl if n != '.'])
PZLSIZE = len(INP)
CSTRSIZE = int(len(INP) ** .5)
SUBHEIGHT, SUBWIDTH = int(CSTRSIZE ** .5), int(CSTRSIZE ** .5) \
if int(CSTRSIZE ** .5 // 1) == int(CSTRSIZE ** .5) \
else (int(CSTRSIZE ** .5 // 1), int(CSTRSIZE ** .5 // 1 + 1))
SYMSET = {n for n in pzl} - {'.'}
if len(SYMSET) != CSTRSIZE:
otherSyms = [n for n in '123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ0']
while len(SYMSET) < CSTRSIZE:
SYMSET.add(otherSyms.pop(0))
ROWCSTR = [{index for index in range(row*CSTRSIZE, (row + 1)*CSTRSIZE)}
for row in range(CSTRSIZE)]
COLCSTR = [{index for index in range(col, col + PZLSIZE - SUBWIDTH*SUBHEIGHT + 1, SUBWIDTH*SUBHEIGHT)}
for col in range(CSTRSIZE)]
SUBCSTR = [{boxRow + boxColOffset + subRow * CSTRSIZE + subCol
for subRow in range(SUBHEIGHT) for subCol in range(SUBWIDTH)}
for boxRow in range(0, PZLSIZE, SUBHEIGHT * CSTRSIZE) for boxColOffset in range(0, CSTRSIZE, SUBWIDTH)]
CSTRS = ROWCSTR + COLCSTR + SUBCSTR
NBRS = [set().union(*[cset for cset in CSTRS if n in cset]) - {n} for n in range(PZLSIZE)]
setGlobals(INP)
# helper methods
def printPzl(pzl):
cstrsize = int(len(pzl) ** .5)
subheight, subwidth = int(cstrsize ** .5), int(cstrsize ** .5) \
if int(cstrsize ** .5 // 1) == int(cstrsize ** .5) \
else (int(cstrsize ** .5 // 1), int(cstrsize ** .5 // 1 + 1))
rowLen = subwidth*(int(cstrsize/subheight))
for row in range(cstrsize):
print(' '.join(pzl[rowLen*row: rowLen*(row + 1)]))
def checkSum(pzl):
return sum(ord(n) for n in pzl) - PZLSIZE*ord('0')
def getBestPos(pzl):
bestPos = 0 # positions that fewest symbols can go into
mostNbrs = 0
for index in range(PZLSIZE):
if pzl[index] != '.':
continue
nbrSet = set()
for nbrInd in NBRS[index]:
if pzl[nbrInd] != '.':
nbrSet.add(pzl[nbrInd])
if len(nbrSet) > mostNbrs:
mostNbrs = len(nbrSet)
bestPos = index
return bestPos
def getBestSyms(pzl):
bestSyms = set()
mostPlaced = 0
for sym in SYMSET:
placed = pzl.count(sym)
if placed > mostPlaced:
mostPlaced = placed
bestSyms = set()
if placed == mostPlaced:
bestSyms.add(sym)
return bestSyms
# solve
def solve(pzl):
if pzl.find('.') == -1:
return pzl
bestPos = getBestPos(pzl)
for sym in SYMSET - {pzl[n] for n in NBRS[bestPos]}:
#setAllPosForSym = {index for index in range(PZLSIZE) if pzl[index] != sym}
# cs in CSTRS:
# setAllPosForSymInCS = cs & setAllPosForSym
#for pos in setAllPosForSymInCS:
pzlMove = pzl[:bestPos] + sym + pzl[bestPos + 1:]
newPzl = solve(pzlMove)
if newPzl:
return newPzl
return ''
# run
time51 = time.clock()
for line in enumerate(INPUT.readlines()):
start = time.clock()
pzlNum, INP = line
if pzlNum == 50:
print('Time for 51: {}'.format(time.clock() - time51))
INP = INP.strip()
setGlobals(INP)
solution = solve(INP)
print('{}: Time: {} Sum: {} \n'.format(pzlNum + 1, round(time.clock() - start, 3), checkSum(solution)), end='')
if solution == '':
print('No solution -- i.e. theres a bug here')
#else:
# printPzl(solution)
| [
"[email protected]"
] | |
4a0127fc8f2d7ed62d6915a40c23a4b28235f5be | 164ffe077dde59373ad9fadcfd727f279a1cfe93 | /jni_build/jni/include/external/bazel_tools/tools/build_defs/docker/testenv.py | a6f4189e96ac6ae0798d809c9dd2acddebf4e171 | [] | no_license | Basofe/Community_Based_Repository_Traffic_Signs | 524a4cfc77dc6ed3b279556e4201ba63ee8cf6bd | a20da440a21ed5160baae4d283c5880b8ba8e83c | refs/heads/master | 2021-01-22T21:17:37.392145 | 2017-09-28T21:35:58 | 2017-09-28T21:35:58 | 85,407,197 | 0 | 2 | null | null | null | null | UTF-8 | Python | false | false | 773 | py | # Copyright 2015 The Bazel Authors. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Path to the test data."""
import os
import os.path
TESTDATA_PATH = os.path.join(
os.getcwd(),
"tools/build_defs/docker/testdata",
)
| [
"[email protected]"
] | |
4b32b7205890b2163ed6f8361383352233344849 | b140b104b6de0c8a924db008a48d9798e046919e | /byte/module_using_sys.py | 0098ec94e831916455510b9078702d9bbc2d84fb | [] | no_license | saibi/python | ad206fbfe752198492c939578607f1c31223d3c3 | fd94a623241c28dffe60350496a5c858c6f912e8 | refs/heads/main | 2023-09-01T08:20:33.379923 | 2023-08-31T01:32:17 | 2023-08-31T01:32:17 | 74,268,329 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 181 | py | #!/usr/bin/env python
# -*- coding: utf8 -*-
import sys
print('the command line arguments are:')
for i in sys.argv:
print(i)
print('\n\nThe PYTHONPATH is', sys.path, '\n')
| [
"[email protected]"
] | |
e99e71df9008cda3e264baeefafc564222bbb05f | e92bcb90e3bfb4b6076cd9deffc3e67c9770f122 | /spk/haproxy/src/app/application/auth.py | 7cedb692302bd19331b9d202589ec62b2f837eea | [
"BSD-3-Clause"
] | permissive | SynoCommunity/spksrc | e708e77af58b95259e802229b19495ad011b536b | b0e665b8dcc6c3eedd7814cc8e3d957842b9c01d | refs/heads/master | 2023-09-03T13:19:35.212489 | 2023-09-02T11:17:53 | 2023-09-02T11:17:53 | 2,565,137 | 2,602 | 1,388 | NOASSERTION | 2023-09-14T21:56:02 | 2011-10-12T20:25:50 | Makefile | UTF-8 | Python | false | false | 2,088 | py | # -*- coding: utf-8 -*-
from collections import namedtuple
from flask import abort, request
from functools import wraps, partial
from subprocess import check_output
import grp
import os
import pwd
__all__ = ['authenticate', 'requires_auth']
def authenticate():
"""Authenticate a user using Synology's authenticate.cgi
If the user is authenticated, returns a nametuple with the
username and its groups, if not returns None. For example::
>>> authenticate()
User(name='admin', groups=['administrators'])
:rtype: namedtuple or None
"""
User = namedtuple('User', ['name', 'groups'])
with open(os.devnull, 'w') as devnull:
user = check_output(['/usr/syno/synoman/webman/modules/authenticate.cgi'], stderr=devnull).strip()
if not user:
return None
groups = [g.gr_name for g in grp.getgrall() if user in g.gr_mem]
groups.append(grp.getgrgid(pwd.getpwnam(user).pw_gid).gr_name)
return User(user, set(groups))
def requires_auth(f=None, groups=None, users=None):
"""Require a user to be authenticated. If he is not, this aborts
on 403.
The condition to be authorized is for the user to be authenticated
and in one of the listed groups (if any) or one of the listed users
(if any)
:param function f: the decorated function
:param list groups: groups whitelist
:param list users: users whitelist
"""
if f is None:
return partial(requires_auth, groups=groups, users=users)
@wraps(f)
def decorated(*args, **kwargs):
user = authenticate()
if user is None: # Not authenticated
abort(403)
# A user is authorized if he is in the groups whitelist or the users whitelist
authorized = False
if groups is not None and len(set(groups) & user.groups) > 0: # Authorized group
authorized = True
if users is not None and user.name in users: # Authorized user
authorized = True
if not authorized:
abort(403)
return f(*args, **kwargs)
return decorated
| [
"[email protected]"
] | |
e3530234a6047e169a09f7a802ba4ee2672cb2e6 | 3705110f5d8fc536b9d6fb8473482babac491dd7 | /build/src/django-doc-wiki-0.2.0BETA/doc_wiki/models.py | 68d89f5bc1fb6fec0dd13ae3fc483e6e6fdf2d0d | [
"Apache-2.0"
] | permissive | taylanpince/wiki | c726933258142b19b226c066f755bbcdb9196498 | 227abb4991a071494394e2bbae25775e4baa6d1d | refs/heads/master | 2020-06-07T10:53:13.479818 | 2012-02-28T16:56:26 | 2012-02-28T16:56:26 | 479,720 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,576 | py | from django.core.cache import cache
from django.db import models
from django.utils.safestring import mark_safe
from django.utils.translation import ugettext_lazy as _
from doc_wiki import settings
from doc_wiki.managers import WikiPageManager
from doc_wiki.parsers import parse_markdown
class WikiPage(models.Model):
"""
A wiki page based on a document in the file system
"""
slug = models.SlugField(_("Slug"), max_length=255)
path = models.FilePathField(_("Path"), path=settings.DIRECTORY_PATH, recursive=False, max_length=255)
content = models.TextField(_("Content"), blank=True)
timestamp = models.DateTimeField(_("Time Stamp"), auto_now=True)
admin_objects = models.Manager()
objects = WikiPageManager()
class Meta:
verbose_name = _("Wiki Page")
verbose_name_plural = _("Wiki Pages")
def __unicode__(self):
return u"Wiki Page: %s" % self.slug
@models.permalink
def get_absolute_url(self):
return ("doc_wiki_page", (), {
"slug": self.slug,
})
@property
def content_html(self):
"""
Parses the content field using markdown and pygments, caches the results
"""
key = "wiki_pages_content_%d" % self.pk
html = cache.get(key)
if not html:
html = parse_markdown(self.content)
cache.set(key, html, 60 * 60 * 24 * 30)
return mark_safe(html)
def save(self):
if self.pk:
cache.delete("wiki_pages_content_%d" % self.pk)
super(WikiPage, self).save()
| [
"[email protected]"
] | |
c051681c4d71382457478f2977678850900a2d9d | d2f50124ff3bec70b9b3139ecb063b06e526781d | /biable/migrations/0061_cliente_competencia.py | 1df9f28fe4cf97ab807d908f104f46d2fed7fb1d | [] | no_license | odecsarrollo/odecopack-componentes | e8d993f089bf53bbf3c53d1265e70ac5c06b59b8 | b583a115fb30205d358d97644c38d66636b573ff | refs/heads/master | 2022-12-12T00:33:02.874268 | 2020-08-13T18:45:01 | 2020-08-13T18:45:01 | 189,262,705 | 0 | 0 | null | 2022-12-08T11:23:46 | 2019-05-29T16:37:21 | Python | UTF-8 | Python | false | false | 457 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-02-07 21:43
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('biable', '0060_auto_20170207_1638'),
]
operations = [
migrations.AddField(
model_name='cliente',
name='competencia',
field=models.BooleanField(default=False),
),
]
| [
"[email protected]"
] | |
# file: test/functional/test_runner.py (repo: zortcoin/zortcoin, license: MIT)
#!/usr/bin/env python3
# Copyright (c) 2014-2020 The Zortcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Run regression test suite.
This module calls down into individual test cases via subprocess. It will
forward all unrecognized arguments onto the individual test scripts.
For a description of arguments recognized by test scripts, see
`test/functional/test_framework/test_framework.py:ZortcoinTestFramework.main`.
"""
import argparse
from collections import deque
import configparser
import datetime
import os
import time
import shutil
import subprocess
import sys
import tempfile
import re
import logging
import unittest
# Formatting. Default colors to empty strings.
BOLD, GREEN, RED, GREY = ("", ""), ("", ""), ("", ""), ("", "")
try:
# Make sure python thinks it can write unicode to its stdout
"\u2713".encode("utf_8").decode(sys.stdout.encoding)
TICK = "✓ "
CROSS = "✖ "
CIRCLE = "○ "
except UnicodeDecodeError:
TICK = "P "
CROSS = "x "
CIRCLE = "o "
if os.name != 'nt' or sys.getwindowsversion() >= (10, 0, 14393):
if os.name == 'nt':
import ctypes
kernel32 = ctypes.windll.kernel32 # type: ignore
ENABLE_VIRTUAL_TERMINAL_PROCESSING = 4
STD_OUTPUT_HANDLE = -11
STD_ERROR_HANDLE = -12
# Enable ascii color control to stdout
stdout = kernel32.GetStdHandle(STD_OUTPUT_HANDLE)
stdout_mode = ctypes.c_int32()
kernel32.GetConsoleMode(stdout, ctypes.byref(stdout_mode))
kernel32.SetConsoleMode(stdout, stdout_mode.value | ENABLE_VIRTUAL_TERMINAL_PROCESSING)
# Enable ascii color control to stderr
stderr = kernel32.GetStdHandle(STD_ERROR_HANDLE)
stderr_mode = ctypes.c_int32()
kernel32.GetConsoleMode(stderr, ctypes.byref(stderr_mode))
kernel32.SetConsoleMode(stderr, stderr_mode.value | ENABLE_VIRTUAL_TERMINAL_PROCESSING)
# primitive formatting on supported
# terminal via ANSI escape sequences:
BOLD = ('\033[0m', '\033[1m')
GREEN = ('\033[0m', '\033[0;32m')
RED = ('\033[0m', '\033[0;31m')
GREY = ('\033[0m', '\033[1;30m')
TEST_EXIT_PASSED = 0
TEST_EXIT_SKIPPED = 77
TEST_FRAMEWORK_MODULES = [
"address",
"blocktools",
"muhash",
"key",
"script",
"segwit_addr",
"util",
]
EXTENDED_SCRIPTS = [
# These tests are not run by default.
# Longest test should go first, to favor running tests in parallel
'feature_pruning.py',
'feature_dbcrash.py',
]
BASE_SCRIPTS = [
# Scripts that are run by default.
# Longest test should go first, to favor running tests in parallel
'wallet_hd.py',
'wallet_hd.py --descriptors',
'wallet_backup.py',
'wallet_backup.py --descriptors',
# vv Tests less than 5m vv
'mining_getblocktemplate_longpoll.py',
'feature_maxuploadtarget.py',
'feature_block.py',
'rpc_fundrawtransaction.py',
'rpc_fundrawtransaction.py --descriptors',
'p2p_compactblocks.py',
'feature_segwit.py --legacy-wallet',
# vv Tests less than 2m vv
'wallet_basic.py',
'wallet_basic.py --descriptors',
'wallet_labels.py',
'wallet_labels.py --descriptors',
'p2p_segwit.py',
'p2p_timeouts.py',
'p2p_tx_download.py',
'mempool_updatefromblock.py',
'wallet_dump.py --legacy-wallet',
'wallet_listtransactions.py',
'wallet_listtransactions.py --descriptors',
'feature_taproot.py',
# vv Tests less than 60s vv
'p2p_sendheaders.py',
'wallet_importmulti.py --legacy-wallet',
'mempool_limit.py',
'rpc_txoutproof.py',
'wallet_listreceivedby.py',
'wallet_listreceivedby.py --descriptors',
'wallet_abandonconflict.py',
'wallet_abandonconflict.py --descriptors',
'feature_csv_activation.py',
'rpc_rawtransaction.py',
'rpc_rawtransaction.py --descriptors',
'wallet_address_types.py',
'wallet_address_types.py --descriptors',
'feature_bip68_sequence.py',
'p2p_feefilter.py',
'feature_reindex.py',
'feature_abortnode.py',
# vv Tests less than 30s vv
'wallet_keypool_topup.py',
'wallet_keypool_topup.py --descriptors',
'feature_fee_estimation.py',
'interface_zmq.py',
'interface_zortcoin_cli.py',
'mempool_resurrect.py',
'wallet_txn_doublespend.py --mineblock',
'tool_wallet.py',
'tool_wallet.py --descriptors',
'wallet_txn_clone.py',
'wallet_txn_clone.py --segwit',
'rpc_getchaintips.py',
'rpc_misc.py',
'interface_rest.py',
'mempool_spend_coinbase.py',
'wallet_avoidreuse.py',
'wallet_avoidreuse.py --descriptors',
'mempool_reorg.py',
'mempool_persist.py',
'wallet_multiwallet.py',
'wallet_multiwallet.py --descriptors',
'wallet_multiwallet.py --usecli',
'wallet_createwallet.py',
'wallet_createwallet.py --usecli',
'wallet_createwallet.py --descriptors',
'wallet_watchonly.py --legacy-wallet',
'wallet_watchonly.py --usecli --legacy-wallet',
'wallet_reorgsrestore.py',
'interface_http.py',
'interface_rpc.py',
'rpc_psbt.py',
'rpc_psbt.py --descriptors',
'rpc_users.py',
'rpc_whitelist.py',
'feature_proxy.py',
'rpc_signrawtransaction.py',
'rpc_signrawtransaction.py --descriptors',
'wallet_groups.py',
'p2p_addrv2_relay.py',
'wallet_groups.py --descriptors',
'p2p_disconnect_ban.py',
'rpc_decodescript.py',
'rpc_blockchain.py',
'rpc_deprecated.py',
'wallet_disable.py',
'wallet_disable.py --descriptors',
'p2p_addr_relay.py',
'p2p_getaddr_caching.py',
'p2p_getdata.py',
'rpc_net.py',
'wallet_keypool.py',
'wallet_keypool.py --descriptors',
'wallet_descriptor.py --descriptors',
'p2p_nobloomfilter_messages.py',
'p2p_filter.py',
'rpc_setban.py',
'p2p_blocksonly.py',
'mining_prioritisetransaction.py',
'p2p_invalid_locator.py',
'p2p_invalid_block.py',
'p2p_invalid_messages.py',
'p2p_invalid_tx.py',
'feature_assumevalid.py',
'example_test.py',
'wallet_txn_doublespend.py',
'wallet_txn_doublespend.py --descriptors',
'feature_backwards_compatibility.py',
'feature_backwards_compatibility.py --descriptors',
'wallet_txn_clone.py --mineblock',
'feature_notifications.py',
'rpc_getblockfilter.py',
'rpc_invalidateblock.py',
'feature_rbf.py',
'mempool_packages.py',
'mempool_package_onemore.py',
'rpc_createmultisig.py',
'rpc_createmultisig.py --descriptors',
'feature_versionbits_warning.py',
'rpc_preciousblock.py',
'wallet_importprunedfunds.py',
'wallet_importprunedfunds.py --descriptors',
'p2p_leak_tx.py',
'p2p_eviction.py',
'rpc_signmessage.py',
'rpc_generateblock.py',
'rpc_generate.py',
'wallet_balance.py',
'wallet_balance.py --descriptors',
'feature_nulldummy.py',
'feature_nulldummy.py --descriptors',
'mempool_accept.py',
'mempool_expiry.py',
'wallet_import_rescan.py --legacy-wallet',
'wallet_import_with_label.py --legacy-wallet',
'wallet_importdescriptors.py --descriptors',
'wallet_upgradewallet.py',
'rpc_bind.py --ipv4',
'rpc_bind.py --ipv6',
'rpc_bind.py --nonloopback',
'mining_basic.py',
'feature_signet.py',
'wallet_bumpfee.py',
'wallet_bumpfee.py --descriptors',
'wallet_implicitsegwit.py --legacy-wallet',
'rpc_named_arguments.py',
'wallet_listsinceblock.py',
'wallet_listsinceblock.py --descriptors',
'p2p_leak.py',
'wallet_encryption.py',
'wallet_encryption.py --descriptors',
'feature_dersig.py',
'feature_cltv.py',
'rpc_uptime.py',
'wallet_resendwallettransactions.py',
'wallet_resendwallettransactions.py --descriptors',
'wallet_fallbackfee.py',
'wallet_fallbackfee.py --descriptors',
'rpc_dumptxoutset.py',
'feature_minchainwork.py',
'rpc_estimatefee.py',
'rpc_getblockstats.py',
'wallet_create_tx.py',
'wallet_send.py',
'wallet_create_tx.py --descriptors',
'p2p_fingerprint.py',
'feature_uacomment.py',
'wallet_coinbase_category.py',
'wallet_coinbase_category.py --descriptors',
'feature_filelock.py',
'feature_loadblock.py',
'p2p_dos_header_tree.py',
'p2p_unrequested_blocks.py',
'p2p_blockfilters.py',
'feature_includeconf.py',
'feature_asmap.py',
'mempool_unbroadcast.py',
'mempool_compatibility.py',
'rpc_deriveaddresses.py',
'rpc_deriveaddresses.py --usecli',
'p2p_ping.py',
'rpc_scantxoutset.py',
'feature_logging.py',
'p2p_node_network_limited.py',
'p2p_permissions.py',
'feature_blocksdir.py',
'wallet_startup.py',
'feature_config_args.py',
'feature_settings.py',
'rpc_getdescriptorinfo.py',
'rpc_getpeerinfo_deprecation.py',
'rpc_help.py',
'rpc_invalid_address_message.py',
'feature_help.py',
'feature_shutdown.py',
'p2p_ibd_txrelay.py',
# Don't append tests at the end to avoid merge conflicts
# Put them in a random line within the section that fits their approximate run-time
]
# Place EXTENDED_SCRIPTS first since it contains the longest-running tests
ALL_SCRIPTS = EXTENDED_SCRIPTS + BASE_SCRIPTS
NON_SCRIPTS = [
# These are python files that live in the functional tests directory, but are not test scripts.
"combine_logs.py",
"create_cache.py",
"test_runner.py",
]
def main():
# Parse arguments and pass through unrecognised args
parser = argparse.ArgumentParser(add_help=False,
usage='%(prog)s [test_runner.py options] [script options] [scripts]',
description=__doc__,
epilog='''
Help text and arguments for individual test script:''',
formatter_class=argparse.RawTextHelpFormatter)
parser.add_argument('--ansi', action='store_true', default=sys.stdout.isatty(), help="Use ANSI colors and dots in output (enabled by default when standard output is a TTY)")
parser.add_argument('--combinedlogslen', '-c', type=int, default=0, metavar='n', help='On failure, print a log (of length n lines) to the console, combined from the test framework and all test nodes.')
parser.add_argument('--coverage', action='store_true', help='generate a basic coverage report for the RPC interface')
parser.add_argument('--ci', action='store_true', help='Run checks and code that are usually only enabled in a continuous integration environment')
parser.add_argument('--exclude', '-x', help='specify a comma-separated-list of scripts to exclude.')
parser.add_argument('--extended', action='store_true', help='run the extended test suite in addition to the basic tests')
parser.add_argument('--help', '-h', '-?', action='store_true', help='print help text and exit')
parser.add_argument('--jobs', '-j', type=int, default=4, help='how many test scripts to run in parallel. Default=4.')
parser.add_argument('--keepcache', '-k', action='store_true', help='the default behavior is to flush the cache directory on startup. --keepcache retains the cache from the previous testrun.')
parser.add_argument('--quiet', '-q', action='store_true', help='only print dots, results summary and failure logs')
parser.add_argument('--tmpdirprefix', '-t', default=tempfile.gettempdir(), help="Root directory for datadirs")
parser.add_argument('--failfast', action='store_true', help='stop execution after the first test failure')
parser.add_argument('--filter', help='filter scripts to run by regular expression')
args, unknown_args = parser.parse_known_args()
if not args.ansi:
global BOLD, GREEN, RED, GREY
BOLD = ("", "")
GREEN = ("", "")
RED = ("", "")
GREY = ("", "")
# args to be passed on always start with two dashes; tests are the remaining unknown args
tests = [arg for arg in unknown_args if arg[:2] != "--"]
passon_args = [arg for arg in unknown_args if arg[:2] == "--"]
# Read config generated by configure.
config = configparser.ConfigParser()
configfile = os.path.abspath(os.path.dirname(__file__)) + "/../config.ini"
config.read_file(open(configfile, encoding="utf8"))
passon_args.append("--configfile=%s" % configfile)
# Set up logging
logging_level = logging.INFO if args.quiet else logging.DEBUG
logging.basicConfig(format='%(message)s', level=logging_level)
# Create base test directory
tmpdir = "%s/test_runner_₿_🏃_%s" % (args.tmpdirprefix, datetime.datetime.now().strftime("%Y%m%d_%H%M%S"))
os.makedirs(tmpdir)
logging.debug("Temporary test directory at %s" % tmpdir)
    enable_zortcoind = config["components"].getboolean("ENABLE_ZORTCOIND")
if not enable_zortcoind:
print("No functional tests to run.")
print("Rerun ./configure with --with-daemon and then make")
sys.exit(0)
# Build list of tests
test_list = []
if tests:
# Individual tests have been specified. Run specified tests that exist
# in the ALL_SCRIPTS list. Accept names with or without a .py extension.
# Specified tests can contain wildcards, but in that case the supplied
# paths should be coherent, e.g. the same path as that provided to call
# test_runner.py. Examples:
# `test/functional/test_runner.py test/functional/wallet*`
# `test/functional/test_runner.py ./test/functional/wallet*`
# `test_runner.py wallet*`
# but not:
# `test/functional/test_runner.py wallet*`
# Multiple wildcards can be passed:
# `test_runner.py tool* mempool*`
for test in tests:
script = test.split("/")[-1]
script = script + ".py" if ".py" not in script else script
if script in ALL_SCRIPTS:
test_list.append(script)
else:
print("{}WARNING!{} Test '{}' not found in full test list.".format(BOLD[1], BOLD[0], test))
elif args.extended:
# Include extended tests
test_list += ALL_SCRIPTS
else:
# Run base tests only
test_list += BASE_SCRIPTS
# Remove the test cases that the user has explicitly asked to exclude.
if args.exclude:
exclude_tests = [test.split('.py')[0] for test in args.exclude.split(',')]
for exclude_test in exclude_tests:
# Remove <test_name>.py and <test_name>.py --arg from the test list
exclude_list = [test for test in test_list if test.split('.py')[0] == exclude_test]
for exclude_item in exclude_list:
test_list.remove(exclude_item)
if not exclude_list:
print("{}WARNING!{} Test '{}' not found in current test list.".format(BOLD[1], BOLD[0], exclude_test))
if args.filter:
test_list = list(filter(re.compile(args.filter).search, test_list))
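        # e.g. --filter="^wallet_" keeps only the wallet_* scripts (illustrative pattern)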
if not test_list:
print("No valid test scripts specified. Check that your test is in one "
"of the test lists in test_runner.py, or run test_runner.py with no arguments to run all tests")
sys.exit(0)
if args.help:
# Print help for test_runner.py, then print help of the first script (with args removed) and exit.
parser.print_help()
subprocess.check_call([sys.executable, os.path.join(config["environment"]["SRCDIR"], 'test', 'functional', test_list[0].split()[0]), '-h'])
sys.exit(0)
check_script_list(src_dir=config["environment"]["SRCDIR"], fail_on_warn=args.ci)
check_script_prefixes()
if not args.keepcache:
shutil.rmtree("%s/test/cache" % config["environment"]["BUILDDIR"], ignore_errors=True)
run_tests(
test_list=test_list,
src_dir=config["environment"]["SRCDIR"],
build_dir=config["environment"]["BUILDDIR"],
tmpdir=tmpdir,
jobs=args.jobs,
enable_coverage=args.coverage,
args=passon_args,
combined_logs_len=args.combinedlogslen,
failfast=args.failfast,
use_term_control=args.ansi,
)
def run_tests(*, test_list, src_dir, build_dir, tmpdir, jobs=1, enable_coverage=False, args=None, combined_logs_len=0, failfast=False, use_term_control):
args = args or []
# Warn if zortcoind is already running
try:
# pgrep exits with code zero when one or more matching processes found
if subprocess.run(["pgrep", "-x", "zortcoind"], stdout=subprocess.DEVNULL).returncode == 0:
print("%sWARNING!%s There is already a zortcoind process running on this system. Tests may fail unexpectedly due to resource contention!" % (BOLD[1], BOLD[0]))
except OSError:
# pgrep not supported
pass
# Warn if there is a cache directory
cache_dir = "%s/test/cache" % build_dir
if os.path.isdir(cache_dir):
print("%sWARNING!%s There is a cache directory here: %s. If tests fail unexpectedly, try deleting the cache directory." % (BOLD[1], BOLD[0], cache_dir))
# Test Framework Tests
print("Running Unit Tests for Test Framework Modules")
test_framework_tests = unittest.TestSuite()
for module in TEST_FRAMEWORK_MODULES:
test_framework_tests.addTest(unittest.TestLoader().loadTestsFromName("test_framework.{}".format(module)))
result = unittest.TextTestRunner(verbosity=1, failfast=True).run(test_framework_tests)
if not result.wasSuccessful():
logging.debug("Early exiting after failure in TestFramework unit tests")
sys.exit(False)
tests_dir = src_dir + '/test/functional/'
flags = ['--cachedir={}'.format(cache_dir)] + args
if enable_coverage:
coverage = RPCCoverage()
flags.append(coverage.flag)
logging.debug("Initializing coverage directory at %s" % coverage.dir)
else:
coverage = None
if len(test_list) > 1 and jobs > 1:
# Populate cache
try:
subprocess.check_output([sys.executable, tests_dir + 'create_cache.py'] + flags + ["--tmpdir=%s/cache" % tmpdir])
except subprocess.CalledProcessError as e:
sys.stdout.buffer.write(e.output)
raise
#Run Tests
job_queue = TestHandler(
num_tests_parallel=jobs,
tests_dir=tests_dir,
tmpdir=tmpdir,
test_list=test_list,
flags=flags,
use_term_control=use_term_control,
)
start_time = time.time()
test_results = []
max_len_name = len(max(test_list, key=len))
test_count = len(test_list)
for i in range(test_count):
test_result, testdir, stdout, stderr = job_queue.get_next()
test_results.append(test_result)
done_str = "{}/{} - {}{}{}".format(i + 1, test_count, BOLD[1], test_result.name, BOLD[0])
if test_result.status == "Passed":
logging.debug("%s passed, Duration: %s s" % (done_str, test_result.time))
elif test_result.status == "Skipped":
logging.debug("%s skipped" % (done_str))
else:
print("%s failed, Duration: %s s\n" % (done_str, test_result.time))
print(BOLD[1] + 'stdout:\n' + BOLD[0] + stdout + '\n')
print(BOLD[1] + 'stderr:\n' + BOLD[0] + stderr + '\n')
if combined_logs_len and os.path.isdir(testdir):
# Print the final `combinedlogslen` lines of the combined logs
print('{}Combine the logs and print the last {} lines ...{}'.format(BOLD[1], combined_logs_len, BOLD[0]))
print('\n============')
print('{}Combined log for {}:{}'.format(BOLD[1], testdir, BOLD[0]))
print('============\n')
combined_logs_args = [sys.executable, os.path.join(tests_dir, 'combine_logs.py'), testdir]
if BOLD[0]:
combined_logs_args += ['--color']
combined_logs, _ = subprocess.Popen(combined_logs_args, universal_newlines=True, stdout=subprocess.PIPE).communicate()
print("\n".join(deque(combined_logs.splitlines(), combined_logs_len)))
if failfast:
logging.debug("Early exiting after test failure")
break
print_results(test_results, max_len_name, (int(time.time() - start_time)))
if coverage:
coverage_passed = coverage.report_rpc_coverage()
logging.debug("Cleaning up coverage data")
coverage.cleanup()
else:
coverage_passed = True
# Clear up the temp directory if all subdirectories are gone
if not os.listdir(tmpdir):
os.rmdir(tmpdir)
all_passed = all(map(lambda test_result: test_result.was_successful, test_results)) and coverage_passed
# This will be a no-op unless failfast is True in which case there may be dangling
# processes which need to be killed.
job_queue.kill_and_join()
sys.exit(not all_passed)
def print_results(test_results, max_len_name, runtime):
results = "\n" + BOLD[1] + "%s | %s | %s\n\n" % ("TEST".ljust(max_len_name), "STATUS ", "DURATION") + BOLD[0]
test_results.sort(key=TestResult.sort_key)
all_passed = True
time_sum = 0
for test_result in test_results:
all_passed = all_passed and test_result.was_successful
time_sum += test_result.time
test_result.padding = max_len_name
results += str(test_result)
status = TICK + "Passed" if all_passed else CROSS + "Failed"
if not all_passed:
results += RED[1]
results += BOLD[1] + "\n%s | %s | %s s (accumulated) \n" % ("ALL".ljust(max_len_name), status.ljust(9), time_sum) + BOLD[0]
if not all_passed:
results += RED[0]
results += "Runtime: %s s\n" % (runtime)
print(results)
class TestHandler:
"""
Trigger the test scripts passed in via the list.
"""
def __init__(self, *, num_tests_parallel, tests_dir, tmpdir, test_list, flags, use_term_control):
assert num_tests_parallel >= 1
self.num_jobs = num_tests_parallel
self.tests_dir = tests_dir
self.tmpdir = tmpdir
self.test_list = test_list
self.flags = flags
self.num_running = 0
self.jobs = []
self.use_term_control = use_term_control
def get_next(self):
while self.num_running < self.num_jobs and self.test_list:
# Add tests
self.num_running += 1
test = self.test_list.pop(0)
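            # len(self.test_list) shrinks with each pop, so every test gets a distinct port seed,
            # presumably keeping parallel jobs from colliding on the same ports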
portseed = len(self.test_list)
portseed_arg = ["--portseed={}".format(portseed)]
log_stdout = tempfile.SpooledTemporaryFile(max_size=2**16)
log_stderr = tempfile.SpooledTemporaryFile(max_size=2**16)
test_argv = test.split()
testdir = "{}/{}_{}".format(self.tmpdir, re.sub(".py$", "", test_argv[0]), portseed)
tmpdir_arg = ["--tmpdir={}".format(testdir)]
self.jobs.append((test,
time.time(),
subprocess.Popen([sys.executable, self.tests_dir + test_argv[0]] + test_argv[1:] + self.flags + portseed_arg + tmpdir_arg,
universal_newlines=True,
stdout=log_stdout,
stderr=log_stderr),
testdir,
log_stdout,
log_stderr))
if not self.jobs:
raise IndexError('pop from empty list')
# Print remaining running jobs when all jobs have been started.
if not self.test_list:
print("Remaining jobs: [{}]".format(", ".join(j[0] for j in self.jobs)))
dot_count = 0
while True:
# Return first proc that finishes
time.sleep(.5)
for job in self.jobs:
(name, start_time, proc, testdir, log_out, log_err) = job
if proc.poll() is not None:
log_out.seek(0), log_err.seek(0)
[stdout, stderr] = [log_file.read().decode('utf-8') for log_file in (log_out, log_err)]
log_out.close(), log_err.close()
if proc.returncode == TEST_EXIT_PASSED and stderr == "":
status = "Passed"
elif proc.returncode == TEST_EXIT_SKIPPED:
status = "Skipped"
else:
status = "Failed"
self.num_running -= 1
self.jobs.remove(job)
if self.use_term_control:
clearline = '\r' + (' ' * dot_count) + '\r'
print(clearline, end='', flush=True)
dot_count = 0
return TestResult(name, status, int(time.time() - start_time)), testdir, stdout, stderr
if self.use_term_control:
print('.', end='', flush=True)
dot_count += 1
def kill_and_join(self):
"""Send SIGKILL to all jobs and block until all have ended."""
procs = [i[2] for i in self.jobs]
for proc in procs:
proc.kill()
for proc in procs:
proc.wait()
class TestResult():
def __init__(self, name, status, time):
self.name = name
self.status = status
self.time = time
self.padding = 0
def sort_key(self):
if self.status == "Passed":
return 0, self.name.lower()
elif self.status == "Failed":
return 2, self.name.lower()
elif self.status == "Skipped":
return 1, self.name.lower()
def __repr__(self):
if self.status == "Passed":
color = GREEN
glyph = TICK
elif self.status == "Failed":
color = RED
glyph = CROSS
elif self.status == "Skipped":
color = GREY
glyph = CIRCLE
return color[1] + "%s | %s%s | %s s\n" % (self.name.ljust(self.padding), glyph, self.status.ljust(7), self.time) + color[0]
@property
def was_successful(self):
return self.status != "Failed"
def check_script_prefixes():
"""Check that test scripts start with one of the allowed name prefixes."""
good_prefixes_re = re.compile("^(example|feature|interface|mempool|mining|p2p|rpc|wallet|tool)_")
bad_script_names = [script for script in ALL_SCRIPTS if good_prefixes_re.match(script) is None]
if bad_script_names:
print("%sERROR:%s %d tests not meeting naming conventions:" % (BOLD[1], BOLD[0], len(bad_script_names)))
print(" %s" % ("\n ".join(sorted(bad_script_names))))
raise AssertionError("Some tests are not following naming convention!")
def check_script_list(*, src_dir, fail_on_warn):
"""Check scripts directory.
Check that there are no scripts in the functional tests directory which are
not being run by pull-tester.py."""
script_dir = src_dir + '/test/functional/'
python_files = set([test_file for test_file in os.listdir(script_dir) if test_file.endswith(".py")])
missed_tests = list(python_files - set(map(lambda x: x.split()[0], ALL_SCRIPTS + NON_SCRIPTS)))
if len(missed_tests) != 0:
print("%sWARNING!%s The following scripts are not being run: %s. Check the test lists in test_runner.py." % (BOLD[1], BOLD[0], str(missed_tests)))
if fail_on_warn:
# On CI this warning is an error to prevent merging incomplete commits into master
sys.exit(1)
class RPCCoverage():
"""
Coverage reporting utilities for test_runner.
Coverage calculation works by having each test script subprocess write
coverage files into a particular directory. These files contain the RPC
commands invoked during testing, as well as a complete listing of RPC
commands per `zortcoin-cli help` (`rpc_interface.txt`).
After all tests complete, the commands run are combined and diff'd against
the complete list to calculate uncovered RPC commands.
See also: test/functional/test_framework/coverage.py
"""
def __init__(self):
self.dir = tempfile.mkdtemp(prefix="coverage")
self.flag = '--coveragedir=%s' % self.dir
def report_rpc_coverage(self):
"""
Print out RPC commands that were unexercised by tests.
"""
uncovered = self._get_uncovered_rpc_commands()
if uncovered:
print("Uncovered RPC commands:")
print("".join((" - %s\n" % command) for command in sorted(uncovered)))
return False
else:
print("All RPC commands covered.")
return True
def cleanup(self):
return shutil.rmtree(self.dir)
def _get_uncovered_rpc_commands(self):
"""
Return a set of currently untested RPC commands.
"""
# This is shared from `test/functional/test_framework/coverage.py`
reference_filename = 'rpc_interface.txt'
coverage_file_prefix = 'coverage.'
coverage_ref_filename = os.path.join(self.dir, reference_filename)
coverage_filenames = set()
all_cmds = set()
# Consider RPC generate covered, because it is overloaded in
# test_framework/test_node.py and not seen by the coverage check.
covered_cmds = set({'generate'})
if not os.path.isfile(coverage_ref_filename):
raise RuntimeError("No coverage reference found")
with open(coverage_ref_filename, 'r', encoding="utf8") as coverage_ref_file:
all_cmds.update([line.strip() for line in coverage_ref_file.readlines()])
for root, _, files in os.walk(self.dir):
for filename in files:
if filename.startswith(coverage_file_prefix):
coverage_filenames.add(os.path.join(root, filename))
for filename in coverage_filenames:
with open(filename, 'r', encoding="utf8") as coverage_file:
covered_cmds.update([line.strip() for line in coverage_file.readlines()])
return all_cmds - covered_cmds
if __name__ == '__main__':
main()
| [
"[email protected]"
] | |
7985931ad924c0fffe6fde629612ec580893d2ec | 8f48d12b88048e424ebb0d72ca6dfab5cf12ae0f | /0600_0999/917.py | 032ca01c50e412e01c507c9dde9a100310423937 | [] | no_license | renjieliu/leetcode | e1caf13c18a8107ed9252588b339fb76bcb1b246 | 4668b64fcb9320b6c316d8608fc61911ce43b6c7 | refs/heads/master | 2023-03-18T18:16:06.187741 | 2023-03-14T20:31:59 | 2023-03-14T20:31:59 | 128,823,819 | 7 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,765 | py | class Solution: # RL 20210914: 2 pointers approach
def reverseOnlyLetters(self, s: str) -> str:
s = list(s)
l = 0
r = len(s)-1
letter = lambda x: 1 if (65<=ord(x)<=90 or 97 <= ord(x)<=122) else 0
while l <= r: #2 pointers, once meet letters on the left and right, just swap them
while l < r and letter(s[l]) == 0:
l+=1
while r > l and letter(s[r]) == 0:
r-=1
s[l], s[r] = s[r], s[l]
l+=1
r-=1
# print(s)
return "".join(s)
# previous approach
# class Solution:
# def reverseOnlyLetters(self, s: str) -> str:
# tmp = ""
# s = list(s)
# for i in range(len(s)):
# curr = s[i]
# if 65 <= ord(curr) <= 90 or 97<=ord(curr) <=122:
# tmp+=curr
# s[i] = "A"
# tmp = list(tmp)
# for i in range(len(s)):
# if s[i] == "A":
# s[i] = tmp.pop() #pop from the tail for reverse
#
# return "".join(s)
# previous approach
# def reverseOnlyLetters(S: 'str'):
# temp = ""
# for i in S:
# if 65<=ord(i)<=90 or 97 <= ord(i) <=122:
# temp += i
#
# temp = temp [::-1] #reverse
# output = ""
# curr = 0
# for i in S:
# if not(65<=ord(i)<=90 or 97 <= ord(i) <=122): #if it's not a letter, then put it to the current position
# output+=i
# else:
# output+=temp[curr] #if it's a letter, then find the letter in the reversed string.
# curr+=1
#
# return output
#
# print(reverseOnlyLetters("ab-cd"))
# print(reverseOnlyLetters("a-bC-dEf-ghIj"))
# print(reverseOnlyLetters("Test1ng-Leet=code-Q!"))
| [
"[email protected]"
] | |
1696cb9419dcb9e751dbe8da54a9e2b67dc2f20e | 1f09834b125de2da3060af78d92012a7ddc16c75 | /gusto/settings.py | 91b8fd7a35d2c224357d9dae1dbcefa638719188 | [] | no_license | letsy1337/gusto | e8db9459146c49911935fcf467b4046060468af5 | 4dc45449d79674d7e0ed304315e05f9ec9c7d395 | refs/heads/main | 2023-03-03T00:43:26.252307 | 2021-02-13T14:39:44 | 2021-02-13T14:39:44 | 330,979,000 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,166 | py | """
Django settings for gusto project.
Generated by 'django-admin startproject' using Django 3.1.5.
For more information on this file, see
https://docs.djangoproject.com/en/3.1/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/3.1/ref/settings/
"""
from pathlib import Path
import os
# Build paths inside the project like this: BASE_DIR / 'subdir'.
BASE_DIR = Path(__file__).resolve().parent.parent
DB_NAME = os.environ.get('DB_NAME')
DB_PASSWORD = os.environ.get('DB_PASSWORD')
DB_HOST = os.environ.get('DB_HOST')
DB_USER = os.environ.get('DB_USER')
SECRET_KEY = os.environ.get('SECRET_KEY')
# BASE_DIR = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.1/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
# SECRET_KEY = 'm-cr%^%eeylqe_^dm_w)c=n6m)j^ab8m38z889^7ad00)0#c%y'
SECRET_KEY = SECRET_KEY
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = ['gusto-stud.herokuapp.com']
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'main_gusto',
'menu_gusto',
'events_gusto',
'users_messages',
'accounts'
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'django.middleware.security.SecurityMiddleware',
'whitenoise.middleware.WhiteNoiseMiddleware',
]
ROOT_URLCONF = 'gusto.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [BASE_DIR / 'templates']
,
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'gusto.wsgi.application'
# Database
# https://docs.djangoproject.com/en/3.1/ref/settings/#databases
# DATABASES = {
# 'default': {
# 'ENGINE': 'django.db.backends.sqlite3',
# 'NAME': BASE_DIR / 'db.sqlite3',
# }
# }
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql',
'NAME': DB_NAME,
'USER': DB_USER,
'PASSWORD': DB_PASSWORD,
'HOST': DB_HOST,
'PORT': '5432',
}
}
import dj_database_url
db = dj_database_url.config()
DATABASES['default'].update(db)
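# dj_database_url.config() reads the DATABASE_URL environment variable (e.g. on Heroku);
# when it is unset, the update above is a no-op and the explicit PostgreSQL settings are kept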
# Password validation
# https://docs.djangoproject.com/en/3.1/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/3.1/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.1/howto/static-files/
MEDIA_URL = '/media/'
MEDIA_ROOT = os.path.join(BASE_DIR, 'media')
STATIC_ROOT = os.path.join(BASE_DIR, 'staticfiles')
STATIC_URL = '/static/'
STATICFILES_DIRS = (
os.path.join(BASE_DIR, 'static'),
)
# Extra places for collectstatic to find static files.
| [
"[email protected]"
] | |
ad58ed10f0a6e5ccb17585b08ed905795c5bdd48 | 1c9999d51cfe4491685ec6eb865c299251f4fd60 | /1-b-lstm-return-sequences-states/__init__.py | 3e4ec51525567476778abb17f805e0aa729f2c47 | [] | no_license | bjbluejita/deep-learning-notebook | d7a081443eda570eb6d36fd7d2983f6077b43962 | 0e3598a20214dd78deb4f5e6809f7789722f6f5d | refs/heads/master | 2023-05-27T15:29:32.639505 | 2023-05-14T02:16:59 | 2023-05-14T02:16:59 | 252,683,247 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 147 | py | '''
@Project: deep-learning-with-keras-notebooks
@Package
@author: ly
@date Date: 2019-02-22 10:19
@Description:
@URL:
@version: V1.0
''' | [
"[email protected]"
] | |
86ad171ff6405302c17d09989ab9eb6063e09ce9 | 52e6310fab09209583c075f42963099858fb1f4f | /backend/mobile_8_oct_dev_12930/settings.py | ce46b616137bb9169a90c984380402b2c16c074e | [] | no_license | crowdbotics-apps/mobile-8-oct-dev-12930 | 01f002d825b9be091d6a736549654ccc8805d3e1 | 35f5acfac8580c158145aeb51f1a42a911bd4417 | refs/heads/master | 2022-12-26T14:34:01.544851 | 2020-10-08T14:09:00 | 2020-10-08T14:09:00 | 302,234,786 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,112 | py | """
Django settings for mobile_8_oct_dev_12930 project.
Generated by 'django-admin startproject' using Django 2.2.2.
For more information on this file, see
https://docs.djangoproject.com/en/2.2/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.2/ref/settings/
"""
import os
import environ
import logging
env = environ.Env()
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = env.bool("DEBUG", default=False)
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.2/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = env.str("SECRET_KEY")
ALLOWED_HOSTS = env.list("HOST", default=["*"])
SITE_ID = 1
SECURE_PROXY_SSL_HEADER = ("HTTP_X_FORWARDED_PROTO", "https")
SECURE_SSL_REDIRECT = env.bool("SECURE_REDIRECT", default=False)
# Application definition
INSTALLED_APPS = [
"django.contrib.admin",
"django.contrib.auth",
"django.contrib.contenttypes",
"django.contrib.sessions",
"django.contrib.messages",
"django.contrib.staticfiles",
"django.contrib.sites",
]
LOCAL_APPS = [
"home",
"users.apps.UsersConfig",
]
THIRD_PARTY_APPS = [
"rest_framework",
"rest_framework.authtoken",
"rest_auth",
"rest_auth.registration",
"bootstrap4",
"allauth",
"allauth.account",
"allauth.socialaccount",
"allauth.socialaccount.providers.google",
"django_extensions",
"drf_yasg",
# start fcm_django push notifications
"fcm_django",
# end fcm_django push notifications
]
INSTALLED_APPS += LOCAL_APPS + THIRD_PARTY_APPS
MIDDLEWARE = [
"django.middleware.security.SecurityMiddleware",
"django.contrib.sessions.middleware.SessionMiddleware",
"django.middleware.common.CommonMiddleware",
"django.middleware.csrf.CsrfViewMiddleware",
"django.contrib.auth.middleware.AuthenticationMiddleware",
"django.contrib.messages.middleware.MessageMiddleware",
"django.middleware.clickjacking.XFrameOptionsMiddleware",
]
ROOT_URLCONF = "mobile_8_oct_dev_12930.urls"
TEMPLATES = [
{
"BACKEND": "django.template.backends.django.DjangoTemplates",
"DIRS": [],
"APP_DIRS": True,
"OPTIONS": {
"context_processors": [
"django.template.context_processors.debug",
"django.template.context_processors.request",
"django.contrib.auth.context_processors.auth",
"django.contrib.messages.context_processors.messages",
],
},
},
]
WSGI_APPLICATION = "mobile_8_oct_dev_12930.wsgi.application"
# Database
# https://docs.djangoproject.com/en/2.2/ref/settings/#databases
DATABASES = {
"default": {
"ENGINE": "django.db.backends.sqlite3",
"NAME": os.path.join(BASE_DIR, "db.sqlite3"),
}
}
if env.str("DATABASE_URL", default=None):
DATABASES = {"default": env.db()}
# Password validation
# https://docs.djangoproject.com/en/2.2/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
"NAME": "django.contrib.auth.password_validation.UserAttributeSimilarityValidator",
},
{
"NAME": "django.contrib.auth.password_validation.MinimumLengthValidator",
},
{
"NAME": "django.contrib.auth.password_validation.CommonPasswordValidator",
},
{
"NAME": "django.contrib.auth.password_validation.NumericPasswordValidator",
},
]
# Internationalization
# https://docs.djangoproject.com/en/2.2/topics/i18n/
LANGUAGE_CODE = "en-us"
TIME_ZONE = "UTC"
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.2/howto/static-files/
STATIC_URL = "/static/"
MIDDLEWARE += ["whitenoise.middleware.WhiteNoiseMiddleware"]
AUTHENTICATION_BACKENDS = (
"django.contrib.auth.backends.ModelBackend",
"allauth.account.auth_backends.AuthenticationBackend",
)
STATIC_ROOT = os.path.join(BASE_DIR, "staticfiles")
STATICFILES_DIRS = [os.path.join(BASE_DIR, "static")]
STATICFILES_STORAGE = "whitenoise.storage.CompressedManifestStaticFilesStorage"
# allauth / users
ACCOUNT_EMAIL_REQUIRED = True
ACCOUNT_AUTHENTICATION_METHOD = "email"
ACCOUNT_USERNAME_REQUIRED = False
ACCOUNT_EMAIL_VERIFICATION = "optional"
ACCOUNT_CONFIRM_EMAIL_ON_GET = True
ACCOUNT_LOGIN_ON_EMAIL_CONFIRMATION = True
ACCOUNT_UNIQUE_EMAIL = True
LOGIN_REDIRECT_URL = "users:redirect"
ACCOUNT_ADAPTER = "users.adapters.AccountAdapter"
SOCIALACCOUNT_ADAPTER = "users.adapters.SocialAccountAdapter"
ACCOUNT_ALLOW_REGISTRATION = env.bool("ACCOUNT_ALLOW_REGISTRATION", True)
SOCIALACCOUNT_ALLOW_REGISTRATION = env.bool("SOCIALACCOUNT_ALLOW_REGISTRATION", True)
REST_AUTH_SERIALIZERS = {
# Replace password reset serializer to fix 500 error
"PASSWORD_RESET_SERIALIZER": "home.api.v1.serializers.PasswordSerializer",
}
REST_AUTH_REGISTER_SERIALIZERS = {
# Use custom serializer that has no username and matches web signup
"REGISTER_SERIALIZER": "home.api.v1.serializers.SignupSerializer",
}
# Custom user model
AUTH_USER_MODEL = "users.User"
EMAIL_HOST = env.str("EMAIL_HOST", "smtp.sendgrid.net")
EMAIL_HOST_USER = env.str("SENDGRID_USERNAME", "")
EMAIL_HOST_PASSWORD = env.str("SENDGRID_PASSWORD", "")
EMAIL_PORT = 587
EMAIL_USE_TLS = True
# start fcm_django push notifications
FCM_DJANGO_SETTINGS = {"FCM_SERVER_KEY": env.str("FCM_SERVER_KEY", "")}
# end fcm_django push notifications
# Swagger settings for api docs
SWAGGER_SETTINGS = {
"DEFAULT_INFO": f"{ROOT_URLCONF}.api_info",
}
if DEBUG or not (EMAIL_HOST_USER and EMAIL_HOST_PASSWORD):
# output email to console instead of sending
if not DEBUG:
logging.warning(
"You should setup `SENDGRID_USERNAME` and `SENDGRID_PASSWORD` env vars to send emails."
)
EMAIL_BACKEND = "django.core.mail.backends.console.EmailBackend"
| [
"[email protected]"
] | |
9148d6a0ecbe711f958235eefce04d3ff1b0e9db | 53fab060fa262e5d5026e0807d93c75fb81e67b9 | /backup/user_255/ch129_2020_04_01_18_23_34_386742.py | a7848f92f19eb5a4c9eeb1c5bb902fe5b642b7c0 | [] | no_license | gabriellaec/desoft-analise-exercicios | b77c6999424c5ce7e44086a12589a0ad43d6adca | 01940ab0897aa6005764fc220b900e4d6161d36b | refs/heads/main | 2023-01-31T17:19:42.050628 | 2020-12-16T05:21:31 | 2020-12-16T05:21:31 | 306,735,108 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 156 | py | def verifica_quadrado_perfeito(n):
x=1
while n>0:
n-=x
x+=2
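    # subtracting the odd numbers 1, 3, 5, ... in turn: only a perfect square reaches exactly 0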
    return n == 0 | [
"[email protected]"
] | |
fa20b747629dca817cd808fb369cabbc10565862 | 219992b56f8e5cd8b47534d98417dd8ac795110b | /com/ibm/testing/dict&file.py | 74572cc02da9e8ce622b4624a64b89f3d6598af0 | [] | no_license | haohaixingyun/dig-python | 63844877de0acad04d07d7119e381b9bb4a97395 | 4e8c3e3cb1ba98f39d65095b4d3b09ba115e586b | refs/heads/master | 2021-01-13T08:45:59.669829 | 2016-10-26T05:54:07 | 2016-10-26T05:54:07 | 71,970,759 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 553 | py | '''
Created on Mar 30, 2016
@author: yunxinghai
'''
def main():
dicts = {}
dicts['a'] = 'alpha'
dicts['g'] = 'gamma'
dicts['o'] = 'omega'
print dicts
print dicts.keys()
if 'a' in dicts:
print dicts['a']
for key in dicts :
print key
print dicts[key]
    f = open(r'C:\workplacebus\business\yunxinghai_Ethan\PROD_DIMNSN\rshr1.prod_dimnsn_debug.tbl.sql', 'rU')
for line in f:
print line
f.close()
if __name__ == '__main__':
main() | [
"[email protected]"
] | |
c87ea613a666f0beff7a49aabdc7c5360e990c10 | 94ca446c0f17d640f45941fa7c83530ef2fbc099 | /wrs-remote-clients-2.0.2/distributedcloud-client-1.0.0/kingbirdclient/commands/v1/quota_manager.py | 808b321b524a455161e55daba05f244193e5038f | [
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | rmoorewrs/tic-windows-remote-clients | c1c2b8924e90ffd2951571bc098ec9873ffd3988 | ae16ee78a720852304d79f8b86dfe44e920cc72d | refs/heads/master | 2023-05-25T13:55:55.603100 | 2019-05-31T20:59:28 | 2019-05-31T20:59:28 | 189,649,925 | 0 | 0 | NOASSERTION | 2023-05-22T20:43:59 | 2019-05-31T19:46:28 | Python | UTF-8 | Python | false | false | 8,943 | py | # Copyright (c) 2016 Ericsson AB.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from osc_lib.command import command
from kingbirdclient.commands.v1 import base
from kingbirdclient import exceptions
def format(quotas=None):
columns = (
'Quota',
'Limit'
)
if quotas:
data = (
quotas._data,
quotas._Limit,
)
else:
data = (tuple('<none>' for _ in range(len(columns))),)
return columns, data
def detailformat(quotas=None):
columns = (
'Quota',
'Usage',
'Limit',
)
if quotas:
data = (
quotas._data,
quotas._Usage,
quotas._Limit,
)
else:
data = (tuple('<none>' for _ in range(len(columns))),)
return columns, data
class ListDefaults(base.KingbirdLister):
"""List all default quotas."""
def _get_format_function(self):
return format
def _get_resources(self, parsed_args):
kingbird_client = self.app.client_manager.sync_engine
return kingbird_client.quota_manager.list_defaults()
class GlobalLimits(base.KingbirdLister):
"""Lists the global limit of a tenant."""
def _get_format_function(self):
return format
def get_parser(self, parsed_args):
parser = super(GlobalLimits, self).get_parser(parsed_args)
parser.add_argument(
'--tenant',
help='Lists global limit of a specified tenant-id.'
' Admin tenant can perform this operation.'
)
return parser
def _get_resources(self, parsed_args):
kingbird_client = self.app.client_manager.sync_engine
target_tenant_id = parsed_args.tenant
return kingbird_client.quota_manager.global_limits(target_tenant_id)
class UpdateGlobalLimits(base.KingbirdLister):
"""Update the quotas for a tenant."""
def _get_format_function(self):
return format
def get_parser(self, parsed_args):
parser = super(UpdateGlobalLimits, self).get_parser(parsed_args)
parser.add_argument(
'tenant',
            help='ID of tenant to set the quotas.'
)
parser.add_argument(
'--metadata_items',
help='New value for the "metadata-items" quota'
)
parser.add_argument(
'--subnet',
help='New value for the "subnet" quota'
)
parser.add_argument(
'--network',
help='New value for the "network" quota'
)
parser.add_argument(
'--floatingip',
help='New value for the "floatingip" quota'
)
parser.add_argument(
'--gigabytes',
help='New value for the "gigabytes" quota'
)
parser.add_argument(
'--backup_gigabytes',
help='New value for the "backup_gigabytes" quota'
)
parser.add_argument(
'--ram',
help='New value for the "ram" quota'
)
parser.add_argument(
'--floating_ips',
help='New value for the "floating_ips" quota'
)
parser.add_argument(
'--snapshots',
help='New value for the "snapshots" quota'
)
parser.add_argument(
'--security_group_rule',
help='New value for the "security_group_rule" quota'
)
parser.add_argument(
'--instances',
help='New value for the "instances" quota'
)
parser.add_argument(
'--key_pairs',
help='New value for the "key_pairs" quota'
)
parser.add_argument(
'--volumes',
help='New value for the "volumes" quota'
)
parser.add_argument(
'--router',
help='New value for the "router" quota'
)
parser.add_argument(
'--security_group',
help='New value for the "security_group" quota'
)
parser.add_argument(
'--cores',
help='New value for the "cores" quota'
)
parser.add_argument(
'--backups',
help='New value for the "backups" quota'
)
parser.add_argument(
'--fixed_ips',
help='New value for the "fixed_ips" quota'
)
parser.add_argument(
'--port',
help='New value for the "port" quota'
)
parser.add_argument(
'--security_groups',
help='New value for the "security_groups" quota'
)
return parser
def _get_resources(self, parsed_args):
target_tenant_id = parsed_args.tenant
kingbird_client = self.app.client_manager.sync_engine
kwargs = {
"metadata_items": parsed_args.metadata_items,
"subnet": parsed_args.subnet,
"network": parsed_args.network,
"floatingip": parsed_args.floatingip,
"gigabytes": parsed_args.gigabytes,
"backup_gigabytes": parsed_args.backup_gigabytes,
"ram": parsed_args.ram,
"floating_ips": parsed_args.floating_ips,
"snapshots": parsed_args.snapshots,
"security_group_rule": parsed_args.security_group_rule,
"instances": parsed_args.instances,
"key_pairs": parsed_args.key_pairs,
"volumes": parsed_args.volumes,
"router": parsed_args.router,
"security_group": parsed_args.security_group,
"cores": parsed_args.cores,
"backups": parsed_args.backups,
"fixed_ips": parsed_args.fixed_ips,
"port": parsed_args.port,
"security_groups": parsed_args.security_groups
}
return kingbird_client.quota_manager.\
update_global_limits(target_tenant_id, **kwargs)
class ShowQuotaDetail(base.KingbirdLister):
"""List the Detail limit for a tenant."""
def _get_format_function(self):
return detailformat
def get_parser(self, parsed_args):
parser = super(ShowQuotaDetail, self).get_parser(parsed_args)
parser.add_argument(
'--tenant',
help='Lists global limit of a specified tenant-id.'
' Admin tenant can perform this operation.'
)
return parser
def _get_resources(self, parsed_args):
kingbird_client = self.app.client_manager.sync_engine
target_tenant_id = parsed_args.tenant
return kingbird_client.quota_manager.quota_detail(target_tenant_id)
class SyncQuota(command.Command):
"""On Demand quota sync for a tenant."""
def get_parser(self, prog_name):
parser = super(SyncQuota, self).get_parser(prog_name)
parser.add_argument(
'tenant',
            help='ID of tenant to sync quotas.'
)
return parser
def take_action(self, parsed_args):
kingbird_client = self.app.client_manager.sync_engine
target_tenant = parsed_args.tenant
try:
kingbird_client.quota_manager.sync_quota(target_tenant)
print("Request to sync quota for tenant %s has been triggered." %
(parsed_args.tenant))
except Exception as e:
print(e)
error_msg = "Unable to sync quota for tenant %s." \
% (parsed_args.tenant)
raise exceptions.KingbirdClientException(error_msg)
class DeleteQuota(command.Command):
"""Delete quota for a tenant."""
def get_parser(self, prog_name):
parser = super(DeleteQuota, self).get_parser(prog_name)
parser.add_argument(
'tenant',
help='ID of tenant to delete quotas.'
)
return parser
def take_action(self, parsed_args):
kingbird_client = self.app.client_manager.sync_engine
target_tenant = parsed_args.tenant
try:
kingbird_client.quota_manager.\
delete_quota(target_tenant)
print("Request to delete quotas"
" for tenant %s has been accepted." %
(parsed_args.tenant))
except Exception as e:
print(e)
error_msg = "Unable to delete quota for specified resource."
raise exceptions.KingbirdClientException(error_msg)
| [
"[email protected]"
] | |
494c300b90dcab7df2391d17dba61414da9b7717 | 6f56da8db171d4a6c006b5d944437bf061069faf | /XCat.v.0.0.1/source/XCat_Output/XCat_Report/XCat_Report.py | 0ad012e217548f3517e089f1db38b8443e388b19 | [] | no_license | afarahi/XCat | 16819bef7087e994907c413dd6331cdebde72ffb | 498602eb7f61696d169f071185115345c68bcf86 | refs/heads/master | 2021-01-21T01:59:36.907059 | 2013-05-03T05:12:07 | 2013-05-03T05:12:07 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 628 | py | import subprocess
import os
import shlex
from XCat_tex_Construction import Report_File
def Creat_Report(Halo,Input,Output):
print "Creating report ..."
Report_File(Halo,Input,Output)
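    # run pdflatex twice so cross-references and the table of contents resolve on the second pass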
proc=subprocess.Popen(shlex.split('pdflatex Report/report.tex'))
proc.communicate()
proc=subprocess.Popen(shlex.split('pdflatex Report/report.tex'))
proc.communicate()
proc=subprocess.Popen(shlex.split('cp report.pdf Report'))
proc.communicate()
os.unlink('report.aux')
os.unlink('report.log')
os.unlink('report.out')
os.unlink('report.pdf')
print "Report is created successfully."
| [
"[email protected]"
] | |
9c9a52c1aeac8287ffe746dd8321a6e56ee87c08 | c65af972b843e4f11a9aa9005104ac54a283032d | /practice4/database1.py | 51320f63d12e8e845b44c63342cb448e4382cbd0 | [] | no_license | ljeleven/mypython | a63438c4246606082f000967a5d47256fa297aeb | b652338be3937543f0b35a9111dd0d346eb913b5 | refs/heads/master | 2023-05-24T19:30:37.001198 | 2020-04-09T15:40:40 | 2020-04-09T15:40:40 | 240,815,098 | 0 | 0 | null | 2023-05-22T22:41:00 | 2020-02-16T01:46:29 | Python | UTF-8 | Python | false | false | 282 | py | #__author:"longjin"
#date: 2019/7/16
# -*- coding: UTF-8 -*-
import sys
import pymysql
#建立连接
conn = pymysql.connect(db='test', user='root', passwd='123456', charset='utf8')
cur = conn.cursor()
sql = 'create table a(id int, hh varchar(30));'
cur.execute(sql)
conn.commit()
| [
"[email protected]"
] | |
13c763556353a32f1c5b4a0b92c8e72b0f38065a | acb8e84e3b9c987fcab341f799f41d5a5ec4d587 | /langs/2/gsu.py | 63b5d5079abcfe088d8c44bde67af7940ded787e | [] | no_license | G4te-Keep3r/HowdyHackers | 46bfad63eafe5ac515da363e1c75fa6f4b9bca32 | fb6d391aaecb60ab5c4650d4ae2ddd599fd85db2 | refs/heads/master | 2020-08-01T12:08:10.782018 | 2016-11-13T20:45:50 | 2016-11-13T20:45:50 | 73,624,224 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 486 | py | import sys
def printFunction(lineRemaining):
if lineRemaining[0] == '"' and lineRemaining[-1] == '"':
if len(lineRemaining) > 2:
#data to print
lineRemaining = lineRemaining[1:-1]
print ' '.join(lineRemaining)
else:
print
def main(fileName):
with open(fileName) as f:
for line in f:
data = line.split()
if data[0] == 'gsU':
printFunction(data[1:])
else:
print 'ERROR'
return
if __name__ == '__main__':
main(sys.argv[1]) | [
"[email protected]"
] | |
b5b17b33bb1e9becdce712f99761e03f6a3d7555 | d41c9d92dcf7a214ae2edc8780cc85cf08fed62e | /featuresynth/featuregenerator/upscale.py | 395cdc5391de7d10023cc4d7433b2afa4bb090fa | [] | no_license | JohnVinyard/music-synthesis | f57acce20345ff641e8d355848fbe7561fc74b4c | 0b3368e8f5f3d495c3ef4eb49087e2be2593a7ab | refs/heads/master | 2020-09-29T00:08:15.061156 | 2020-04-23T17:46:21 | 2020-04-23T17:46:21 | 226,898,712 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 9,173 | py | from torch import nn
from torch.nn import functional as F
import torch
from ..util.modules import DilatedStack
class OneDimensionalSpectrogramGenerator(nn.Module):
def __init__(self, out_channels, noise_dim):
super().__init__()
self.noise_dim = noise_dim
self.out_channels = out_channels
self.initial = nn.Linear(noise_dim, 8 * 1024)
self.main = nn.Sequential(
nn.ConvTranspose1d(1024, 512, 4, 2, 1), # 16
nn.ConvTranspose1d(512, 512, 4, 2, 1), # 32
nn.ConvTranspose1d(512, 512, 4, 2, 1), # 64
nn.ConvTranspose1d(512, 256, 4, 2, 1), # 128
nn.ConvTranspose1d(256, 256, 4, 2, 1), # 256
nn.ConvTranspose1d(256, 128, 4, 2, 1), # 512
)
def forward(self, x):
x = x.view(-1, self.noise_dim)
x = F.leaky_relu(self.initial(x), 0.2)
x = x.view(-1, 1024, 8)
for i, layer in enumerate(self.main):
if i == len(self.main) - 1:
x = layer(x)
else:
x = F.leaky_relu(layer(x), 0.2)
return x
class NearestNeighborOneDimensionalSpectrogramGenerator(nn.Module):
def __init__(self, out_channels, noise_dim):
super().__init__()
self.noise_dim = noise_dim
self.out_channels = out_channels
self.initial = nn.Linear(noise_dim, 8 * 1024)
self.main = nn.Sequential(
nn.Conv1d(1024, 1024, 3, 1, 1),
nn.LeakyReLU(0.2),
nn.Upsample(scale_factor=2), # 16
nn.Conv1d(1024, 512, 7, 1, 3),
nn.LeakyReLU(0.2),
nn.Upsample(scale_factor=2), # 32
nn.Conv1d(512, 512, 7, 1, 3),
nn.LeakyReLU(0.2),
nn.Upsample(scale_factor=2), # 64
nn.Conv1d(512, 256, 7, 1, 3),
nn.LeakyReLU(0.2),
nn.Upsample(scale_factor=2), # 128
nn.Conv1d(256, 256, 7, 1, 3),
nn.LeakyReLU(0.2),
nn.Upsample(scale_factor=2), # 256
nn.Conv1d(256, 256, 7, 1, 3),
nn.LeakyReLU(0.2),
nn.Upsample(scale_factor=2), # 512
nn.Conv1d(256, 256, 7, 1, 3),
nn.LeakyReLU(0.2),
nn.Conv1d(256, 128, 7, 1, 3),
)
def forward(self, x):
x = x.view(-1, self.noise_dim)
x = F.leaky_relu(self.initial(x), 0.2)
x = x.view(-1, 1024, 8)
x = self.main(x)
return x
class SpectrogramFeatureGenerator(nn.Module):
def __init__(self, out_channels, noise_dim):
super().__init__()
self.noise_dim = noise_dim
self.out_channels = out_channels
self.initial = nn.Linear(noise_dim, 4 * 4 * 1024)
self.stack = nn.Sequential(
nn.ConvTranspose2d(1024, 512, (4, 4), (2, 2), (1, 1)),
# (8, 8)
nn.ConvTranspose2d(512, 256, (4, 4), (2, 2), (1, 1)),
# (16, 16)
nn.ConvTranspose2d(256, 128, (4, 4), (2, 2), (1, 1)),
# (32, 32)
nn.ConvTranspose2d(128, 128, (4, 4), (2, 2), (1, 1)),
# (64, 64)
nn.ConvTranspose2d(128, 64, (4, 4), (2, 2), (1, 1)),
# (128, 128)
nn.ConvTranspose2d(64, 32, (3, 4), (1, 2), (1, 1)),
# (128, 256)
nn.ConvTranspose2d(32, 1, (3, 4), (1, 2), (1, 1)),
# (128, 512)
)
def forward(self, x):
x = x.view(-1, self.noise_dim)
x = F.leaky_relu(self.initial(x), 0.2)
x = x.view(x.shape[0], -1, 4, 4)
for i, layer in enumerate(self.stack):
if i == len(self.stack) - 1:
x = layer(x)
else:
x = F.leaky_relu(layer(x), 0.2)
x = x.view(x.shape[0], self.out_channels, -1)
return x
class PredictiveGenerator(nn.Module):
def __init__(self):
super().__init__()
# self.main = nn.Sequential(
# nn.ReflectionPad2d((1, 1, 1, 1)),
# nn.Conv2d(1, 8, (2, 2), (1, 1), dilation=(1, 1)),
# nn.LeakyReLU(0.2),
#
# nn.ReflectionPad2d((1, 1, 1, 1)),
# nn.Conv2d(8, 16, (2, 2), (1, 1), dilation=(2, 2)),
# nn.LeakyReLU(0.2),
#
# nn.ReflectionPad2d((2, 2, 2, 2)),
# nn.Conv2d(16, 32, (2, 2), (1, 1), dilation=(4, 4)),
# nn.LeakyReLU(0.2),
#
# nn.ReflectionPad2d((4, 4, 4, 4)),
# nn.Conv2d(32, 64, (2, 2), (1, 1), dilation=(8, 8)),
# nn.LeakyReLU(0.2),
#
# nn.ReflectionPad2d((8, 8, 8, 8)),
# nn.Conv2d(64, 128, (2, 2), (1, 1), dilation=(16, 16)),
# nn.LeakyReLU(0.2),
#
# nn.ReflectionPad2d((16, 16, 16, 16)),
# nn.Conv2d(128, 128, (2, 2), (1, 1), dilation=(32, 32)),
# nn.LeakyReLU(0.2),
#
# nn.ReflectionPad2d((32, 32, 32, 32)),
# nn.Conv2d(128, 1, (2, 2), (1, 1), dilation=(64, 64)),
# )
# self.encoder = nn.Sequential(
# nn.Conv2d(1, 16, (3, 3), (2, 2), (1, 1)), # 64
# nn.Conv2d(16, 32, (3, 3), (2, 2), (1, 1)), # 32
# nn.Conv2d(32, 64, (3, 3), (2, 2), (1, 1)), # 16
# nn.Conv2d(64, 128, (3, 3), (2, 2), (1, 1)), # 8
# nn.Conv2d(128, 256, (3, 3), (2, 2), (1, 1)), # 4
# nn.Conv2d(256, 512, (3, 3), (2, 2), (1, 1)), # 2
# nn.Conv2d(512, 1024, (2, 2), (1, 1), (0, 0)), # 1
# )
#
# self.decoder = nn.Sequential(
# nn.Conv2d(1024, 512 * 4, (1, 1), (1, 1), (0, 0)), # reshape 2
# nn.ConvTranspose2d(512, 256, (4, 4), (2, 2), (1, 1)), # 4
# nn.ConvTranspose2d(256, 128, (4, 4), (2, 2), (1, 1)), # 8
# nn.ConvTranspose2d(128, 64, (4, 4), (2, 2), (1, 1)), # 16
# nn.ConvTranspose2d(64, 32, (4, 4), (2, 2), (1, 1)), # 32
# nn.ConvTranspose2d(32, 16, (4, 4), (2, 2), (1, 1)), # 64
# nn.ConvTranspose2d(16, 1, (4, 4), (2, 2), (1, 1)), # 128
# )
# self.encoder = nn.Sequential(
# nn.Conv1d(128, 256, 7, 2, 3), # 64
# nn.Conv1d(256, 512, 7, 2, 3), # 32
# nn.Conv1d(512, 1024, 7, 2, 3), # 16
# nn.Conv1d(1024, 1024, 3, 2, 1), # 8
# nn.Conv1d(1024, 1024, 3, 2, 1), # 4
# nn.Conv1d(1024, 2048, 3, 2, 1), # 2
#
# nn.ConvTranspose1d(2048, 1024, 4, 2, 1), # 4
# nn.ConvTranspose1d(1024, 1024, 4, 2, 1), # 8
# nn.ConvTranspose1d(1024, 1024, 4, 2, 1), # 16
# nn.ConvTranspose1d(1024, 512, 8, 2, 3), # 32
# nn.ConvTranspose1d(512, 256, 8, 2, 3), # 64
# nn.ConvTranspose1d(256, 128, 8, 2, 3), # 128
# )
self.stack = DilatedStack(
128,
512,
2,
[1, 2, 4, 8, 16, 32, 64, 1, 2, 4, 8, 16, 32, 64, 1, 1],
lambda x: F.leaky_relu(x, 0.2),
residual=True,
reflection_padding=True)
self.to_frames = nn.Conv1d(512, 128, 1, 1, 0)
# def generate(self, primer, steps=30):
# with torch.no_grad():
# x = primer[:, :, 128:]
#
# for i in range(steps):
# # conditioning is the last 128 frames of the sequence
# conditioning = x[:, :, -128:]
# predicted = self.inference(conditioning)
# x = torch.cat([x, predicted], dim=-1)
# return x
def inference(self, x):
batch, channels, frames = x.shape
orig = x = x[:, None, :, :]
x = self.main(x)
x = x[:, :, :128, :128]
x = x.view(batch, channels, frames)
return x
# def main(self, x):
# for i, layer in enumerate(self.encoder):
# if i == len(self.encoder) - 1:
# x = layer(x)
# else:
# x = F.leaky_relu(layer(x), 0.2)
#
# for i, layer in enumerate(self.decoder):
# if i == len(self.decoder) - 1:
# x = layer(x)
# else:
# x = F.leaky_relu(layer(x), 0.2)
# if i == 0:
# x = x.view(-1, 512, 2, 2)
# return x
def main(self, x):
batch, _, channels, time = x.shape
x = x.view(batch, channels, time)
x = self.stack(x)
x = self.to_frames(x)
x = x.view(batch, 1, channels, time)
return x
# def main(self, x):
# batch, _, channels, time = x.shape
# x = x.view(batch, channels, time)
# for i, layer in enumerate(self.encoder):
# x = layer(x)
# if x.shape[1] != 2048 and i < len(self.encoder) - 1:
# x = F.leaky_relu(x, 0.2)
# x = x.view(batch, 1, channels, time)
# return x
def forward(self, x):
batch, channels, frames = x.shape
x = x[:, None, :, :]
# the conditioning is the first half
orig = x = x[..., :128]
x = self.main(x)
x = x[:, :, :128, :128]
x = torch.cat([orig, x], dim=-1)
x = x.view(batch, channels, -1)
return x | [
"[email protected]"
] | |
f584e020f872e7dd88e708fa1581b0bfb46638b1 | d3efc82dfa61fb82e47c82d52c838b38b076084c | /ETF/Redemption_SA/YW_ETFSS_SZSH_057.py | ff2e617be2fb70e66168d46d6d6a8a81aebb76c2 | [] | no_license | nantongzyg/xtp_test | 58ce9f328f62a3ea5904e6ed907a169ef2df9258 | ca9ab5cee03d7a2f457a95fb0f4762013caa5f9f | refs/heads/master | 2022-11-30T08:57:45.345460 | 2020-07-30T01:43:30 | 2020-07-30T01:43:30 | 280,388,441 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,106 | py | #!/usr/bin/python
# -*- encoding: utf-8 -*-
import sys
import time
sys.path.append("/home/yhl2/workspace/xtp_test/ETF")
from import_common import *
sys.path.append("/home/yhl2/workspace/xtp_test/ETF/etf_service")
sys.path.append("/home/yhl2/workspace/xtp_test/utils")
from QueryOrderErrorMsg import queryOrderErrorMsg
class YW_ETFSS_SZSH_057(xtp_test_case):
def test_YW_ETFSS_SZSH_057(self):
# -----------ETF申购-------------
title = '深圳ETF赎回--错误的数量(数量<0)'
# 定义当前测试用例的期待值
# 期望状态:初始、未成交、部成、全成、部撤已报、部撤、已报待撤、已撤、废单、撤废、内部撤单
# xtp_ID和cancel_xtpID默认为0,不需要变动
case_goal = {
'case_ID': 'ATC-204-056',
'期望状态': '废单',
'errorID': 10210301,
'errorMSG': queryOrderErrorMsg(10210301),
'是否生成报单': '是',
'是否是撤废': '否',
'xtp_ID': 0,
'cancel_xtpID': 0,
}
logger.warning(title + ', case_ID=' + case_goal['case_ID'])
unit_info = {
'ticker': '179850', # etf代码
}
# -----------ETF申购-------------
# 定义委托参数信息------------------------------------------
wt_reqs = {
'business_type':
Api.const.XTP_BUSINESS_TYPE['XTP_BUSINESS_TYPE_ETF'],
'market':
Api.const.XTP_MARKET_TYPE['XTP_MKT_SZ_A'],
'ticker':
unit_info['ticker'],
'side':
Api.const.XTP_SIDE_TYPE['XTP_SIDE_REDEMPTION'],
'price_type':
Api.const.XTP_PRICE_TYPE['XTP_PRICE_LIMIT'],
'quantity':
-1000000,
}
EtfParmIni(Api, case_goal['期望状态'], wt_reqs['price_type'])
CaseParmInsertMysql(case_goal, wt_reqs)
rs = etfServiceTest(Api, case_goal, wt_reqs)
etf_creation_log(case_goal, rs)
if __name__ == '__main__':
unittest.main()
| [
"[email protected]"
] | |
1d1a404ef43d5caee5af9c265b3ab30282ddbbc5 | f1600240f3bbadfa8f190c165bd40b0f74110652 | /model/core/CNN.py | 41baea57bc9626cd3025120ff505e9987ed822b7 | [] | no_license | shubhampachori12110095/XMTC | b45801e143710e97ad8098ee028b4c44b22cb110 | b93a8a78c7799461b4853006f5cd7a0fc4fcdc67 | refs/heads/master | 2020-04-22T23:26:19.869742 | 2018-04-29T01:56:55 | 2018-04-29T01:56:55 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,816 | py | '''
Created on Jan, 2018
@author: FrancesZhou
'''
from __future__ import absolute_import
import numpy as np
import tensorflow as tf
class CNN(object):
def __init__(self, max_seq_len, word_embedding, filter_sizes, label_embedding, num_classify_hidden, args):
self.max_seq_len = max_seq_len
self.word_embedding_dim = word_embedding.shape[-1]
self.filter_sizes = filter_sizes
self.num_filters = args.num_filters
self.pooling_units = args.pooling_units
self.num_classify_hidden = num_classify_hidden
self.label_embedding_dim = label_embedding.shape[-1]
self.dropout_keep_prob = args.dropout_keep_prob
#
self.weight_initializer = tf.contrib.layers.xavier_initializer()
self.const_initializer = tf.constant_initializer()
#
self.word_embedding = tf.constant(word_embedding, dtype=tf.float32)
#self.label_embedding = tf.constant(label_embedding, dtype=tf.float32)
#self.label_embedding = tf.get_variable('label_embedding', [], initializer=label_embedding)
self.label_embedding = tf.get_variable(name='label_embedding', initializer=tf.constant(label_embedding, dtype=tf.float32))
#
# self.x = tf.placeholder(tf.float32, [self.batch_size, self.max_seq_len, self.word_embedding_dim])
# self.seqlen = tf.placeholder(tf.int32, [self.batch_size])
# self.label_prop = tf.placeholder(tf.float32, [self.batch_size])
#
# self.x = tf.placeholder(tf.int32, [self.batch_size, self.max_seq_len])
# self.y = tf.placeholder(tf.float32, [self.batch_size, 2])
# self.label_embedding_id = tf.placeholder(tf.int32, [self.batch_size])
self.x = tf.placeholder(tf.int32, [None, self.max_seq_len])
self.y = tf.placeholder(tf.float32, [None])
self.label_embedding_id = tf.placeholder(tf.int32, [None])
self.label_prop = tf.placeholder(tf.float32, [None])
def attention_layer(self, hidden_states, label_embeddings, num_hiddens, hidden_dim, label_embedding_dim, name_scope=None):
# hidden_states: [batch_size, num_hiddens, hidden_dim]
# label_embeddings: [batch_size, label_embedding_dim]
with tf.variable_scope(name_scope + 'att_layer'):
w = tf.get_variable('w', [hidden_dim, label_embedding_dim], initializer=self.weight_initializer)
# hidden_states: [batch_size, num, hidden_dim]
# label_embeddings: [batch_size, label_embedding_dim]
# score: h*W*l
s = tf.matmul(tf.reshape(tf.matmul(tf.reshape(hidden_states, [-1, hidden_dim]), w), [-1, num_hiddens, label_embedding_dim]),
tf.expand_dims(label_embeddings, axis=-1))
# s: [batch_size, num, 1]
s = tf.nn.softmax(s, 1)
# s: [batch_size, num, 1]
# hidden_states: [batch_size, num, hidden_dim]
# s_hidden: [batch_size, num, hidden_dim]
s_hidden = tf.multiply(s, hidden_states)
return tf.reduce_sum(s_hidden, axis=1)
def classification_layer(self, features, label_embeddings, hidden_dim, label_embedding_dim):
# features: [batch_size, hidden_dim]
# label_embeddings: [batch_size, label_embedding_dim]
#
with tf.variable_scope('classification_layer'):
with tf.variable_scope('features'):
w_fea = tf.get_variable('w_fea', [hidden_dim, self.num_classify_hidden], initializer=self.weight_initializer)
# features: [batch_size, hidden_dim]
fea_att = tf.matmul(features, w_fea)
with tf.variable_scope('label'):
w_label = tf.get_variable('w_label', [label_embedding_dim, self.num_classify_hidden], initializer=self.weight_initializer)
# label_embedding: [batch_size, label_embedding_dim]
label_att = tf.matmul(label_embeddings, w_label)
b = tf.get_variable('b', [self.num_classify_hidden], initializer=self.const_initializer)
fea_label_plus = tf.add(fea_att, label_att)
fea_label_plus_b = tf.nn.relu(tf.add(fea_label_plus, b))
# fea_label_plus_b: [batch_size, num_classify_hidden]
#
with tf.variable_scope('classify'):
w_classify = tf.get_variable('w_classify', [self.num_classify_hidden, 1], initializer=self.weight_initializer)
#b_classify = tf.get_variable('b_classify', [1], initializer=self.const_initializer)
out = tf.matmul(fea_label_plus_b, w_classify)
# out = tf.add(wz_b_plus, b_classify)
# out: [batch_size, 1]
return tf.squeeze(out)
#return tf.nn.relu(wz_b_plus)
#return wz_b_plus
#return tf.nn.softmax(out, -1)
#return tf.nn.softmax(tf.nn.relu(wz_b_plus), -1)
def build_model(self):
# x: [batch_size, self.max_seq_len]
# y: [batch_size]
# label_embedding_id: [batch_size]
x = tf.nn.embedding_lookup(self.word_embedding, self.x)
# x: [batch_size, self.max_seq_len, word_embedding_dim]
label_embeddings = tf.nn.embedding_lookup(self.label_embedding, self.label_embedding_id)
# label_embeddings: [batch_size, label_embedding_dim]
x_expand = tf.expand_dims(x, axis=-1)
y = self.y
# dropout
# TODO
#x_expand = tf.nn.dropout(x_expand, keep_prob=0.25)
conv_outputs = []
conv_atten_outputs = []
for i, filter_size in enumerate(self.filter_sizes):
with tf.name_scope('convolution-pooling-{0}'.format(filter_size)) as name_scope:
# ============= convolution ============
filter = tf.get_variable('filter-{0}'.format(filter_size),
[filter_size, self.word_embedding_dim, 1, self.num_filters],
initializer=self.weight_initializer)
conv = tf.nn.conv2d(x_expand, filter, strides=[1,1,1,1], padding='VALID', name='conv')
b = tf.get_variable('b-{0}'.format(filter_size), [self.num_filters])
conv_b = tf.nn.relu(tf.nn.bias_add(conv, b), 'relu')
# conv_b: [batch_size, seqence_length-filter_size+1, 1, num_filters]
# ============= max pooling for x-embedding =========
pool_emb = tf.nn.max_pool(conv_b, ksize=[1, self.max_seq_len-filter_size+1, 1, 1],
strides=[1, 1, 1, 1], padding='VALID', name='max-pooling')
# pool_emb: [batch_size, 1, 1, num_filters]
conv_outputs.append(tf.squeeze(pool_emb, [1, 2]))
# ============= dynamic max pooling =================
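                # split the (max_seq_len - filter_size + 1) conv outputs into pooling_units equal chunks, max-pooling each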
pool_size = (self.max_seq_len - filter_size + 1) // self.pooling_units
pool_out = tf.nn.max_pool(conv_b, ksize=[1, pool_size, 1, 1],
strides=[1, pool_size, 1, 1], padding='VALID', name='dynamic-max-pooling')
# pool_out: [batch_size, pooling_units, 1, num_filters]
# ============= attention ===============
pool_squeeze = tf.squeeze(pool_out, [-2])
# pool_squeeze: [batch_size, pooling_units, num_filters]
print [None, self.pooling_units, self.num_filters]
print pool_squeeze.get_shape().as_list()
num_hiddens = (self.max_seq_len - filter_size + 1) // pool_size
print num_hiddens
l_feature = self.attention_layer(pool_squeeze, label_embeddings, num_hiddens, self.num_filters, self.label_embedding_dim, name_scope=name_scope)
# l_feature: [batch_size, num_filters]
conv_atten_outputs.append(l_feature)
x_emb = tf.concat(conv_outputs, -1)
all_features = tf.concat(conv_atten_outputs, -1)
# dropout
with tf.name_scope('dropout'):
fea_dropout = tf.nn.dropout(all_features, keep_prob=self.dropout_keep_prob)
with tf.name_scope('output'):
fea_dim = fea_dropout.get_shape().as_list()[-1]
y_ = self.classification_layer(fea_dropout, label_embeddings, fea_dim, self.label_embedding_dim)
# loss
# y: [batch_size]
#loss = tf.reduce_sum(tf.nn.sigmoid_cross_entropy_with_logits(labels=y, logits=y_))
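        # weight each example's sigmoid cross-entropy by its label propensity score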
loss = tf.reduce_sum(
tf.multiply(tf.nn.sigmoid_cross_entropy_with_logits(labels=y, logits=y_), self.label_prop)
)
# if self.use_propensity:
# loss = tf.losses.sigmoid_cross_entropy(y, y_, weights=tf.expand_dims(self.label_prop, -1))
# else:
# loss = tf.losses.sigmoid_cross_entropy(y, y_)
return x_emb, tf.sigmoid(y_), loss
| [
"[email protected]"
] | |
9a03dbf9ee4dcfaac6cc5b242193b5047b594d22 | 75e8f932e1e08c7e71380e6b71d85ddd04f052dd | /SDAPythonBasics/list_ex.py | bd1f0706f87e8b3dfd0e0535ddd461107f069623 | [] | no_license | aeciovc/sda_python_ee4 | fe470a253126ad307c651d252f9f9b489da32835 | 9e1e8be675fcafe4a61c354b55b71f53ad2af0fe | refs/heads/master | 2023-08-29T15:17:34.033331 | 2021-10-31T09:41:57 | 2021-10-31T09:41:57 | 365,678,373 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,278 | py | """
# Ask the user name/phone cel/phone home/age
# Store those in a dictionary
# Save this dictionary to a list
# and Print out as result this:
[
{
'name': 'Aecio',
'phone_cel': '4535345435',
'phone_home': '34234234',
'age': 22
}
]
"""
list_phone_books = []
name = input("Type your name: ")
phone_cel = input("Type your cel phone number:")
phone_home = input("Type your home number:")
age = input("Type your age:")
phone_book = {} # {}
# the syntax to assign a new key and value ( dict_name[KEY] = VALUE )
phone_book['name'] = name # {'name': 'Aecio'}
phone_book['phone_cel'] = phone_cel # {'name': 'Aecio', 'phone_cel': '345435345'}
phone_book['phone_home'] = phone_home # {'name': 'Aecio', 'phone_cel': '345435345', 'phone_home': '53545435'}
phone_book['age'] = int(age) # {'name': 'Aecio', 'phone_cel': '345435345', 'phone_home': '53545435', 'age': 22}
list_phone_books.append(phone_book) # [{'name': 'Aecio', 'phone_cel': '345435345', 'phone_home': '53545435', 'age': 22}]
print(len(list_phone_books)) # 1
print(list_phone_books) # [{'name': 'Aecio', 'phone_cel': '345435345', 'phone_home': '53545435', 'age': 22}]
| [
"[email protected]"
] | |
22fd525a675865729fdcd29d2e652cc86d653d11 | 3e381dc0a265afd955e23c85dce1e79e2b1c5549 | /hs-S1/icicealtigen.py | 38ca41f7d89dcab101bac3f89dbd3ba9d5af8a91 | [] | no_license | serkancam/byfp2-2020-2021 | 3addeb92a3ff5616cd6dbd3ae7b2673e1a1a1a5e | c67206bf5506239d967c3b1ba75f9e08fdbad162 | refs/heads/master | 2023-05-05T04:36:21.525621 | 2021-05-29T11:56:27 | 2021-05-29T11:56:27 | 322,643,962 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 272 | py | import turtle as t
t.Screen().setup(600,400)
t.shape("turtle")
# solution goes here
for i in range(3):
for k in range(6):
t.forward(50)
t.right(60)
    # jump forward to the next hexagon's start without drawing
t.penup()
t.forward(100)
t.pendown()
t.done() | [
"[email protected]"
] | |
e4f19edfd0046a2a97ff217f1889c4ca0143aa5c | 5af5fa981a0a8598b2b031aaf10c6ba6d2f5c28c | /images/views.py | 52a223a2f7a48ace977ebf42c00165721d3e50e9 | [] | no_license | FMularski/image-house | d15b2fe7d0379cd237c5aef3336a0ad2ee5a136c | 968e7c6a566090d0cf25246e506820dd955b34c0 | refs/heads/main | 2023-07-31T18:49:25.705577 | 2021-09-13T18:56:10 | 2021-09-13T18:56:10 | 404,382,258 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 4,683 | py | from django.shortcuts import render, reverse, redirect
from django.contrib.auth import authenticate, login, logout
from django.contrib.auth.decorators import login_required
from django.contrib import messages
from django.core.paginator import Paginator, EmptyPage
from . import forms, models
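# Small reusable helpers for the list views below; 'filt3r' is presumably
# spelled with a digit to avoid shadowing the built-in filter().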
def filt3r(request, images):
if request.GET.get('category'):
images = images.filter(category__name=request.GET.get('category'))
return images
def sort(request, images):
if request.GET.get('sort-date') == 'asc':
images = images.order_by('created_at')
elif request.GET.get('sort-date') == 'desc':
images = images.order_by('-created_at')
if request.GET.get('sort-views') == 'asc':
images = images.order_by('views')
elif request.GET.get('sort-views') == 'desc':
images = images.order_by('-views')
if request.GET.get('sort-votes') == 'asc':
images = images.order_by('votes')
elif request.GET.get('sort-votes') == 'desc':
images = images.order_by('-votes')
return images
def paginate(request, images, images_per_page):
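    # Falls back to the first page when the requested page is out of range.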
paginator = Paginator(images, images_per_page)
try:
page = paginator.page(request.GET.get('page', 1))
except EmptyPage:
page = paginator.page(1)
return page
def sign_in(request):
form = forms.SignInForm()
if request.method == 'POST':
form = forms.SignInForm(data=request.POST)
if form.is_valid():
user = form.user_cache
login(request, user)
return redirect(reverse('home', ))
messages.error(request, 'Invalid credentials.')
context = {'form': form}
return render(request, 'images/sign_in.html', context)
def sign_up(request):
form = forms.SignUpForm()
if request.method == 'POST':
form = forms.SignUpForm(request.POST)
if form.is_valid():
form.save()
messages.success(request, f'User \'{form.cleaned_data.get("username")}\' has been signed up.')
return redirect(reverse('sign_in', ))
context = {'form': form}
return render(request, 'images/sign_up.html', context)
@login_required(login_url='sign_in')
def home(request):
images = models.Image.objects.select_related('user', 'category').all()
most_viewed = images.order_by('-views').first()
most_voted = images.order_by('-votes').first()
    most_recent = images.order_by('-created_at').first()
images = filt3r(request, images)
images = sort(request, images)
images = paginate(request, images, images_per_page=9)
categories = models.Category.objects.all()
context = {'images': images, 'most_viewed': most_viewed,
'most_voted': most_voted, 'most_recent': most_recent,
'categories': categories}
return render(request, 'images/home.html', context)
@login_required(login_url='sign_in')
def my_images(request):
my_images = models.Image.objects.select_related('user', 'category').filter(user=request.user)
my_images = filt3r(request, my_images)
my_images = sort(request, my_images)
my_images = paginate(request, my_images, images_per_page=9)
categories = models.Category.objects.all()
context = {'images': my_images, 'categories': categories}
return render(request, 'images/my_images.html', context)
@login_required(login_url='sign_in')
def delete_img(request, pk):
models.Image.objects.get(pk=pk).delete()
messages.success(request, 'Image has been deleted.')
return redirect(reverse('my_images', ))
@login_required(login_url='sign_in')
def image(request, pk):
image = models.Image.objects.select_related('user', 'category').get(pk=pk)
image.views += 1
image.save()
context = {'image': image}
return render(request, 'images/image.html', context)
@login_required(login_url='sign_in')
def vote(request, pk, vote):
image = models.Image.objects.get(pk=pk)
image.votes += vote if vote else -1
image.save()
return redirect(reverse('image', kwargs={'pk': pk}))
@login_required(login_url='sign_in')
def add_image(request):
form = forms.ImageForm()
if request.method == 'POST':
form = forms.ImageForm(request.POST, request.FILES)
if form.is_valid():
image = form.save(commit=False)
image.user = request.user
image.save()
messages.success(request, 'Image has been added.')
return redirect(reverse('my_images', ))
context = {'form': form}
return render(request, 'images/add.html', context)
def sign_out(request):
logout(request)
return redirect(reverse('sign_in', ))
| [
"[email protected]"
] | |
1db73a5ea084e5e00837a54631bc558892cc61a6 | efe1546fa1f057cbbbe974bd8478309b6176d641 | /waf/playground/gtest/tests/test1/wscript_build | 2af2f0639e6374c08fa8b907bd37213432c364ab | [
"Apache-2.0"
] | permissive | yankee14/reflow-oven-atmega328p | 2df323aba16ac4f3eac446abc633a5d79a1a55cb | e6792143576f13f0a3a49edfd54dbb2ef851d95a | refs/heads/master | 2022-12-02T21:32:39.513878 | 2019-05-30T06:25:12 | 2019-05-30T06:25:12 | 188,760,664 | 0 | 1 | Apache-2.0 | 2022-11-15T18:22:50 | 2019-05-27T02:52:18 | Python | UTF-8 | Python | false | false | 220 | #! /usr/bin/env python
# encoding: utf-8
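# Declares a unit-test program: the 'test' feature makes waf's unit-test tool
# execute the built binary (run from ut_cwd) as part of the build.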
bld.program(
features = 'test',
source = 'AccumulatorTest.cpp',
target = 'unit_test_program',
use = 'unittestmain useless GTEST',
ut_cwd = bld.path.abspath(),
)
| [
"[email protected]"
] | ||
49e7c471cc04688d4233e45fa2ec38aa03a969bb | 2b167e29ba07e9f577c20c54cb943861d0ccfa69 | /numerical_analysis_backup/large-scale-multiobj2/core-arch5-guard0-beta0-hebbe/pareto310.py | 8c6607739df901db6b14e0cca2e08dcef97dc67e | [] | no_license | LiYan1988/kthOld_OFC | 17aeeed21e195d1a9a3262ec2e67d6b1d3f9ff0f | b1237577ea68ad735a65981bf29584ebd889132b | refs/heads/master | 2021-01-11T17:27:25.574431 | 2017-01-23T05:32:35 | 2017-01-23T05:32:35 | 79,773,237 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,489 | py | # -*- coding: utf-8 -*-
"""
Created on Thu Aug 4 15:15:10 2016
@author: li
optimize both throughput and connections
"""
#import sys
#sys.path.insert(0, '/home/li/Dropbox/KTH/numerical_analysis/ILPs')
import csv
from gurobipy import *
import numpy as np
from arch5_decomposition_new import Arch5_decompose
np.random.seed(2010)
num_cores=10
num_slots=320
i = 10
filename = 'traffic_matrix_pod250_load50_'+str(i)+'.csv'
# print filename
tm = []
with open(filename) as f:
reader = csv.reader(f)
for idx, row in enumerate(reader):
row = [float(u) for u in row]
tm.append(row)
tm = np.array(tm)
#%% arch2
corev = [10, 12]
connection_ub = []
throughput_ub = []
obj_ub = []
connection_lb = []
throughput_lb = []
obj_lb = []
connection_he = []
throughput_he = []
obj_he = []
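# For each core count: solve the routing model for upper bounds; the SA-based
# lower-bound and heuristic passes are commented out below, so zeros are stored
# as placeholders in their result columns.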
for c in corev:
m = Arch5_decompose(tm, num_slots=num_slots, num_cores=c,
alpha=1,beta=0)
m.create_model_routing(mipfocus=1,timelimit=36000,mipgap=0.01, method=3,
threads=20)
connection_ub.append(m.connection_ub_)
throughput_ub.append(m.throughput_ub_)
obj_ub.append(m.obj_ub_)
np.save('core_usagex_i%d_c%d.npy'%(i,c), m.core_usagex)
# m.create_model_sa(mipfocus=1,timelimit=26000,mipgap=0.01, method=2,
# SubMIPNodes=2000, heuristics=0.8, threads=4, presolve=2)
# connection_lb.append(m.connection_lb_)
# throughput_lb.append(m.throughput_lb_)
# obj_lb.append(m.obj_lb_)
# m.write_result_csv('cnklist_lb_%d_%d.csv'%(i,c), m.cnklist_lb)
connection_lb.append(0)
throughput_lb.append(0)
obj_lb.append(0)
# m.heuristic()
# connection_he.append(m.obj_heuristic_connection_)
# throughput_he.append(m.obj_heuristic_throughput_)
# obj_he.append(m.obj_heuristic_)
# m.write_result_csv('cnklist_heuristic_%d_%d.csv'%(i,c),
# m.cnklist_heuristic_)
connection_he.append(0)
throughput_he.append(0)
obj_he.append(0)
result = np.array([corev,
connection_ub,throughput_ub,obj_ub,
connection_lb,throughput_lb,obj_lb,
connection_he,throughput_he,obj_he]).T
file_name = "result_pareto_arch5_old_3_{}.csv".format(i)
with open(file_name, 'w') as f:
writer = csv.writer(f, delimiter=',')
    writer.writerow(['num_cores', 'connection_ub', 'throughput_ub',
                     'obj_ub', 'connection_lb', 'throughput_lb', 'obj_lb',
                     'connection_he', 'throughput_he', 'obj_he'])
writer.writerows(result)
| [
"[email protected]"
] | |
830461b71b4a998b1b41c369276838c5086a614f | 457db67d845d47bf9f65d8c4ae8c781fd9c9c74c | /Peaks_detection.py | 943e20ab6af6c11f234caa76ffdd3656a25adf42 | [] | no_license | shanonentropy/DiamondNVDataProcessing | f5b0d3607a6c9d350f22b970d598c030125396e1 | 0c7b72a879f554e8beaf8b846028587ef66aed36 | refs/heads/master | 2021-08-16T14:39:17.239834 | 2017-11-20T01:41:06 | 2017-11-20T01:41:06 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,499 | py | import sys
from numpy import NaN, Inf, arange, isscalar, asarray, array
def peakdet(v, delta, x = None):
"""
Converted from MATLAB script at http://billauer.co.il/peakdet.html
Returns two arrays
function [maxtab, mintab]=peakdet(v, delta, x)
%PEAKDET Detect peaks in a vector
% [MAXTAB, MINTAB] = PEAKDET(V, DELTA) finds the local
% maxima and minima ("peaks") in the vector V.
% MAXTAB and MINTAB consists of two columns. Column 1
% contains indices in V, and column 2 the found values.
%
% With [MAXTAB, MINTAB] = PEAKDET(V, DELTA, X) the indices
% in MAXTAB and MINTAB are replaced with the corresponding
% X-values.
%
% A point is considered a maximum peak if it has the maximal
% value, and was preceded (to the left) by a value lower by
% DELTA.
% Eli Billauer, 3.4.05 (Explicitly not copyrighted).
% This function is released to the public domain; Any use is allowed.
"""
maxtab = []
mintab = []
if x is None:
x = arange(len(v))
v = asarray(v)
if len(v) != len(x):
sys.exit('Input vectors v and x must have same length')
if not isscalar(delta):
sys.exit('Input argument delta must be a scalar')
if delta <= 0:
sys.exit('Input argument delta must be positive')
mn, mx = Inf, -Inf
mnpos, mxpos = NaN, NaN
lookformax = True
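    # Single pass over the series, alternating between peak and valley search:
    # a maximum is committed only once the signal drops more than delta below it.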
for i in arange(len(v)):
this = v[i]
if this > mx:
mx = this
mxpos = x[i]
if this < mn:
mn = this
mnpos = x[i]
if lookformax:
if this < mx-delta:
maxtab.append((mxpos, mx))
mn = this
mnpos = x[i]
lookformax = False
else:
if this > mn+delta:
mintab.append((mnpos, mn))
mx = this
mxpos = x[i]
lookformax = True
return array(maxtab), array(mintab)
if __name__=="__main__":
from matplotlib.pyplot import plot, scatter, show
series = [0,0,0,2,0,0,0,-2,0,0,0,2,0,0,0,-2,0]
maxtab, mintab = peakdet(series,.3)
plot(series)
scatter(array(maxtab)[:,0], array(maxtab)[:,1], color='blue')
scatter(array(mintab)[:,0], array(mintab)[:,1], color='red')
show()
    print(array(maxtab)[0])
    print(series[3])
"[email protected]"
] | |
2eb8dd6bf1f6b9ee2584d0c6a4e59d9d43d6ebcd | 3cdb4faf34d8375d6aee08bcc523adadcb0c46e2 | /web/env/lib/python3.6/site-packages/django/db/backends/base/schema.py | 82d6fafc75274b14d721dcbfb9e311635ae4987e | [
"MIT",
"GPL-3.0-only"
] | permissive | rizwansoaib/face-attendence | bc185d4de627ce5adab1cda7da466cb7a5fddcbe | 59300441b52d32f3ecb5095085ef9d448aef63af | refs/heads/master | 2020-04-25T23:47:47.303642 | 2019-09-12T14:26:17 | 2019-09-12T14:26:17 | 173,157,284 | 45 | 12 | MIT | 2020-02-11T23:47:55 | 2019-02-28T17:33:14 | Python | UTF-8 | Python | false | false | 50,348 | py | import hashlib
import logging
from datetime import datetime
from django.db.backends.ddl_references import (
Columns, ForeignKeyName, IndexName, Statement, Table,
)
from django.db.backends.utils import split_identifier
from django.db.models import Index
from django.db.transaction import TransactionManagementError, atomic
from django.utils import timezone
from django.utils.encoding import force_bytes
logger = logging.getLogger('django.db.backends.schema')
def _is_relevant_relation(relation, altered_field):
"""
When altering the given field, must constraints on its model from the given
relation be temporarily dropped?
"""
field = relation.field
if field.many_to_many:
# M2M reverse field
return False
if altered_field.primary_key and field.to_fields == [None]:
# Foreign key constraint on the primary key, which is being altered.
return True
# Is the constraint targeting the field being altered?
return altered_field.name in field.to_fields
def _related_non_m2m_objects(old_field, new_field):
# Filter out m2m objects from reverse relations.
# Return (old_relation, new_relation) tuples.
return zip(
(obj for obj in old_field.model._meta.related_objects if _is_relevant_relation(obj, old_field)),
(obj for obj in new_field.model._meta.related_objects if _is_relevant_relation(obj, new_field))
)
class BaseDatabaseSchemaEditor:
"""
This class and its subclasses are responsible for emitting schema-changing
statements to the databases - model creation/removal/alteration, field
renaming, index fiddling, and so on.
"""
# Overrideable SQL templates
sql_create_table = "CREATE TABLE %(table)s (%(definition)s)"
sql_rename_table = "ALTER TABLE %(old_table)s RENAME TO %(new_table)s"
sql_retablespace_table = "ALTER TABLE %(table)s SET TABLESPACE %(new_tablespace)s"
sql_delete_table = "DROP TABLE %(table)s CASCADE"
sql_create_column = "ALTER TABLE %(table)s ADD COLUMN %(column)s %(definition)s"
sql_alter_column = "ALTER TABLE %(table)s %(changes)s"
sql_alter_column_type = "ALTER COLUMN %(column)s TYPE %(type)s"
sql_alter_column_null = "ALTER COLUMN %(column)s DROP NOT NULL"
sql_alter_column_not_null = "ALTER COLUMN %(column)s SET NOT NULL"
sql_alter_column_default = "ALTER COLUMN %(column)s SET DEFAULT %(default)s"
sql_alter_column_no_default = "ALTER COLUMN %(column)s DROP DEFAULT"
sql_delete_column = "ALTER TABLE %(table)s DROP COLUMN %(column)s CASCADE"
sql_rename_column = "ALTER TABLE %(table)s RENAME COLUMN %(old_column)s TO %(new_column)s"
sql_update_with_default = "UPDATE %(table)s SET %(column)s = %(default)s WHERE %(column)s IS NULL"
sql_create_check = "ALTER TABLE %(table)s ADD CONSTRAINT %(name)s CHECK (%(check)s)"
sql_delete_check = "ALTER TABLE %(table)s DROP CONSTRAINT %(name)s"
sql_create_unique = "ALTER TABLE %(table)s ADD CONSTRAINT %(name)s UNIQUE (%(columns)s)"
sql_delete_unique = "ALTER TABLE %(table)s DROP CONSTRAINT %(name)s"
sql_create_fk = (
"ALTER TABLE %(table)s ADD CONSTRAINT %(name)s FOREIGN KEY (%(column)s) "
"REFERENCES %(to_table)s (%(to_column)s)%(deferrable)s"
)
sql_create_inline_fk = None
sql_delete_fk = "ALTER TABLE %(table)s DROP CONSTRAINT %(name)s"
sql_create_index = "CREATE INDEX %(name)s ON %(table)s (%(columns)s)%(extra)s"
sql_delete_index = "DROP INDEX %(name)s"
sql_create_pk = "ALTER TABLE %(table)s ADD CONSTRAINT %(name)s PRIMARY KEY (%(columns)s)"
sql_delete_pk = "ALTER TABLE %(table)s DROP CONSTRAINT %(name)s"
sql_delete_procedure = 'DROP PROCEDURE %(procedure)s'
def __init__(self, connection, collect_sql=False, atomic=True):
self.connection = connection
self.collect_sql = collect_sql
if self.collect_sql:
self.collected_sql = []
self.atomic_migration = self.connection.features.can_rollback_ddl and atomic
# State-managing methods
def __enter__(self):
self.deferred_sql = []
if self.atomic_migration:
self.atomic = atomic(self.connection.alias)
self.atomic.__enter__()
return self
def __exit__(self, exc_type, exc_value, traceback):
if exc_type is None:
for sql in self.deferred_sql:
self.execute(sql)
if self.atomic_migration:
self.atomic.__exit__(exc_type, exc_value, traceback)
# Core utility functions
def execute(self, sql, params=()):
"""Execute the given SQL statement, with optional parameters."""
# Don't perform the transactional DDL check if SQL is being collected
# as it's not going to be executed anyway.
if not self.collect_sql and self.connection.in_atomic_block and not self.connection.features.can_rollback_ddl:
raise TransactionManagementError(
"Executing DDL statements while in a transaction on databases "
"that can't perform a rollback is prohibited."
)
# Account for non-string statement objects.
sql = str(sql)
# Log the command we're running, then run it
logger.debug("%s; (params %r)", sql, params, extra={'params': params, 'sql': sql})
if self.collect_sql:
ending = "" if sql.endswith(";") else ";"
if params is not None:
self.collected_sql.append((sql % tuple(map(self.quote_value, params))) + ending)
else:
self.collected_sql.append(sql + ending)
else:
with self.connection.cursor() as cursor:
cursor.execute(sql, params)
def quote_name(self, name):
return self.connection.ops.quote_name(name)
@classmethod
def _digest(cls, *args):
"""
Generate a 32-bit digest of a set of arguments that can be used to
shorten identifying names.
"""
h = hashlib.md5()
for arg in args:
h.update(force_bytes(arg))
return h.hexdigest()[:8]
# Field <-> database mapping functions
def column_sql(self, model, field, include_default=False):
"""
Take a field and return its column definition.
The field must already have had set_attributes_from_name() called.
"""
# Get the column's type and use that as the basis of the SQL
db_params = field.db_parameters(connection=self.connection)
sql = db_params['type']
params = []
# Check for fields that aren't actually columns (e.g. M2M)
if sql is None:
return None, None
# Work out nullability
null = field.null
# If we were told to include a default value, do so
include_default = include_default and not self.skip_default(field)
if include_default:
default_value = self.effective_default(field)
if default_value is not None:
if self.connection.features.requires_literal_defaults:
# Some databases can't take defaults as a parameter (oracle)
# If this is the case, the individual schema backend should
# implement prepare_default
sql += " DEFAULT %s" % self.prepare_default(default_value)
else:
sql += " DEFAULT %s"
params += [default_value]
# Oracle treats the empty string ('') as null, so coerce the null
# option whenever '' is a possible value.
if (field.empty_strings_allowed and not field.primary_key and
self.connection.features.interprets_empty_strings_as_nulls):
null = True
if null and not self.connection.features.implied_column_null:
sql += " NULL"
elif not null:
sql += " NOT NULL"
# Primary key/unique outputs
if field.primary_key:
sql += " PRIMARY KEY"
elif field.unique:
sql += " UNIQUE"
# Optionally add the tablespace if it's an implicitly indexed column
tablespace = field.db_tablespace or model._meta.db_tablespace
if tablespace and self.connection.features.supports_tablespaces and field.unique:
sql += " %s" % self.connection.ops.tablespace_sql(tablespace, inline=True)
# Return the sql
return sql, params
def skip_default(self, field):
"""
Some backends don't accept default values for certain columns types
(i.e. MySQL longtext and longblob).
"""
return False
def prepare_default(self, value):
"""
Only used for backends which have requires_literal_defaults feature
"""
raise NotImplementedError(
'subclasses of BaseDatabaseSchemaEditor for backends which have '
'requires_literal_defaults must provide a prepare_default() method'
)
def effective_default(self, field):
"""Return a field's effective database default value."""
if field.has_default():
default = field.get_default()
elif not field.null and field.blank and field.empty_strings_allowed:
if field.get_internal_type() == "BinaryField":
default = bytes()
else:
default = str()
elif getattr(field, 'auto_now', False) or getattr(field, 'auto_now_add', False):
default = datetime.now()
internal_type = field.get_internal_type()
if internal_type == 'DateField':
default = default.date
elif internal_type == 'TimeField':
default = default.time
elif internal_type == 'DateTimeField':
default = timezone.now
else:
default = None
# If it's a callable, call it
if callable(default):
default = default()
# Convert the value so it can be sent to the database.
return field.get_db_prep_save(default, self.connection)
def quote_value(self, value):
"""
Return a quoted version of the value so it's safe to use in an SQL
string. This is not safe against injection from user code; it is
intended only for use in making SQL scripts or preparing default values
for particularly tricky backends (defaults are not user-defined, though,
so this is safe).
"""
raise NotImplementedError()
# Actions
def create_model(self, model):
"""
Create a table and any accompanying indexes or unique constraints for
the given `model`.
"""
# Create column SQL, add FK deferreds if needed
column_sqls = []
params = []
for field in model._meta.local_fields:
# SQL
definition, extra_params = self.column_sql(model, field)
if definition is None:
continue
# Check constraints can go on the column SQL here
db_params = field.db_parameters(connection=self.connection)
if db_params['check']:
definition += " CHECK (%s)" % db_params['check']
# Autoincrement SQL (for backends with inline variant)
col_type_suffix = field.db_type_suffix(connection=self.connection)
if col_type_suffix:
definition += " %s" % col_type_suffix
params.extend(extra_params)
# FK
if field.remote_field and field.db_constraint:
to_table = field.remote_field.model._meta.db_table
to_column = field.remote_field.model._meta.get_field(field.remote_field.field_name).column
if self.sql_create_inline_fk:
definition += " " + self.sql_create_inline_fk % {
"to_table": self.quote_name(to_table),
"to_column": self.quote_name(to_column),
}
elif self.connection.features.supports_foreign_keys:
self.deferred_sql.append(self._create_fk_sql(model, field, "_fk_%(to_table)s_%(to_column)s"))
# Add the SQL to our big list
column_sqls.append("%s %s" % (
self.quote_name(field.column),
definition,
))
# Autoincrement SQL (for backends with post table definition variant)
if field.get_internal_type() in ("AutoField", "BigAutoField"):
autoinc_sql = self.connection.ops.autoinc_sql(model._meta.db_table, field.column)
if autoinc_sql:
self.deferred_sql.extend(autoinc_sql)
# Add any unique_togethers (always deferred, as some fields might be
# created afterwards, like geometry fields with some backends)
for fields in model._meta.unique_together:
columns = [model._meta.get_field(field).column for field in fields]
self.deferred_sql.append(self._create_unique_sql(model, columns))
# Make the table
sql = self.sql_create_table % {
"table": self.quote_name(model._meta.db_table),
"definition": ", ".join(column_sqls)
}
if model._meta.db_tablespace:
tablespace_sql = self.connection.ops.tablespace_sql(model._meta.db_tablespace)
if tablespace_sql:
sql += ' ' + tablespace_sql
# Prevent using [] as params, in the case a literal '%' is used in the definition
self.execute(sql, params or None)
# Add any field index and index_together's (deferred as SQLite3 _remake_table needs it)
self.deferred_sql.extend(self._model_indexes_sql(model))
# Make M2M tables
for field in model._meta.local_many_to_many:
if field.remote_field.through._meta.auto_created:
self.create_model(field.remote_field.through)
def delete_model(self, model):
"""Delete a model from the database."""
# Handle auto-created intermediary models
for field in model._meta.local_many_to_many:
if field.remote_field.through._meta.auto_created:
self.delete_model(field.remote_field.through)
# Delete the table
self.execute(self.sql_delete_table % {
"table": self.quote_name(model._meta.db_table),
})
# Remove all deferred statements referencing the deleted table.
for sql in list(self.deferred_sql):
if isinstance(sql, Statement) and sql.references_table(model._meta.db_table):
self.deferred_sql.remove(sql)
def add_index(self, model, index):
"""Add an index on a model."""
self.execute(index.create_sql(model, self))
def remove_index(self, model, index):
"""Remove an index from a model."""
self.execute(index.remove_sql(model, self))
def alter_unique_together(self, model, old_unique_together, new_unique_together):
"""
Deal with a model changing its unique_together. The input
unique_togethers must be doubly-nested, not the single-nested
["foo", "bar"] format.
"""
olds = {tuple(fields) for fields in old_unique_together}
news = {tuple(fields) for fields in new_unique_together}
# Deleted uniques
for fields in olds.difference(news):
self._delete_composed_index(model, fields, {'unique': True}, self.sql_delete_unique)
# Created uniques
for fields in news.difference(olds):
columns = [model._meta.get_field(field).column for field in fields]
self.execute(self._create_unique_sql(model, columns))
def alter_index_together(self, model, old_index_together, new_index_together):
"""
Deal with a model changing its index_together. The input
index_togethers must be doubly-nested, not the single-nested
["foo", "bar"] format.
"""
olds = {tuple(fields) for fields in old_index_together}
news = {tuple(fields) for fields in new_index_together}
# Deleted indexes
for fields in olds.difference(news):
self._delete_composed_index(model, fields, {'index': True}, self.sql_delete_index)
# Created indexes
for field_names in news.difference(olds):
fields = [model._meta.get_field(field) for field in field_names]
self.execute(self._create_index_sql(model, fields, suffix="_idx"))
def _delete_composed_index(self, model, fields, constraint_kwargs, sql):
columns = [model._meta.get_field(field).column for field in fields]
constraint_names = self._constraint_names(model, columns, **constraint_kwargs)
if len(constraint_names) != 1:
raise ValueError("Found wrong number (%s) of constraints for %s(%s)" % (
len(constraint_names),
model._meta.db_table,
", ".join(columns),
))
self.execute(self._delete_constraint_sql(sql, model, constraint_names[0]))
def alter_db_table(self, model, old_db_table, new_db_table):
"""Rename the table a model points to."""
if (old_db_table == new_db_table or
(self.connection.features.ignores_table_name_case and
old_db_table.lower() == new_db_table.lower())):
return
self.execute(self.sql_rename_table % {
"old_table": self.quote_name(old_db_table),
"new_table": self.quote_name(new_db_table),
})
# Rename all references to the old table name.
for sql in self.deferred_sql:
if isinstance(sql, Statement):
sql.rename_table_references(old_db_table, new_db_table)
def alter_db_tablespace(self, model, old_db_tablespace, new_db_tablespace):
"""Move a model's table between tablespaces."""
self.execute(self.sql_retablespace_table % {
"table": self.quote_name(model._meta.db_table),
"old_tablespace": self.quote_name(old_db_tablespace),
"new_tablespace": self.quote_name(new_db_tablespace),
})
def add_field(self, model, field):
"""
Create a field on a model. Usually involves adding a column, but may
involve adding a table instead (for M2M fields).
"""
# Special-case implicit M2M tables
if field.many_to_many and field.remote_field.through._meta.auto_created:
return self.create_model(field.remote_field.through)
# Get the column's definition
definition, params = self.column_sql(model, field, include_default=True)
# It might not actually have a column behind it
if definition is None:
return
# Check constraints can go on the column SQL here
db_params = field.db_parameters(connection=self.connection)
if db_params['check']:
definition += " CHECK (%s)" % db_params['check']
# Build the SQL and run it
sql = self.sql_create_column % {
"table": self.quote_name(model._meta.db_table),
"column": self.quote_name(field.column),
"definition": definition,
}
self.execute(sql, params)
# Drop the default if we need to
# (Django usually does not use in-database defaults)
if not self.skip_default(field) and self.effective_default(field) is not None:
changes_sql, params = self._alter_column_default_sql(model, None, field, drop=True)
sql = self.sql_alter_column % {
"table": self.quote_name(model._meta.db_table),
"changes": changes_sql,
}
self.execute(sql, params)
# Add an index, if required
self.deferred_sql.extend(self._field_indexes_sql(model, field))
# Add any FK constraints later
if field.remote_field and self.connection.features.supports_foreign_keys and field.db_constraint:
self.deferred_sql.append(self._create_fk_sql(model, field, "_fk_%(to_table)s_%(to_column)s"))
# Reset connection if required
if self.connection.features.connection_persists_old_columns:
self.connection.close()
def remove_field(self, model, field):
"""
Remove a field from a model. Usually involves deleting a column,
but for M2Ms may involve deleting a table.
"""
# Special-case implicit M2M tables
if field.many_to_many and field.remote_field.through._meta.auto_created:
return self.delete_model(field.remote_field.through)
# It might not actually have a column behind it
if field.db_parameters(connection=self.connection)['type'] is None:
return
# Drop any FK constraints, MySQL requires explicit deletion
if field.remote_field:
fk_names = self._constraint_names(model, [field.column], foreign_key=True)
for fk_name in fk_names:
self.execute(self._delete_constraint_sql(self.sql_delete_fk, model, fk_name))
# Delete the column
sql = self.sql_delete_column % {
"table": self.quote_name(model._meta.db_table),
"column": self.quote_name(field.column),
}
self.execute(sql)
# Reset connection if required
if self.connection.features.connection_persists_old_columns:
self.connection.close()
# Remove all deferred statements referencing the deleted column.
for sql in list(self.deferred_sql):
if isinstance(sql, Statement) and sql.references_column(model._meta.db_table, field.column):
self.deferred_sql.remove(sql)
def alter_field(self, model, old_field, new_field, strict=False):
"""
Allow a field's type, uniqueness, nullability, default, column,
constraints, etc. to be modified.
`old_field` is required to compute the necessary changes.
If `strict` is True, raise errors if the old column does not match
`old_field` precisely.
"""
# Ensure this field is even column-based
old_db_params = old_field.db_parameters(connection=self.connection)
old_type = old_db_params['type']
new_db_params = new_field.db_parameters(connection=self.connection)
new_type = new_db_params['type']
if ((old_type is None and old_field.remote_field is None) or
(new_type is None and new_field.remote_field is None)):
raise ValueError(
"Cannot alter field %s into %s - they do not properly define "
"db_type (are you using a badly-written custom field?)" %
(old_field, new_field),
)
elif old_type is None and new_type is None and (
old_field.remote_field.through and new_field.remote_field.through and
old_field.remote_field.through._meta.auto_created and
new_field.remote_field.through._meta.auto_created):
return self._alter_many_to_many(model, old_field, new_field, strict)
elif old_type is None and new_type is None and (
old_field.remote_field.through and new_field.remote_field.through and
not old_field.remote_field.through._meta.auto_created and
not new_field.remote_field.through._meta.auto_created):
# Both sides have through models; this is a no-op.
return
elif old_type is None or new_type is None:
raise ValueError(
"Cannot alter field %s into %s - they are not compatible types "
"(you cannot alter to or from M2M fields, or add or remove "
"through= on M2M fields)" % (old_field, new_field)
)
self._alter_field(model, old_field, new_field, old_type, new_type,
old_db_params, new_db_params, strict)
def _alter_field(self, model, old_field, new_field, old_type, new_type,
old_db_params, new_db_params, strict=False):
"""Perform a "physical" (non-ManyToMany) field update."""
# Drop any FK constraints, we'll remake them later
fks_dropped = set()
if old_field.remote_field and old_field.db_constraint:
fk_names = self._constraint_names(model, [old_field.column], foreign_key=True)
if strict and len(fk_names) != 1:
raise ValueError("Found wrong number (%s) of foreign key constraints for %s.%s" % (
len(fk_names),
model._meta.db_table,
old_field.column,
))
for fk_name in fk_names:
fks_dropped.add((old_field.column,))
self.execute(self._delete_constraint_sql(self.sql_delete_fk, model, fk_name))
# Has unique been removed?
if old_field.unique and (not new_field.unique or self._field_became_primary_key(old_field, new_field)):
# Find the unique constraint for this field
constraint_names = self._constraint_names(model, [old_field.column], unique=True, primary_key=False)
if strict and len(constraint_names) != 1:
raise ValueError("Found wrong number (%s) of unique constraints for %s.%s" % (
len(constraint_names),
model._meta.db_table,
old_field.column,
))
for constraint_name in constraint_names:
self.execute(self._delete_constraint_sql(self.sql_delete_unique, model, constraint_name))
# Drop incoming FK constraints if the field is a primary key or unique,
# which might be a to_field target, and things are going to change.
drop_foreign_keys = (
(
(old_field.primary_key and new_field.primary_key) or
(old_field.unique and new_field.unique)
) and old_type != new_type
)
if drop_foreign_keys:
# '_meta.related_field' also contains M2M reverse fields, these
# will be filtered out
for _old_rel, new_rel in _related_non_m2m_objects(old_field, new_field):
rel_fk_names = self._constraint_names(
new_rel.related_model, [new_rel.field.column], foreign_key=True
)
for fk_name in rel_fk_names:
self.execute(self._delete_constraint_sql(self.sql_delete_fk, new_rel.related_model, fk_name))
# Removed an index? (no strict check, as multiple indexes are possible)
# Remove indexes if db_index switched to False or a unique constraint
# will now be used in lieu of an index. The following lines from the
# truth table show all True cases; the rest are False:
#
# old_field.db_index | old_field.unique | new_field.db_index | new_field.unique
# ------------------------------------------------------------------------------
# True | False | False | False
# True | False | False | True
# True | False | True | True
if old_field.db_index and not old_field.unique and (not new_field.db_index or new_field.unique):
# Find the index for this field
meta_index_names = {index.name for index in model._meta.indexes}
# Retrieve only BTREE indexes since this is what's created with
# db_index=True.
index_names = self._constraint_names(model, [old_field.column], index=True, type_=Index.suffix)
for index_name in index_names:
if index_name not in meta_index_names:
# The only way to check if an index was created with
# db_index=True or with Index(['field'], name='foo')
# is to look at its name (refs #28053).
self.execute(self._delete_constraint_sql(self.sql_delete_index, model, index_name))
# Change check constraints?
if old_db_params['check'] != new_db_params['check'] and old_db_params['check']:
constraint_names = self._constraint_names(model, [old_field.column], check=True)
if strict and len(constraint_names) != 1:
raise ValueError("Found wrong number (%s) of check constraints for %s.%s" % (
len(constraint_names),
model._meta.db_table,
old_field.column,
))
for constraint_name in constraint_names:
self.execute(self._delete_constraint_sql(self.sql_delete_check, model, constraint_name))
# Have they renamed the column?
if old_field.column != new_field.column:
self.execute(self._rename_field_sql(model._meta.db_table, old_field, new_field, new_type))
# Rename all references to the renamed column.
for sql in self.deferred_sql:
if isinstance(sql, Statement):
sql.rename_column_references(model._meta.db_table, old_field.column, new_field.column)
# Next, start accumulating actions to do
actions = []
null_actions = []
post_actions = []
# Type change?
if old_type != new_type:
fragment, other_actions = self._alter_column_type_sql(model, old_field, new_field, new_type)
actions.append(fragment)
post_actions.extend(other_actions)
# When changing a column NULL constraint to NOT NULL with a given
# default value, we need to perform 4 steps:
# 1. Add a default for new incoming writes
# 2. Update existing NULL rows with new default
# 3. Replace NULL constraint with NOT NULL
# 4. Drop the default again.
# Default change?
old_default = self.effective_default(old_field)
new_default = self.effective_default(new_field)
needs_database_default = (
old_field.null and
not new_field.null and
old_default != new_default and
new_default is not None and
not self.skip_default(new_field)
)
if needs_database_default:
actions.append(self._alter_column_default_sql(model, old_field, new_field))
# Nullability change?
if old_field.null != new_field.null:
fragment = self._alter_column_null_sql(model, old_field, new_field)
if fragment:
null_actions.append(fragment)
# Only if we have a default and there is a change from NULL to NOT NULL
four_way_default_alteration = (
new_field.has_default() and
(old_field.null and not new_field.null)
)
if actions or null_actions:
if not four_way_default_alteration:
# If we don't have to do a 4-way default alteration we can
# directly run a (NOT) NULL alteration
actions = actions + null_actions
# Combine actions together if we can (e.g. postgres)
if self.connection.features.supports_combined_alters and actions:
sql, params = tuple(zip(*actions))
actions = [(", ".join(sql), sum(params, []))]
# Apply those actions
for sql, params in actions:
self.execute(
self.sql_alter_column % {
"table": self.quote_name(model._meta.db_table),
"changes": sql,
},
params,
)
if four_way_default_alteration:
# Update existing rows with default value
self.execute(
self.sql_update_with_default % {
"table": self.quote_name(model._meta.db_table),
"column": self.quote_name(new_field.column),
"default": "%s",
},
[new_default],
)
# Since we didn't run a NOT NULL change before we need to do it
# now
for sql, params in null_actions:
self.execute(
self.sql_alter_column % {
"table": self.quote_name(model._meta.db_table),
"changes": sql,
},
params,
)
if post_actions:
for sql, params in post_actions:
self.execute(sql, params)
# If primary_key changed to False, delete the primary key constraint.
if old_field.primary_key and not new_field.primary_key:
self._delete_primary_key(model, strict)
# Added a unique?
if self._unique_should_be_added(old_field, new_field):
self.execute(self._create_unique_sql(model, [new_field.column]))
# Added an index? Add an index if db_index switched to True or a unique
# constraint will no longer be used in lieu of an index. The following
# lines from the truth table show all True cases; the rest are False:
#
# old_field.db_index | old_field.unique | new_field.db_index | new_field.unique
# ------------------------------------------------------------------------------
# False | False | True | False
# False | True | True | False
# True | True | True | False
if (not old_field.db_index or old_field.unique) and new_field.db_index and not new_field.unique:
self.execute(self._create_index_sql(model, [new_field]))
# Type alteration on primary key? Then we need to alter the column
# referring to us.
rels_to_update = []
if old_field.primary_key and new_field.primary_key and old_type != new_type:
rels_to_update.extend(_related_non_m2m_objects(old_field, new_field))
# Changed to become primary key?
if self._field_became_primary_key(old_field, new_field):
# Make the new one
self.execute(
self.sql_create_pk % {
"table": self.quote_name(model._meta.db_table),
"name": self.quote_name(
self._create_index_name(model._meta.db_table, [new_field.column], suffix="_pk")
),
"columns": self.quote_name(new_field.column),
}
)
# Update all referencing columns
rels_to_update.extend(_related_non_m2m_objects(old_field, new_field))
# Handle our type alters on the other end of rels from the PK stuff above
for old_rel, new_rel in rels_to_update:
rel_db_params = new_rel.field.db_parameters(connection=self.connection)
rel_type = rel_db_params['type']
fragment, other_actions = self._alter_column_type_sql(
new_rel.related_model, old_rel.field, new_rel.field, rel_type
)
self.execute(
self.sql_alter_column % {
"table": self.quote_name(new_rel.related_model._meta.db_table),
"changes": fragment[0],
},
fragment[1],
)
for sql, params in other_actions:
self.execute(sql, params)
# Does it have a foreign key?
if (new_field.remote_field and
(fks_dropped or not old_field.remote_field or not old_field.db_constraint) and
new_field.db_constraint):
self.execute(self._create_fk_sql(model, new_field, "_fk_%(to_table)s_%(to_column)s"))
# Rebuild FKs that pointed to us if we previously had to drop them
if drop_foreign_keys:
for rel in new_field.model._meta.related_objects:
if _is_relevant_relation(rel, new_field) and rel.field.db_constraint:
self.execute(self._create_fk_sql(rel.related_model, rel.field, "_fk"))
# Does it have check constraints we need to add?
if old_db_params['check'] != new_db_params['check'] and new_db_params['check']:
self.execute(
self.sql_create_check % {
"table": self.quote_name(model._meta.db_table),
"name": self.quote_name(
self._create_index_name(model._meta.db_table, [new_field.column], suffix="_check")
),
"column": self.quote_name(new_field.column),
"check": new_db_params['check'],
}
)
# Drop the default if we need to
# (Django usually does not use in-database defaults)
if needs_database_default:
changes_sql, params = self._alter_column_default_sql(model, old_field, new_field, drop=True)
sql = self.sql_alter_column % {
"table": self.quote_name(model._meta.db_table),
"changes": changes_sql,
}
self.execute(sql, params)
# Reset connection if required
if self.connection.features.connection_persists_old_columns:
self.connection.close()
def _alter_column_null_sql(self, model, old_field, new_field):
"""
Hook to specialize column null alteration.
Return a (sql, params) fragment to set a column to null or non-null
as required by new_field, or None if no changes are required.
"""
if (self.connection.features.interprets_empty_strings_as_nulls and
new_field.get_internal_type() in ("CharField", "TextField")):
# The field is nullable in the database anyway, leave it alone.
return
else:
new_db_params = new_field.db_parameters(connection=self.connection)
sql = self.sql_alter_column_null if new_field.null else self.sql_alter_column_not_null
return (
sql % {
'column': self.quote_name(new_field.column),
'type': new_db_params['type'],
},
[],
)
def _alter_column_default_sql(self, model, old_field, new_field, drop=False):
"""
Hook to specialize column default alteration.
Return a (sql, params) fragment to add or drop (depending on the drop
argument) a default to new_field's column.
"""
new_default = self.effective_default(new_field)
default = '%s'
params = [new_default]
if drop:
params = []
elif self.connection.features.requires_literal_defaults:
# Some databases (Oracle) can't take defaults as a parameter
# If this is the case, the SchemaEditor for that database should
# implement prepare_default().
default = self.prepare_default(new_default)
params = []
new_db_params = new_field.db_parameters(connection=self.connection)
sql = self.sql_alter_column_no_default if drop else self.sql_alter_column_default
return (
sql % {
'column': self.quote_name(new_field.column),
'type': new_db_params['type'],
'default': default,
},
params,
)
def _alter_column_type_sql(self, model, old_field, new_field, new_type):
"""
Hook to specialize column type alteration for different backends,
for cases when a creation type is different to an alteration type
(e.g. SERIAL in PostgreSQL, PostGIS fields).
Return a two-tuple of: an SQL fragment of (sql, params) to insert into
an ALTER TABLE statement and a list of extra (sql, params) tuples to
run once the field is altered.
"""
return (
(
self.sql_alter_column_type % {
"column": self.quote_name(new_field.column),
"type": new_type,
},
[],
),
[],
)
def _alter_many_to_many(self, model, old_field, new_field, strict):
"""Alter M2Ms to repoint their to= endpoints."""
# Rename the through table
if old_field.remote_field.through._meta.db_table != new_field.remote_field.through._meta.db_table:
self.alter_db_table(old_field.remote_field.through, old_field.remote_field.through._meta.db_table,
new_field.remote_field.through._meta.db_table)
# Repoint the FK to the other side
self.alter_field(
new_field.remote_field.through,
# We need the field that points to the target model, so we can tell alter_field to change it -
# this is m2m_reverse_field_name() (as opposed to m2m_field_name, which points to our model)
old_field.remote_field.through._meta.get_field(old_field.m2m_reverse_field_name()),
new_field.remote_field.through._meta.get_field(new_field.m2m_reverse_field_name()),
)
self.alter_field(
new_field.remote_field.through,
# for self-referential models we need to alter field from the other end too
old_field.remote_field.through._meta.get_field(old_field.m2m_field_name()),
new_field.remote_field.through._meta.get_field(new_field.m2m_field_name()),
)
def _create_index_name(self, table_name, column_names, suffix=""):
"""
Generate a unique name for an index/unique constraint.
The name is divided into 3 parts: the table name, the column names,
and a unique digest and suffix.
"""
_, table_name = split_identifier(table_name)
hash_suffix_part = '%s%s' % (self._digest(table_name, *column_names), suffix)
max_length = self.connection.ops.max_name_length() or 200
# If everything fits into max_length, use that name.
index_name = '%s_%s_%s' % (table_name, '_'.join(column_names), hash_suffix_part)
if len(index_name) <= max_length:
return index_name
# Shorten a long suffix.
if len(hash_suffix_part) > max_length / 3:
hash_suffix_part = hash_suffix_part[:max_length // 3]
other_length = (max_length - len(hash_suffix_part)) // 2 - 1
index_name = '%s_%s_%s' % (
table_name[:other_length],
'_'.join(column_names)[:other_length],
hash_suffix_part,
)
# Prepend D if needed to prevent the name from starting with an
# underscore or a number (not permitted on Oracle).
if index_name[0] == "_" or index_name[0].isdigit():
index_name = "D%s" % index_name[:-1]
return index_name
def _get_index_tablespace_sql(self, model, fields, db_tablespace=None):
if db_tablespace is None:
if len(fields) == 1 and fields[0].db_tablespace:
db_tablespace = fields[0].db_tablespace
elif model._meta.db_tablespace:
db_tablespace = model._meta.db_tablespace
if db_tablespace is not None:
return ' ' + self.connection.ops.tablespace_sql(db_tablespace)
return ''
def _create_index_sql(self, model, fields, *, name=None, suffix='', using='',
db_tablespace=None, col_suffixes=(), sql=None):
"""
Return the SQL statement to create the index for one or several fields.
`sql` can be specified if the syntax differs from the standard (GIS
indexes, ...).
"""
tablespace_sql = self._get_index_tablespace_sql(model, fields, db_tablespace=db_tablespace)
columns = [field.column for field in fields]
sql_create_index = sql or self.sql_create_index
table = model._meta.db_table
def create_index_name(*args, **kwargs):
nonlocal name
if name is None:
name = self._create_index_name(*args, **kwargs)
return self.quote_name(name)
return Statement(
sql_create_index,
table=Table(table, self.quote_name),
name=IndexName(table, columns, suffix, create_index_name),
using=using,
columns=Columns(table, columns, self.quote_name, col_suffixes=col_suffixes),
extra=tablespace_sql,
)
def _model_indexes_sql(self, model):
"""
Return a list of all index SQL statements (field indexes,
index_together, Meta.indexes) for the specified model.
"""
if not model._meta.managed or model._meta.proxy or model._meta.swapped:
return []
output = []
for field in model._meta.local_fields:
output.extend(self._field_indexes_sql(model, field))
for field_names in model._meta.index_together:
fields = [model._meta.get_field(field) for field in field_names]
output.append(self._create_index_sql(model, fields, suffix="_idx"))
for index in model._meta.indexes:
output.append(index.create_sql(model, self))
return output
def _field_indexes_sql(self, model, field):
"""
Return a list of all index SQL statements for the specified field.
"""
output = []
if self._field_should_be_indexed(model, field):
output.append(self._create_index_sql(model, [field]))
return output
def _field_should_be_indexed(self, model, field):
return field.db_index and not field.unique
def _field_became_primary_key(self, old_field, new_field):
return not old_field.primary_key and new_field.primary_key
def _unique_should_be_added(self, old_field, new_field):
return (not old_field.unique and new_field.unique) or (
old_field.primary_key and not new_field.primary_key and new_field.unique
)
def _rename_field_sql(self, table, old_field, new_field, new_type):
return self.sql_rename_column % {
"table": self.quote_name(table),
"old_column": self.quote_name(old_field.column),
"new_column": self.quote_name(new_field.column),
"type": new_type,
}
def _create_fk_sql(self, model, field, suffix):
from_table = model._meta.db_table
from_column = field.column
_, to_table = split_identifier(field.target_field.model._meta.db_table)
to_column = field.target_field.column
def create_fk_name(*args, **kwargs):
return self.quote_name(self._create_index_name(*args, **kwargs))
return Statement(
self.sql_create_fk,
table=Table(from_table, self.quote_name),
name=ForeignKeyName(from_table, [from_column], to_table, [to_column], suffix, create_fk_name),
column=Columns(from_table, [from_column], self.quote_name),
to_table=Table(field.target_field.model._meta.db_table, self.quote_name),
to_column=Columns(field.target_field.model._meta.db_table, [to_column], self.quote_name),
deferrable=self.connection.ops.deferrable_sql(),
)
def _create_unique_sql(self, model, columns):
def create_unique_name(*args, **kwargs):
return self.quote_name(self._create_index_name(*args, **kwargs))
table = model._meta.db_table
return Statement(
self.sql_create_unique,
table=Table(table, self.quote_name),
name=IndexName(table, columns, '_uniq', create_unique_name),
columns=Columns(table, columns, self.quote_name),
)
def _delete_constraint_sql(self, template, model, name):
return template % {
"table": self.quote_name(model._meta.db_table),
"name": self.quote_name(name),
}
def _constraint_names(self, model, column_names=None, unique=None,
primary_key=None, index=None, foreign_key=None,
check=None, type_=None):
"""Return all constraint names matching the columns and conditions."""
if column_names is not None:
column_names = [
self.connection.introspection.column_name_converter(name)
for name in column_names
]
with self.connection.cursor() as cursor:
constraints = self.connection.introspection.get_constraints(cursor, model._meta.db_table)
result = []
for name, infodict in constraints.items():
if column_names is None or column_names == infodict['columns']:
if unique is not None and infodict['unique'] != unique:
continue
if primary_key is not None and infodict['primary_key'] != primary_key:
continue
if index is not None and infodict['index'] != index:
continue
if check is not None and infodict['check'] != check:
continue
if foreign_key is not None and not infodict['foreign_key']:
continue
if type_ is not None and infodict['type'] != type_:
continue
result.append(name)
return result
def _delete_primary_key(self, model, strict=False):
constraint_names = self._constraint_names(model, primary_key=True)
if strict and len(constraint_names) != 1:
raise ValueError('Found wrong number (%s) of PK constraints for %s' % (
len(constraint_names),
model._meta.db_table,
))
for constraint_name in constraint_names:
self.execute(self._delete_constraint_sql(self.sql_delete_pk, model, constraint_name))
def remove_procedure(self, procedure_name, param_types=()):
sql = self.sql_delete_procedure % {
'procedure': self.quote_name(procedure_name),
'param_types': ','.join(param_types),
}
self.execute(sql)
| [
"[email protected]"
] | |
6589983858de7fac1bfc6bfab9cfdae6dfa84f4d | 4e5141121d8b4015db233cbc71946ec3cfbe5fe6 | /samples/basic/crud/gnmi/models/cisco-ios-xr/Cisco-IOS-XR-aaa-lib-cfg/gn-delete-xr-aaa-lib-cfg-20-ydk.py | 93a22ad7752d9e1443eea3358f521fd01caad79e | [
"Apache-2.0"
] | permissive | itbj/ydk-py-samples | 898c6c9bad9d6f8072892300d42633d82ec38368 | c5834091da0ebedbb11af7bbf780f268aad7040b | refs/heads/master | 2022-11-20T17:44:58.844428 | 2020-07-25T06:18:02 | 2020-07-25T06:18:02 | 282,382,442 | 1 | 0 | null | 2020-07-25T06:04:51 | 2020-07-25T06:04:50 | null | UTF-8 | Python | false | false | 2,653 | py | #!/usr/bin/env python
#
# Copyright 2016 Cisco Systems, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""
Delete all config data for model Cisco-IOS-XR-aaa-lib-cfg.
usage: gn-delete-xr-aaa-lib-cfg-20-ydk.py [-h] [-v] device
positional arguments:
device gNMI device (http://user:password@host:port)
optional arguments:
-h, --help show this help message and exit
-v, --verbose print debugging messages
"""
from argparse import ArgumentParser
from urlparse import urlparse
from ydk.path import Repository
from ydk.services import CRUDService
from ydk.gnmi.providers import gNMIServiceProvider
from ydk.models.cisco_ios_xr import Cisco_IOS_XR_aaa_lib_cfg \
as xr_aaa_lib_cfg
import os
import logging
YDK_REPO_DIR = os.path.expanduser("~/.ydk/")
if __name__ == "__main__":
"""Execute main program."""
parser = ArgumentParser()
parser.add_argument("-v", "--verbose", help="print debugging messages",
action="store_true")
parser.add_argument("device",
help="gNMI device (http://user:password@host:port)")
args = parser.parse_args()
device = urlparse(args.device)
# log debug messages if verbose argument specified
if args.verbose:
logger = logging.getLogger("ydk")
logger.setLevel(logging.INFO)
handler = logging.StreamHandler()
formatter = logging.Formatter(("%(asctime)s - %(name)s - "
"%(levelname)s - %(message)s"))
handler.setFormatter(formatter)
logger.addHandler(handler)
# create gNMI provider
repository = Repository(YDK_REPO_DIR+device.hostname)
provider = gNMIServiceProvider(repo=repository,
address=device.hostname,
port=device.port,
username=device.username,
password=device.password)
# create CRUD service
crud = CRUDService()
aaa = xr_aaa_lib_cfg.Aaa() # create object
# delete configuration on gNMI device
crud.delete(provider, aaa)
exit()
# End of script
| [
"[email protected]"
] | |
3aa08f7780110a7bdf6645e4e423705b1f892161 | 0cf21c1880d43a9b9384682ce7179897de08508d | /AtCoder Beginner Contest/2019_ABC/2019-11-16(ABC145)/ABC145_C.py | cb436adb1b407097411873168d3c1dfadd4c48bc | [] | no_license | neoneo0106/AtCoder | 44bc54c82c9c1a0ded396cca54b110bc02ca86ea | 1ff28a526e6a1b32cf18bd7daa4e33462daea080 | refs/heads/master | 2022-03-24T00:35:55.664228 | 2022-02-22T14:44:14 | 2022-02-22T14:44:14 | 246,288,963 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 394 | py | import math
def dis(x1, y1, x2, y2):
return ((x1 - x2)**2 + (y1 - y2)**2) ** (1/2)
def kaijo(n):
return math.factorial(n)
n = int(input())
x = [0] * n
y = [0] * n
for i in range(n):
x[i], y[i] = map(int, input().split())
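# Every ordered pair (j, i) is adjacent in exactly (n-1)! of the n! visiting
# orders, so summing dist * (n-1)! over all pairs and dividing by n! gives the
# expected total path length.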
total = 0  # renamed from 'sum' to avoid shadowing the built-in
for j in range(n):
    for i in range(n):
        if i != j:
            total = total + dis(x[i], y[i], x[j], y[j]) * kaijo(n-1)
print(total/kaijo(n))
"[email protected]"
] | |
a39592556defdd0e737b3a3010ee24fc4b6a9448 | ef50bb32ab941f64621ba17b419dd19531da220d | /products/migrations/0003_product_price.py | 7c4097495791fbcc1c6f39e6ee27de4905f04246 | [] | no_license | OleksandrMyshko/test-site | 3de69d5a29e2860016f5fdc5d4c510e493c50e1a | 0bbf84a4eddfa92789f516b52e5b05fa15817262 | refs/heads/master | 2021-07-10T07:28:27.769729 | 2017-09-25T17:22:05 | 2017-09-25T17:22:05 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 480 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.4 on 2017-08-22 16:30
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('products', '0002_productimage_img'),
]
operations = [
migrations.AddField(
model_name='product',
name='price',
field=models.DecimalField(decimal_places=2, default=0, max_digits=10),
),
]
| [
"[email protected]"
] | |
2afd9033fffd379418dabc8536d1fe65740e5269 | 385a63d3c9e6f5815979165001f78ec3d7b90cd2 | /DrivingTDM_SetupMatlabOOP/headerAndFunctionsMotor/ximc/python-profiles/STANDA/8MR191-4233.py | a15889a864330f60af6292ced3cb74513b201575 | [
"BSD-2-Clause"
] | permissive | Rasedujjaman/matlabOOP | 5abb6ec94998fda5e9214ed94cf67a42bf243d4f | e1f025ab9b00a3646719df23852079736d2b5701 | refs/heads/main | 2023-07-23T21:40:53.905045 | 2021-08-31T16:12:39 | 2021-08-31T16:12:39 | 378,249,559 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 22,543 | py | def set_profile_8MR191_4233(lib, id):
worst_result = Result.Ok
result = Result.Ok
feedback_settings = feedback_settings_t()
feedback_settings.IPS = 4000
class FeedbackType_:
FEEDBACK_ENCODER_MEDIATED = 6
FEEDBACK_NONE = 5
FEEDBACK_EMF = 4
FEEDBACK_ENCODER = 1
feedback_settings.FeedbackType = FeedbackType_.FEEDBACK_NONE
class FeedbackFlags_:
FEEDBACK_ENC_TYPE_BITS = 192
FEEDBACK_ENC_TYPE_DIFFERENTIAL = 128
FEEDBACK_ENC_TYPE_SINGLE_ENDED = 64
FEEDBACK_ENC_REVERSE = 1
FEEDBACK_ENC_TYPE_AUTO = 0
feedback_settings.FeedbackFlags = FeedbackFlags_.FEEDBACK_ENC_TYPE_SINGLE_ENDED | FeedbackFlags_.FEEDBACK_ENC_TYPE_AUTO
feedback_settings.CountsPerTurn = 4000
result = lib.set_feedback_settings(id, byref(feedback_settings))
if result != Result.Ok:
if worst_result == Result.Ok or worst_result == Result.ValueError:
worst_result = result
home_settings = home_settings_t()
home_settings.FastHome = 100
home_settings.uFastHome = 0
home_settings.SlowHome = 10
home_settings.uSlowHome = 0
home_settings.HomeDelta = -1000
home_settings.uHomeDelta = 0
class HomeFlags_:
HOME_USE_FAST = 256
HOME_STOP_SECOND_BITS = 192
HOME_STOP_SECOND_LIM = 192
HOME_STOP_SECOND_SYN = 128
HOME_STOP_SECOND_REV = 64
HOME_STOP_FIRST_BITS = 48
HOME_STOP_FIRST_LIM = 48
HOME_STOP_FIRST_SYN = 32
HOME_STOP_FIRST_REV = 16
HOME_HALF_MV = 8
HOME_MV_SEC_EN = 4
HOME_DIR_SECOND = 2
HOME_DIR_FIRST = 1
home_settings.HomeFlags = HomeFlags_.HOME_USE_FAST | HomeFlags_.HOME_STOP_SECOND_REV | HomeFlags_.HOME_STOP_FIRST_BITS | HomeFlags_.HOME_DIR_FIRST
result = lib.set_home_settings(id, byref(home_settings))
if result != Result.Ok:
if worst_result == Result.Ok or worst_result == Result.ValueError:
worst_result = result
move_settings = move_settings_t()
move_settings.Speed = 1000
move_settings.uSpeed = 0
move_settings.Accel = 2000
move_settings.Decel = 4000
move_settings.AntiplaySpeed = 1000
move_settings.uAntiplaySpeed = 0
class MoveFlags_:
RPM_DIV_1000 = 1
result = lib.set_move_settings(id, byref(move_settings))
if result != Result.Ok:
if worst_result == Result.Ok or worst_result == Result.ValueError:
worst_result = result
engine_settings = engine_settings_t()
engine_settings.NomVoltage = 1
engine_settings.NomCurrent = 400
engine_settings.NomSpeed = 4800
engine_settings.uNomSpeed = 0
class EngineFlags_:
ENGINE_LIMIT_RPM = 128
ENGINE_LIMIT_CURR = 64
ENGINE_LIMIT_VOLT = 32
ENGINE_ACCEL_ON = 16
ENGINE_ANTIPLAY = 8
ENGINE_MAX_SPEED = 4
ENGINE_CURRENT_AS_RMS = 2
ENGINE_REVERSE = 1
engine_settings.EngineFlags = EngineFlags_.ENGINE_LIMIT_RPM | EngineFlags_.ENGINE_ACCEL_ON
engine_settings.Antiplay = 575
class MicrostepMode_:
MICROSTEP_MODE_FRAC_256 = 9
MICROSTEP_MODE_FRAC_128 = 8
MICROSTEP_MODE_FRAC_64 = 7
MICROSTEP_MODE_FRAC_32 = 6
MICROSTEP_MODE_FRAC_16 = 5
MICROSTEP_MODE_FRAC_8 = 4
MICROSTEP_MODE_FRAC_4 = 3
MICROSTEP_MODE_FRAC_2 = 2
MICROSTEP_MODE_FULL = 1
engine_settings.MicrostepMode = MicrostepMode_.MICROSTEP_MODE_FRAC_256
engine_settings.StepsPerRev = 200
result = lib.set_engine_settings(id, byref(engine_settings))
if result != Result.Ok:
if worst_result == Result.Ok or worst_result == Result.ValueError:
worst_result = result
entype_settings = entype_settings_t()
class EngineType_:
ENGINE_TYPE_BRUSHLESS = 5
ENGINE_TYPE_TEST = 4
ENGINE_TYPE_STEP = 3
ENGINE_TYPE_2DC = 2
ENGINE_TYPE_DC = 1
ENGINE_TYPE_NONE = 0
entype_settings.EngineType = EngineType_.ENGINE_TYPE_STEP | EngineType_.ENGINE_TYPE_NONE
class DriverType_:
DRIVER_TYPE_EXTERNAL = 3
DRIVER_TYPE_INTEGRATE = 2
DRIVER_TYPE_DISCRETE_FET = 1
entype_settings.DriverType = DriverType_.DRIVER_TYPE_INTEGRATE
result = lib.set_entype_settings(id, byref(entype_settings))
if result != Result.Ok:
if worst_result == Result.Ok or worst_result == Result.ValueError:
worst_result = result
power_settings = power_settings_t()
power_settings.HoldCurrent = 50
power_settings.CurrReductDelay = 1000
power_settings.PowerOffDelay = 60
power_settings.CurrentSetTime = 300
class PowerFlags_:
POWER_SMOOTH_CURRENT = 4
POWER_OFF_ENABLED = 2
POWER_REDUCT_ENABLED = 1
power_settings.PowerFlags = PowerFlags_.POWER_SMOOTH_CURRENT | PowerFlags_.POWER_OFF_ENABLED | PowerFlags_.POWER_REDUCT_ENABLED
result = lib.set_power_settings(id, byref(power_settings))
if result != Result.Ok:
if worst_result == Result.Ok or worst_result == Result.ValueError:
worst_result = result
secure_settings = secure_settings_t()
secure_settings.LowUpwrOff = 800
secure_settings.CriticalIpwr = 4000
secure_settings.CriticalUpwr = 5500
secure_settings.CriticalT = 800
secure_settings.CriticalIusb = 450
secure_settings.CriticalUusb = 520
secure_settings.MinimumUusb = 420
class Flags_:
ALARM_ENGINE_RESPONSE = 128
ALARM_WINDING_MISMATCH = 64
USB_BREAK_RECONNECT = 32
ALARM_FLAGS_STICKING = 16
ALARM_ON_BORDERS_SWAP_MISSET = 8
H_BRIDGE_ALERT = 4
LOW_UPWR_PROTECTION = 2
ALARM_ON_DRIVER_OVERHEATING = 1
secure_settings.Flags = Flags_.ALARM_ENGINE_RESPONSE | Flags_.ALARM_FLAGS_STICKING | Flags_.ALARM_ON_BORDERS_SWAP_MISSET | Flags_.H_BRIDGE_ALERT | Flags_.ALARM_ON_DRIVER_OVERHEATING
result = lib.set_secure_settings(id, byref(secure_settings))
if result != Result.Ok:
if worst_result == Result.Ok or worst_result == Result.ValueError:
worst_result = result
edges_settings = edges_settings_t()
class BorderFlags_:
BORDERS_SWAP_MISSET_DETECTION = 8
BORDER_STOP_RIGHT = 4
BORDER_STOP_LEFT = 2
BORDER_IS_ENCODER = 1
class EnderFlags_:
ENDER_SW2_ACTIVE_LOW = 4
ENDER_SW1_ACTIVE_LOW = 2
ENDER_SWAP = 1
edges_settings.EnderFlags = EnderFlags_.ENDER_SW2_ACTIVE_LOW | EnderFlags_.ENDER_SW1_ACTIVE_LOW | EnderFlags_.ENDER_SWAP
edges_settings.LeftBorder = -34100
edges_settings.uLeftBorder = 0
edges_settings.RightBorder = 100
edges_settings.uRightBorder = 0
result = lib.set_edges_settings(id, byref(edges_settings))
if result != Result.Ok:
if worst_result == Result.Ok or worst_result == Result.ValueError:
worst_result = result
pid_settings = pid_settings_t()
pid_settings.KpU = 0
pid_settings.KiU = 0
pid_settings.KdU = 0
pid_settings.Kpf = 0.003599999938160181
pid_settings.Kif = 0.03799999877810478
pid_settings.Kdf = 2.8000000384054147e-05
result = lib.set_pid_settings(id, byref(pid_settings))
if result != Result.Ok:
if worst_result == Result.Ok or worst_result == Result.ValueError:
worst_result = result
sync_in_settings = sync_in_settings_t()
class SyncInFlags_:
SYNCIN_GOTOPOSITION = 4
SYNCIN_INVERT = 2
SYNCIN_ENABLED = 1
sync_in_settings.ClutterTime = 4
sync_in_settings.Position = 0
sync_in_settings.uPosition = 0
sync_in_settings.Speed = 0
sync_in_settings.uSpeed = 0
result = lib.set_sync_in_settings(id, byref(sync_in_settings))
if result != Result.Ok:
if worst_result == Result.Ok or worst_result == Result.ValueError:
worst_result = result
sync_out_settings = sync_out_settings_t()
class SyncOutFlags_:
SYNCOUT_ONPERIOD = 64
SYNCOUT_ONSTOP = 32
SYNCOUT_ONSTART = 16
SYNCOUT_IN_STEPS = 8
SYNCOUT_INVERT = 4
SYNCOUT_STATE = 2
SYNCOUT_ENABLED = 1
sync_out_settings.SyncOutFlags = SyncOutFlags_.SYNCOUT_ONSTOP | SyncOutFlags_.SYNCOUT_ONSTART
sync_out_settings.SyncOutPulseSteps = 100
sync_out_settings.SyncOutPeriod = 2000
sync_out_settings.Accuracy = 0
sync_out_settings.uAccuracy = 0
result = lib.set_sync_out_settings(id, byref(sync_out_settings))
if result != Result.Ok:
if worst_result == Result.Ok or worst_result == Result.ValueError:
worst_result = result
extio_settings = extio_settings_t()
class EXTIOSetupFlags_:
EXTIO_SETUP_INVERT = 2
EXTIO_SETUP_OUTPUT = 1
extio_settings.EXTIOSetupFlags = EXTIOSetupFlags_.EXTIO_SETUP_OUTPUT
class EXTIOModeFlags_:
EXTIO_SETUP_MODE_OUT_BITS = 240
EXTIO_SETUP_MODE_OUT_MOTOR_ON = 64
EXTIO_SETUP_MODE_OUT_ALARM = 48
EXTIO_SETUP_MODE_OUT_MOVING = 32
EXTIO_SETUP_MODE_OUT_ON = 16
EXTIO_SETUP_MODE_IN_BITS = 15
EXTIO_SETUP_MODE_IN_ALARM = 5
EXTIO_SETUP_MODE_IN_HOME = 4
EXTIO_SETUP_MODE_IN_MOVR = 3
EXTIO_SETUP_MODE_IN_PWOF = 2
EXTIO_SETUP_MODE_IN_STOP = 1
EXTIO_SETUP_MODE_IN_NOP = 0
EXTIO_SETUP_MODE_OUT_OFF = 0
extio_settings.EXTIOModeFlags = EXTIOModeFlags_.EXTIO_SETUP_MODE_IN_STOP | EXTIOModeFlags_.EXTIO_SETUP_MODE_IN_NOP | EXTIOModeFlags_.EXTIO_SETUP_MODE_OUT_OFF
result = lib.set_extio_settings(id, byref(extio_settings))
if result != Result.Ok:
if worst_result == Result.Ok or worst_result == Result.ValueError:
worst_result = result
brake_settings = brake_settings_t()
brake_settings.t1 = 300
brake_settings.t2 = 500
brake_settings.t3 = 300
brake_settings.t4 = 400
class BrakeFlags_:
BRAKE_ENG_PWROFF = 2
BRAKE_ENABLED = 1
brake_settings.BrakeFlags = BrakeFlags_.BRAKE_ENG_PWROFF
result = lib.set_brake_settings(id, byref(brake_settings))
if result != Result.Ok:
if worst_result == Result.Ok or worst_result == Result.ValueError:
worst_result = result
control_settings = control_settings_t()
control_settings.MaxSpeed[0] = 100
control_settings.MaxSpeed[1] = 1000
control_settings.MaxSpeed[2] = 0
control_settings.MaxSpeed[3] = 0
control_settings.MaxSpeed[4] = 0
control_settings.MaxSpeed[5] = 0
control_settings.MaxSpeed[6] = 0
control_settings.MaxSpeed[7] = 0
control_settings.MaxSpeed[8] = 0
control_settings.MaxSpeed[9] = 0
control_settings.uMaxSpeed[0] = 0
control_settings.uMaxSpeed[1] = 0
control_settings.uMaxSpeed[2] = 0
control_settings.uMaxSpeed[3] = 0
control_settings.uMaxSpeed[4] = 0
control_settings.uMaxSpeed[5] = 0
control_settings.uMaxSpeed[6] = 0
control_settings.uMaxSpeed[7] = 0
control_settings.uMaxSpeed[8] = 0
control_settings.uMaxSpeed[9] = 0
control_settings.Timeout[0] = 1000
control_settings.Timeout[1] = 1000
control_settings.Timeout[2] = 1000
control_settings.Timeout[3] = 1000
control_settings.Timeout[4] = 1000
control_settings.Timeout[5] = 1000
control_settings.Timeout[6] = 1000
control_settings.Timeout[7] = 1000
control_settings.Timeout[8] = 1000
control_settings.MaxClickTime = 300
class Flags_:
CONTROL_BTN_RIGHT_PUSHED_OPEN = 8
CONTROL_BTN_LEFT_PUSHED_OPEN = 4
CONTROL_MODE_BITS = 3
CONTROL_MODE_LR = 2
CONTROL_MODE_JOY = 1
CONTROL_MODE_OFF = 0
control_settings.Flags = Flags_.CONTROL_MODE_LR | Flags_.CONTROL_MODE_OFF
control_settings.DeltaPosition = 1
control_settings.uDeltaPosition = 0
result = lib.set_control_settings(id, byref(control_settings))
if result != Result.Ok:
if worst_result == Result.Ok or worst_result == Result.ValueError:
worst_result = result
joystick_settings = joystick_settings_t()
joystick_settings.JoyLowEnd = 0
joystick_settings.JoyCenter = 5000
joystick_settings.JoyHighEnd = 10000
joystick_settings.ExpFactor = 100
joystick_settings.DeadZone = 50
class JoyFlags_:
JOY_REVERSE = 1
result = lib.set_joystick_settings(id, byref(joystick_settings))
if result != Result.Ok:
if worst_result == Result.Ok or worst_result == Result.ValueError:
worst_result = result
ctp_settings = ctp_settings_t()
ctp_settings.CTPMinError = 3
class CTPFlags_:
CTP_ERROR_CORRECTION = 16
REV_SENS_INV = 8
CTP_ALARM_ON_ERROR = 4
CTP_BASE = 2
CTP_ENABLED = 1
ctp_settings.CTPFlags = CTPFlags_.CTP_ERROR_CORRECTION
result = lib.set_ctp_settings(id, byref(ctp_settings))
if result != Result.Ok:
if worst_result == Result.Ok or worst_result == Result.ValueError:
worst_result = result
uart_settings = uart_settings_t()
uart_settings.Speed = 115200
class UARTSetupFlags_:
UART_STOP_BIT = 8
UART_PARITY_BIT_USE = 4
UART_PARITY_BITS = 3
UART_PARITY_BIT_MARK = 3
UART_PARITY_BIT_SPACE = 2
UART_PARITY_BIT_ODD = 1
UART_PARITY_BIT_EVEN = 0
uart_settings.UARTSetupFlags = UARTSetupFlags_.UART_PARITY_BIT_EVEN
result = lib.set_uart_settings(id, byref(uart_settings))
if result != Result.Ok:
if worst_result == Result.Ok or worst_result == Result.ValueError:
worst_result = result
controller_name = controller_name_t()
controller_name.ControllerName = bytes([0, 113, 252, 118, 36, 0, 72, 0, 3, 0, 0, 0, 104, 101, 103, 0])
class CtrlFlags_:
EEPROM_PRECEDENCE = 1
result = lib.set_controller_name(id, byref(controller_name))
if result != Result.Ok:
if worst_result == Result.Ok or worst_result == Result.ValueError:
worst_result = result
emf_settings = emf_settings_t()
emf_settings.L = 0
emf_settings.R = 0
emf_settings.Km = 0
class BackEMFFlags_:
BACK_EMF_KM_AUTO = 4
BACK_EMF_RESISTANCE_AUTO = 2
BACK_EMF_INDUCTANCE_AUTO = 1
result = lib.set_emf_settings(id, byref(emf_settings))
if result != Result.Ok:
if worst_result == Result.Ok or worst_result == Result.ValueError:
worst_result = result
engine_advansed_setup = engine_advansed_setup_t()
engine_advansed_setup.stepcloseloop_Kw = 50
engine_advansed_setup.stepcloseloop_Kp_low = 1000
engine_advansed_setup.stepcloseloop_Kp_high = 33
result = lib.set_engine_advansed_setup(id, byref(engine_advansed_setup))
if result != Result.Ok:
if worst_result == Result.Ok or worst_result == Result.ValueError:
worst_result = result
extended_settings = extended_settings_t()
extended_settings.Param1 = 0
result = lib.set_extended_settings(id, byref(extended_settings))
if result != Result.Ok:
if worst_result == Result.Ok or worst_result == Result.ValueError:
worst_result = result
stage_name = stage_name_t()
stage_name.PositionerName = bytes([0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0])
result = lib.set_stage_name(id, byref(stage_name))
if result != Result.Ok:
if worst_result == Result.Ok or worst_result == Result.ValueError:
worst_result = result
stage_information = stage_information_t()
stage_information.Manufacturer = bytes([0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0])
stage_information.PartNumber = bytes([0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0])
result = lib.set_stage_information(id, byref(stage_information))
if result != Result.Ok:
if worst_result == Result.Ok or worst_result == Result.ValueError:
worst_result = result
stage_settings = stage_settings_t()
stage_settings.LeadScrewPitch = 0
stage_settings.Units = bytes([0, 0, 0, 0, 0, 0, 0, 0])
stage_settings.MaxSpeed = 0
stage_settings.TravelRange = 0
stage_settings.SupplyVoltageMin = 0
stage_settings.SupplyVoltageMax = 0
stage_settings.MaxCurrentConsumption = 0
stage_settings.HorizontalLoadCapacity = 0
stage_settings.VerticalLoadCapacity = 0
result = lib.set_stage_settings(id, byref(stage_settings))
if result != Result.Ok:
if worst_result == Result.Ok or worst_result == Result.ValueError:
worst_result = result
motor_information = motor_information_t()
motor_information.Manufacturer = bytes([0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0])
motor_information.PartNumber = bytes([0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0])
result = lib.set_motor_information(id, byref(motor_information))
if result != Result.Ok:
if worst_result == Result.Ok or worst_result == Result.ValueError:
worst_result = result
motor_settings = motor_settings_t()
class MotorType_:
MOTOR_TYPE_BLDC = 3
MOTOR_TYPE_DC = 2
MOTOR_TYPE_STEP = 1
MOTOR_TYPE_UNKNOWN = 0
motor_settings.MotorType = MotorType_.MOTOR_TYPE_UNKNOWN
motor_settings.ReservedField = 0
motor_settings.Poles = 0
motor_settings.Phases = 0
motor_settings.NominalVoltage = 0
motor_settings.NominalCurrent = 0
motor_settings.NominalSpeed = 0
motor_settings.NominalTorque = 0
motor_settings.NominalPower = 0
motor_settings.WindingResistance = 0
motor_settings.WindingInductance = 0
motor_settings.RotorInertia = 0
motor_settings.StallTorque = 0
motor_settings.DetentTorque = 0
motor_settings.TorqueConstant = 0
motor_settings.SpeedConstant = 0
motor_settings.SpeedTorqueGradient = 0
motor_settings.MechanicalTimeConstant = 0
motor_settings.MaxSpeed = 0
motor_settings.MaxCurrent = 0
motor_settings.MaxCurrentTime = 0
motor_settings.NoLoadCurrent = 0
motor_settings.NoLoadSpeed = 0
result = lib.set_motor_settings(id, byref(motor_settings))
if result != Result.Ok:
if worst_result == Result.Ok or worst_result == Result.ValueError:
worst_result = result
encoder_information = encoder_information_t()
encoder_information.Manufacturer = bytes([0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0])
encoder_information.PartNumber = bytes([0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0])
result = lib.set_encoder_information(id, byref(encoder_information))
if result != Result.Ok:
if worst_result == Result.Ok or worst_result == Result.ValueError:
worst_result = result
encoder_settings = encoder_settings_t()
encoder_settings.MaxOperatingFrequency = 0
encoder_settings.SupplyVoltageMin = 0
encoder_settings.SupplyVoltageMax = 0
encoder_settings.MaxCurrentConsumption = 0
encoder_settings.PPR = 0
class EncoderSettings_:
ENCSET_REVOLUTIONSENSOR_ACTIVE_HIGH = 256
ENCSET_REVOLUTIONSENSOR_PRESENT = 64
ENCSET_INDEXCHANNEL_PRESENT = 16
ENCSET_PUSHPULL_OUTPUT = 4
ENCSET_DIFFERENTIAL_OUTPUT = 1
result = lib.set_encoder_settings(id, byref(encoder_settings))
if result != Result.Ok:
if worst_result == Result.Ok or worst_result == Result.ValueError:
worst_result = result
hallsensor_information = hallsensor_information_t()
hallsensor_information.Manufacturer = bytes([0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0])
hallsensor_information.PartNumber = bytes([0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0])
result = lib.set_hallsensor_information(id, byref(hallsensor_information))
if result != Result.Ok:
if worst_result == Result.Ok or worst_result == Result.ValueError:
worst_result = result
hallsensor_settings = hallsensor_settings_t()
hallsensor_settings.MaxOperatingFrequency = 0
hallsensor_settings.SupplyVoltageMin = 0
hallsensor_settings.SupplyVoltageMax = 0
hallsensor_settings.MaxCurrentConsumption = 0
hallsensor_settings.PPR = 0
result = lib.set_hallsensor_settings(id, byref(hallsensor_settings))
if result != Result.Ok:
if worst_result == Result.Ok or worst_result == Result.ValueError:
worst_result = result
gear_information = gear_information_t()
gear_information.Manufacturer = bytes([0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0])
gear_information.PartNumber = bytes([0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0])
result = lib.set_gear_information(id, byref(gear_information))
if result != Result.Ok:
if worst_result == Result.Ok or worst_result == Result.ValueError:
worst_result = result
gear_settings = gear_settings_t()
gear_settings.ReductionIn = 0
gear_settings.ReductionOut = 0
gear_settings.RatedInputTorque = 0
gear_settings.RatedInputSpeed = 0
gear_settings.MaxOutputBacklash = 0
gear_settings.InputInertia = 0
gear_settings.Efficiency = 0
result = lib.set_gear_settings(id, byref(gear_settings))
if result != Result.Ok:
if worst_result == Result.Ok or worst_result == Result.ValueError:
worst_result = result
accessories_settings = accessories_settings_t()
accessories_settings.MagneticBrakeInfo = bytes([0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0])
accessories_settings.MBRatedVoltage = 0
accessories_settings.MBRatedCurrent = 0
accessories_settings.MBTorque = 0
class MBSettings_:
MB_POWERED_HOLD = 2
MB_AVAILABLE = 1
accessories_settings.TemperatureSensorInfo = bytes([0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0])
accessories_settings.TSMin = 0
accessories_settings.TSMax = 0
accessories_settings.TSGrad = 0
class TSSettings_:
TS_AVAILABLE = 8
TS_TYPE_BITS = 7
TS_TYPE_SEMICONDUCTOR = 2
TS_TYPE_THERMOCOUPLE = 1
TS_TYPE_UNKNOWN = 0
accessories_settings.TSSettings = TSSettings_.TS_TYPE_UNKNOWN
class LimitSwitchesSettings_:
LS_SHORTED = 16
LS_SW2_ACTIVE_LOW = 8
LS_SW1_ACTIVE_LOW = 4
LS_ON_SW2_AVAILABLE = 2
LS_ON_SW1_AVAILABLE = 1
result = lib.set_accessories_settings(id, byref(accessories_settings))
if result != Result.Ok:
if worst_result == Result.Ok or worst_result == Result.ValueError:
worst_result = result
return worst_result
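# Illustrative call (assumes the usual libximc bootstrap, where ``lib`` is the
# loaded library object and ``device_id`` comes from lib.open_device()):
#   status = set_profile_8MR191_4233(lib, device_id)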
| [
"[email protected]"
] | |
191e8c8ebd498df80b64cd84e5b7f15eca56a5f6 | fb82fdf706863465b1f357cd1fa0447474cd8a70 | /ServerComponent/venv/Lib/site-packages/pythonrv/test/mock_and_helpers.py | 296b68023a48690ab41c922fedc65e25c621d2ac | [
"MIT"
] | permissive | CDU55/FakeNews | d79e2a069b3f1392f779d5b2256cd54c696e789a | 707bd48dd78851081d98ad21bbdadfc2720bd644 | refs/heads/main | 2023-02-20T06:27:18.618837 | 2021-01-17T15:14:27 | 2021-01-17T15:14:27 | 305,167,221 | 0 | 1 | MIT | 2020-12-07T19:51:46 | 2020-10-18T18:16:49 | Python | UTF-8 | Python | false | false | 823 | py | # -*- coding: utf-8 -*-
import unittest
import logging.handlers
class MockLoggingHandler(logging.Handler):
def __init__(self, *args, **kwargs):
self.reset()
super(MockLoggingHandler, self).__init__(*args, **kwargs)
def emit(self, record):
self.messages.append(record)
def reset(self):
self.messages = []
class TestLogging(unittest.TestCase):
def setUp(self):
self.logging_handler = MockLoggingHandler()
logging.getLogger('pythonrv').addHandler(self.logging_handler)
def tearDown(self):
logging.getLogger('pythonrv').removeHandler(self.logging_handler)
def assertLog(self, level, msg):
record = self.logging_handler.messages[-1]
self.assertEquals(record.levelno, level)
self.assertEquals(record.getMessage(), msg)
| [
"[email protected]"
] | |
cea172f7fc2168b6a992da358c297658f258989b | 7b7bfbfebd627a3ccfdd52bb7164fa4f94cda7fc | /optic_store/optic_store/doctype/group_discount/test_group_discount.py | 6abaa25f486d759b1930cf23d4f05c363a577971 | [
"MIT"
] | permissive | f-9t9it/optic_store | d117b7ef7c4107ec15d8194fc57d66a18aff5945 | 4682ae99cdb2cbfb1ff99196398d7379b4b6c8f1 | refs/heads/master | 2022-07-01T10:29:54.783550 | 2022-06-21T14:34:40 | 2022-06-21T14:34:40 | 171,165,708 | 23 | 43 | NOASSERTION | 2022-06-21T14:21:16 | 2019-02-17T19:58:33 | Python | UTF-8 | Python | false | false | 209 | py | # -*- coding: utf-8 -*-
# Copyright (c) 2019, 9T9IT and Contributors
# See license.txt
from __future__ import unicode_literals
import frappe
import unittest
class TestGroupDiscount(unittest.TestCase):
pass
| [
"[email protected]"
] | |
bb2c39b81b685aa1f33c7cc8aa706e7e60cb2876 | de24f83a5e3768a2638ebcf13cbe717e75740168 | /moodledata/vpl_data/414/usersdata/315/80495/submittedfiles/av1_programa2.py | 736f8af8c79f8c3b81b483e1639ddc5f9b14123c | [] | no_license | rafaelperazzo/programacao-web | 95643423a35c44613b0f64bed05bd34780fe2436 | 170dd5440afb9ee68a973f3de13a99aa4c735d79 | refs/heads/master | 2021-01-12T14:06:25.773146 | 2017-12-22T16:05:45 | 2017-12-22T16:05:45 | 69,566,344 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 818 | py | # -*- coding: utf-8 -*-
matricula = input('Matricula: ')
nota1 = float(input('Digite nota1 com 1 casa decimal: '))
nota2 = float(input('Digite nota2 com 1 casa decimal: '))
nota3 = float(input('Digite nota3 com 1 casa decimal: '))
ME = float(input('Digite ME com 1 casa decimal: '))
MA = (nota1 + nota2*2 + nota3*3 + ME) / 7
if MA >= 9:
    print(matricula)
    print('%.1f' % MA)
    print('A')
    print('APROVADO')
elif MA >= 7.5 and MA < 9:
    print(matricula)
    print('%.1f' % MA)
    print('B')
    print('APROVADO')
elif MA >= 6 and MA < 7.5:
    print(matricula)
    print('%.1f' % MA)
    print('C')
    print('APROVADO')
elif MA >= 4 and MA < 6:
    print(matricula)
    print('%.1f' % MA)
    print('D')
    print('REPROVADO')
else:
    print(matricula)
    print('%.1f' % MA)
    print('E')
    print('REPROVADO')
| [
"[email protected]"
] | |
07306b347c9a516d94e561248d0074b5d9a8a4ba | 5c484d9ecd194ad31555303aff004b739fc6b564 | /stats/migrations/0046_offer_last_active_at.py | 16f9b8d611c0ee397ebab74f2f2c0e76da862727 | [] | no_license | bloogrox/hasoffers-kpi | 15b82c9287fc4a62e14e4b637c3d57d03db54233 | 227472f0090856048d1fdb0591ffbb15b575a311 | refs/heads/master | 2021-01-01T17:31:14.448122 | 2017-11-08T16:26:40 | 2017-11-08T16:26:40 | 98,095,120 | 0 | 1 | null | 2017-08-12T11:43:31 | 2017-07-23T12:05:31 | Python | UTF-8 | Python | false | false | 584 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.10.6 on 2017-10-18 00:44
from __future__ import unicode_literals
import datetime
from django.db import migrations, models
from django.utils.timezone import utc
class Migration(migrations.Migration):
dependencies = [
('stats', '0045_offer_status'),
]
operations = [
migrations.AddField(
model_name='offer',
name='last_active_at',
field=models.DateTimeField(default=datetime.datetime(2010, 1, 1, 0, 0, tzinfo=utc)),
preserve_default=False,
),
]
| [
"[email protected]"
] | |
45c71549dbaad7e978eac089619642a8467b3a13 | 6067d10ccf61070ff2f7ec00068a4a138d835b48 | /analyze_error_rate_book.py | fa1ec11f8ee79584a35f6f5e71fdd08a08b89484 | [] | no_license | Doreenruirui/OCR | aa3c7f3bc34a98951288ab6608e8e4c7373a2bda | 693562e966e2b18b759aabeac23c068cf8c59a7c | refs/heads/master | 2021-07-09T08:41:04.763718 | 2018-08-07T19:34:34 | 2018-08-07T19:34:34 | 91,371,468 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,416 | py | import os
from os.path import join, exists
import numpy as np
import sys
from collections import OrderedDict
from plot_curve import plotBar, plot
# folder_data = '/Users/doreen/Documents/Experiment/dataset/OCR/'
folder_data = '/gss_gpfs_scratch/dong.r/Dataset/OCR'
#folder_data = '/home/rui/Dataset/OCR'
def merge_error_rate(cur_folder):
cur_folder = join(folder_data, cur_folder)
error = [[], [], []]
num_line = []
books = []
for line in file(join(cur_folder, 'book.man_wit.test.ec.txt')):
items = line.strip('\n').split('\t')
error[0].append(float(items[2]))
num_line.append(int(items[1]))
books.append(items[0])
for line in file(join(cur_folder, 'book.man_wit.test.single.ec.txt')):
items = line.strip('\n').split('\t')
error[1].append(float(items[2]))
for line in file(join(cur_folder, 'book.man_wit.test.avg.ec.txt')):
items = line.strip('\n').split('\t')
error[2].append(float(items[2]))
ngroup = len(books)
    print 'AVG better than SINGLE:'
    num_avg = 0
    # keep the file open for both passes: the second loop writes to f_ as well
    with open(join(cur_folder, 'error_rate_per_book.txt'), 'w') as f_:
        for i in range(ngroup):
            if error[2][i] < error[1][i]:
                num_avg += 1
            f_.write('\t'.join(map(str, [books[i], num_line[i], error[0][i], error[1][i], error[2][i]])) + '\n')
        # print books[i], num_line[i], error[0][i], error[1][i], error[2][i]
        # f_.write('+++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++++')
        print num_avg
        print 'AVG worse than SINGLE:'
        num_avg = 0
        for i in range(ngroup):
            if error[2][i] > error[1][i]:
                num_avg += 1
                f_.write('\t'.join(map(str, [books[i], num_line[i], error[0][i], error[1][i], error[2][i]])) + '\n')
        print num_avg
# stickers = books
# stickers = [ele for ele in range(len(books))]
# lenlabels = ['OCR', 'SINGLE', 'AVG']
# xlabel = 'Book Name'
# ylabel = 'Error Rate'
# title = 'Error Rate Per Book'
# figure_name = 'Results/Error_Rate_Per_Book.png'
# error = [error[0][:10], error[1][:10], error[2][:10]]
# plotBar(ngroup, error, stickers, lenlabels, xlabel, ylabel, title, figure_name, 0.2)
# plot(stickers, error, xlabel, ylabel, [0, 380], [0, 1], lenlabels, title, figure_name)
arg_folder = sys.argv[1]
merge_error_rate(arg_folder)
| [
"[email protected]"
] | |
fda84663dd476b25868d4899cb14568ead0f5dad | 621a40fa363dc0c32c96a4c8fdfe9142877e2ff1 | /ietf/mailtrigger/admin.py | e192a2066c255795a6d46e26d189225602b69f30 | [
"BSD-3-Clause",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | omunroe-com/ietfdb2 | d9c40bebe4b25059f810c70dd1370cca30cb3c36 | aeaae292fbd55aca1b6043227ec105e67d73367f | refs/heads/master | 2020-04-04T21:05:56.067430 | 2018-11-05T09:08:27 | 2018-11-05T09:08:27 | 156,273,382 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 528 | py | from django.contrib import admin
from ietf.mailtrigger.models import MailTrigger, Recipient
class RecipientAdmin(admin.ModelAdmin):
list_display = [ 'slug', 'desc', 'template', 'has_code', ]
def has_code(self, obj):
return hasattr(obj,'gather_%s'%obj.slug)
has_code.boolean = True
admin.site.register(Recipient, RecipientAdmin)
class MailTriggerAdmin(admin.ModelAdmin):
list_display = [ 'slug', 'desc', ]
filter_horizontal = [ 'to', 'cc', ]
admin.site.register(MailTrigger, MailTriggerAdmin)
| [
"[email protected]"
] | |
de4237a85539bc2bf65a12af93a1b4f75141497e | 0e7aed5eef2e1d132a7e75dd8f439ae76c87639c | /python/863_All_Nodes_Distance_K_in_Binary_Tree.py | b048d616be21233247e4a98d9029d20749033190 | [
"MIT"
] | permissive | liaison/LeetCode | 2a93df3b3ca46b34f922acdbc612a3bba2d34307 | bf03743a3676ca9a8c107f92cf3858b6887d0308 | refs/heads/master | 2022-09-05T15:04:19.661298 | 2022-08-19T19:29:19 | 2022-08-19T19:29:19 | 52,914,957 | 17 | 4 | null | null | null | null | UTF-8 | Python | false | false | 4,950 | py | """
We are given a binary tree (with root node root), a target node, and an integer value K.
Return a list of the values of all nodes that have a distance K from the target node. The answer can be returned in any order.
The distance between a node and its child nodes is 1.
Input: root = [3,5,1,6,2,0,8,null,null,7,4], target = 5, K = 2
Output: [7,4,1]
@author: Lisong Guo <[email protected]>
"""
# Definition for a binary tree node.
# class TreeNode:
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
class Solution:
def buildGraph(self, node, parent, graph):
if node is None:
return
if parent is not None:
graph[node].append(parent)
if node.left is not None:
graph[node].append(node.left)
self.buildGraph(node.left, node, graph)
if node.right is not None:
graph[node].append(node.right)
self.buildGraph(node.right, node, graph)
def distanceK(self, root, target, K):
"""
:type root: TreeNode
:type target: TreeNode
:type K: int
:rtype: List[int]
"""
from collections import defaultdict
# vetex: [parent, left, right]
graph = defaultdict(list)
# DFS to build graph
self.buildGraph(root, None, graph)
# BFS to retrieve the nodes with given distance
# Starting from the target node
q = [(target, 0)]
# keep the records, since the graph is all connected
visited = set()
# results
ans = []
while q:
node, distance = q.pop(0)
if node in visited:
continue
visited.add(node)
# we've reached the desired distance/radius
if K == distance:
ans.append(node.val)
# we haven't reached the desired distance, keep going
elif distance < K:
for child in graph[node]:
q.append((child, distance+1))
# exceed the desired distance
# No need to go further
return ans
# Definition for a binary tree node.
# class TreeNode:
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
class Solution2:
def buildParentMap(self, node, parent, parentMap):
if node is None:
return
parentMap[node] = parent
self.buildParentMap(node.left, node, parentMap)
self.buildParentMap(node.right, node, parentMap)
def distanceK(self, root, target, K):
"""
:type root: TreeNode
:type target: TreeNode
:type K: int
:rtype: List[int]
"""
# node: parent
parentMap = {}
# DFS to build the map that maps a node to its parent.
self.buildParentMap(root, None, parentMap)
# keep the records, since the graph is all connected
visited = set()
# results
ans = []
# Again, DFS to retrieve the nodes within the given distance
# this time with the help of the parentMap.
# Starting from the target node
def dfs(node, distance):
if node is None or node in visited:
return
visited.add(node)
if distance == K:
ans.append(node.val)
elif distance < K:
dfs(node.left, distance+1)
dfs(node.right, distance+1)
dfs(parentMap[node], distance+1)
# else exceed the scope, no need to explore further
dfs(target, 0)
return ans
# Definition for a binary tree node.
# class TreeNode:
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
class Solution:
def distanceK(self, root: TreeNode, target: TreeNode, k: int) -> List[int]:
graph = defaultdict(list)
# build a non-directional graph, i.e. bi-directional graph
def build_graph(node):
nonlocal graph
if not node:
return
for next_node in [node.left, node.right]:
if next_node:
graph[node.val].append(next_node.val)
graph[next_node.val].append(node.val)
build_graph(next_node)
build_graph(root)
# run a BFS/DFS exploration
queue = [(target.val, 0)]
visited = set([target.val])
output = []
while queue:
curr, distance = queue.pop()
if distance == k:
output.append(curr)
elif distance < k:
for next_val in graph[curr]:
if next_val not in visited:
visited.add(next_val)
queue.append((next_val, distance+1))
return output
| [
"[email protected]"
] | |
c706f72bc0673621beb535e16fba9c2156cb3234 | 39f13506f0f55856639a77d8d9ff2832e980d577 | /setup.py | e0fd11043af6a8d71832ec0ccfd4bc4f280fba30 | [] | no_license | HD60Hz-Open/winfspy | 75e0fd24a6d7edfc00f07c6ecf82f0ad2d0759d3 | 8ea5d2c4f510337ac527eaa8982c3e7c6f4e08c3 | refs/heads/master | 2020-09-13T06:42:22.619496 | 2019-11-12T11:15:07 | 2019-11-12T11:15:07 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,188 | py | #!/usr/bin/env python
import os
import sys
from setuptools import setup, find_packages
os.chdir(os.path.dirname(sys.argv[0]) or ".")
# Awesome hack to load `__version__`
__version__ = None
exec(open("src/winfspy/_version.py", encoding="utf-8").read())
requirements = open("requirements.txt").read().split("\n")
setup(
name="winfspy",
version=__version__,
description="CFFI bindings for WinFSP",
long_description=open("README.rst", "rt").read(),
url="https://github.com/Scille/winfspy",
author="Emmanuel Leblond",
author_email="[email protected]",
classifiers=[
"Development Status :: 4 - Beta",
"Operating System :: Microsoft :: Windows",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"License :: OSI Approved :: BSD License",
],
package_dir={"": "src"},
packages=find_packages(where="src", exclude=["_cffi_src", "_cffi_src.*"]),
install_requires=requirements,
setup_requires=requirements,
cffi_modules=["./src/_cffi_src/build_bindings.py:ffibuilder"],
# for cffi
zip_safe=False,
)
| [
"[email protected]"
] | |
33e574f5532767f44ed9bc98132d94893ef78fff | 17fe32a70be82d9fd6c3268b840226b5567c8b29 | /pycox/__init__.py | d2de313f848c3a8d78f26de71902f3b2887cf0c3 | [
"MIT",
"BSD-2-Clause"
] | permissive | georgehc/dksa | dbb7161a75b8206d3d469bb5b966ed7a0f84d86c | bcd9eab6c9ded47f5b166cf1351b06e26e0c8f90 | refs/heads/master | 2023-08-02T06:15:12.472386 | 2021-10-01T17:47:25 | 2021-10-01T17:47:25 | 282,355,975 | 11 | 2 | null | null | null | null | UTF-8 | Python | false | false | 294 | py | # -*- coding: utf-8 -*-
"""Top-level package for pycox."""
__author__ = """Haavard Kvamme"""
__email__ = '[email protected]'
__version__ = '0.2.1'
import pycox.datasets
import pycox.evaluation
import pycox.preprocessing
import pycox.simulations
import pycox.utils
import pycox.models
| [
"[email protected]"
] | |
4ef736fdd3d0ba141bf8f68f9a6b5d5711963d17 | 0fcf4e4b1c61fad0829828fb0294dd5faceb3eaa | /app/app/settings.py | 12b2893abccd49a2589c099d32292f5162f515e3 | [
"MIT"
] | permissive | MrRezoo/recipe-app-api | 465127526b7d00fb3b454b18a6bdcf1009e57c83 | 6b4c236490f1dd0a6bcce644bc0ae0ffe376ab8d | refs/heads/main | 2023-07-14T22:48:02.635837 | 2021-08-24T09:27:55 | 2021-08-24T09:27:55 | 395,381,816 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,593 | py | """
Django settings for app project.
Generated by 'django-admin startproject' using Django 3.2.
For more information on this file, see
https://docs.djangoproject.com/en/3.2/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/3.2/ref/settings/
"""
import os
from pathlib import Path
# Build paths inside the project like this: BASE_DIR / 'subdir'.
BASE_DIR = Path(__file__).resolve().parent.parent
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.2/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'django-insecure-u(fk8b8qy=v9-em+cgzg_7i6i6kq*_+%0ly))@k6w08)@965c9'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = ['*']
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
# Local apps
'core.apps.CoreConfig',
'user.apps.UserConfig',
'recipe.apps.RecipeConfig',
# Third party apps
'rest_framework',
'rest_framework.authtoken',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'app.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'app.wsgi.application'
# Database
# https://docs.djangoproject.com/en/3.2/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql',
'HOST': os.environ.get('DB_HOST'),
'NAME': os.environ.get('DB_NAME'),
'USER': os.environ.get('DB_USER'),
'PASSWORD': os.environ.get('DB_PASS'),
}
}
# Password validation
# https://docs.djangoproject.com/en/3.2/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/3.2/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.2/howto/static-files/
STATIC_URL = '/static/'
# Default primary key field type
# https://docs.djangoproject.com/en/3.2/ref/settings/#default-auto-field
DEFAULT_AUTO_FIELD = 'django.db.models.BigAutoField'
AUTH_USER_MODEL = 'core.User'
| [
"[email protected]"
] | |
dcc2f0d21fd217cfec6a78e8d4135813fe46aa8c | b7f88c6e703358c6bb4072daf407b1969bccabac | /stiff/wordnet/base.py | b97e58725ff95f7cfd579de28fe244194dbbfd47 | [
"Apache-2.0",
"LicenseRef-scancode-proprietary-license"
] | permissive | frankier/STIFF | edf1a34604991a9a23652073b321478e4809697d | c69060a1ba8ee36b660def9c5215c74bf5310e0c | refs/heads/master | 2023-03-08T17:03:32.317399 | 2020-10-06T12:16:50 | 2020-10-06T12:16:50 | 133,658,142 | 2 | 1 | Apache-2.0 | 2023-02-22T23:29:19 | 2018-05-16T11:51:40 | Python | UTF-8 | Python | false | false | 1,099 | py | from abc import ABC, abstractmethod
from typing import Dict, List, Tuple, Callable, Iterable
from nltk.corpus.reader import Lemma, Synset
from finntk.wordnet.utils import ss2pre
def default_mapper(synset_obj: Synset) -> str:
return ss2pre(synset_obj)
class ExtractableWordnet(ABC):
_synset_mappers: Dict[str, Callable[[Lemma], str]] = {}
@staticmethod
@abstractmethod
def lang() -> str:
pass
@staticmethod
@abstractmethod
def lemma_names() -> Dict[str, List[str]]:
pass
@classmethod
def synset_group_lemmas(
cls, wordnet_lemmas: Dict[str, List[Lemma]]
) -> Iterable[List[Tuple[str, Lemma]]]:
from .utils import synset_group_lemmas
return synset_group_lemmas(wordnet_lemmas, cls)
@classmethod
def canonical_synset_id(cls, wn: str, lemma_obj: Lemma) -> str:
return cls.canonical_synset_id_of_synset(wn, lemma_obj.synset())
@classmethod
def canonical_synset_id_of_synset(cls, wn: str, synset_obj: Synset) -> str:
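        # falls back to ``default_mapper`` (ss2pre) when no mapper is
        # registered for this wordnet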
return cls._synset_mappers.get(wn, default_mapper)(synset_obj)
| [
"[email protected]"
] | |
0a2badee9e4515c51818ced076629a9d87578423 | 41523dd4871e8ed1043d2b3ddf73417fcbdde209 | /day05/中国国旗.py | 1d68bd04ab6bbc59b915825fa15d66adfd4f3000 | [] | no_license | WayneChen1994/Python1805 | 2aa1c611f8902b8373b8c9a4e06354c25f8826d6 | a168cd3b7749afc326ec4326db413378fd3677d5 | refs/heads/master | 2020-03-30T23:19:00.773288 | 2018-11-02T10:47:40 | 2018-11-02T10:47:40 | 151,697,105 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,630 | py | #! /usr/bin/env python
# -*- coding: utf-8 -*-
# author: Wayne.Chen
'''
Draw the Chinese national flag using the turtle module
'''
import turtle
turtle.begin_fill()
turtle.fillcolor("red")
for x in range(2):
turtle.forward(300)
turtle.left(90)
turtle.forward(200)
turtle.left(90)
turtle.end_fill()
turtle.up()
turtle.left(90)
turtle.forward(150)
turtle.right(90)
turtle.forward(20)
turtle.down()
turtle.begin_fill()
turtle.fillcolor("yellow")
for x in range(5):
turtle.forward(20)
turtle.left(72)
turtle.forward(20)
turtle.right(144)
turtle.end_fill()
turtle.up()
turtle.forward(60)
turtle.left(90)
turtle.forward(30)
turtle.right(144)
turtle.down()
turtle.begin_fill()
turtle.fillcolor("yellow")
for x in range(5):
turtle.forward(7)
turtle.left(72)
turtle.forward(7)
turtle.right(144)
turtle.end_fill()
turtle.up()
turtle.forward(25)
turtle.left(36)
turtle.down()
turtle.begin_fill()
turtle.fillcolor("yellow")
for x in range(5):
turtle.forward(7)
turtle.left(72)
turtle.forward(7)
turtle.right(144)
turtle.end_fill()
turtle.up()
turtle.right(90)
turtle.forward(20)
turtle.left(90)
turtle.down()
turtle.begin_fill()
turtle.fillcolor("yellow")
for x in range(5):
turtle.forward(7)
turtle.left(72)
turtle.forward(7)
turtle.right(144)
turtle.end_fill()
turtle.up()
turtle.right(90)
turtle.forward(10)
turtle.down()
turtle.begin_fill()
turtle.fillcolor("yellow")
for x in range(5):
turtle.forward(7)
turtle.left(72)
turtle.forward(7)
turtle.right(144)
turtle.end_fill()
turtle.hideturtle()
turtle.done()
| [
"[email protected]"
] | |
0111d066ee2b49d9fedaa8b7b93dce650989fde8 | e32a75c44ef9c964bc5f97712c8e0e845ee3f6ca | /train_vqa_vqg_flt_cand_models.py | 03a85bba83ea363002c5bfbe143b23c0809319e4 | [] | no_license | ankita-kalra/ivqa_belief_set | 29c40ec4076433ac412728aea603e4e69ce530eb | 6ebba50ff001e1af6695bb3f4d2643e7072ee153 | refs/heads/master | 2020-04-05T17:17:00.834303 | 2018-08-27T09:59:16 | 2018-08-27T09:59:16 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,109 | py | from __future__ import division
import tensorflow as tf
import os
# from models.model_creater import get_model_creation_fn
from config import TrainConfig, ModelConfig
import training_util
from readers.vqa_naive_vqg_flt_cand_data_fetcher import AttentionDataReader as Reader
# from readers.semi_naive_data_fetcher import SemiReader as Reader
# from naive_ensemble_model import NaiveEnsembleModel as model_fn
from models.vqa_base import BaseModel as model_fn
tf.flags.DEFINE_string("model_type", "VQA-BaseNorm",
"Select a model to train.")
tf.flags.DEFINE_string("version", "v1",
"Dataset version used for training, v1 for VQA 1.0, v2 "
"for VQA 2.0.")
tf.flags.DEFINE_string("train_dir", "model/%s_%s_fltcand",
"Directory for saving and loading model checkpoints.")
tf.flags.DEFINE_integer("number_of_steps", 1000000, "Number of training steps.")
tf.flags.DEFINE_integer("log_every_n_steps", 10,
"Frequency at which loss and global step are logged.")
tf.flags.DEFINE_string("model_trainset", "kptrain",
"Which split is the model trained on")
tf.flags.DEFINE_boolean("use_var", True,
"Use variational VQA or VQA.")
FLAGS = tf.flags.FLAGS
tf.logging.set_verbosity(tf.logging.INFO)
def train():
_model_suffix = 'var_' if FLAGS.use_var else ''
model_config = ModelConfig()
training_config = TrainConfig()
# Get model
# model_fn = get_model_creation_fn(FLAGS.model_type)
# Create training directory.
train_dir = FLAGS.train_dir % (FLAGS.model_trainset, FLAGS.model_type)
do_counter_sampling = FLAGS.version == 'v2'
if not tf.gfile.IsDirectory(train_dir):
tf.logging.info("Creating training directory: %s", train_dir)
tf.gfile.MakeDirs(train_dir)
g = tf.Graph()
with g.as_default():
# Build the model.
model = model_fn(model_config,
phase='train')
model.build()
# Set up the learning rate
learning_rate = tf.constant(training_config.initial_learning_rate)
def _learning_rate_decay_fn(learn_rate, global_step):
return tf.train.exponential_decay(
learn_rate,
global_step,
decay_steps=training_config.decay_step,
decay_rate=training_config.decay_factor, staircase=False)
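        # i.e. lr(step) = initial_learning_rate * decay_factor ** (step / decay_step),
        # decaying continuously since staircase=False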
learning_rate_decay_fn = _learning_rate_decay_fn
train_op = tf.contrib.layers.optimize_loss(
loss=model.loss,
global_step=model.global_step,
learning_rate=learning_rate,
optimizer=training_config.optimizer,
clip_gradients=training_config.clip_gradients,
learning_rate_decay_fn=learning_rate_decay_fn)
# Set up the Saver for saving and restoring model checkpoints.
saver = tf.train.Saver(max_to_keep=training_config.max_checkpoints_to_keep)
# setup summaries
summary_op = tf.summary.merge_all()
# create reader
model_name = os.path.split(train_dir)[1]
reader = Reader(batch_size=64,
subset=FLAGS.model_trainset,
model_name=model_name,
feat_type='res5c',
version=FLAGS.version,
counter_sampling=do_counter_sampling,
model_suffix=_model_suffix)
# reader = Reader(batch_size=64,
# known_set='kprestval',
# unknown_set='kptrain', # 'kptrain'
# un_ratio=1,
# hide_label=False)
# Run training.
training_util.train(
train_op,
train_dir,
log_every_n_steps=FLAGS.log_every_n_steps,
graph=g,
global_step=model.global_step,
number_of_steps=FLAGS.number_of_steps,
init_fn=model.init_fn,
saver=saver, reader=reader,
feed_fn=model.fill_feed_dict)
def main(_):
with tf.Graph().as_default():
train()
if __name__ == '__main__':
tf.app.run()
| [
"[email protected]"
] | |
10aca41decc98ad1133655ae582bd8a46dab90e5 | 660c4ba43d91999872953ec27c6a72673e3239a0 | /anrg/test_cleaning.py | a2e4168afcf7e7f5cbeb6843274636eed8205397 | [] | no_license | zaxliu/howmuchrainii | 4a50c93be82b15bdab47043a7f5bf17224f277b3 | 3804ba4c6b412aadd400ab793d4245d041338fba | refs/heads/master | 2021-01-09T21:55:47.185470 | 2016-03-15T04:52:07 | 2016-03-15T04:52:07 | 53,915,578 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,470 | py | from cleaning import TargetThresholdFilter, LogPlusOne
import numpy as np
import pandas as pd
from sklearn.datasets import load_boston
from sklearn.pipeline import Pipeline
from sklearn.linear_model import LinearRegression
# # Check basic functionality
# # only works for pandas DataFrame and Series because we are modifying shape in-place
# X1 = pd.DataFrame(np.array([[1, 2], [3, 4], [5, 6], [7, 8]]))
# X2 = pd.DataFrame(np.array([[1, 2], [3, 4], [5, 6], [7, 8]]))
# y1 = pd.Series(np.array([1, 2, 3, 4]))
# y2 = pd.Series(np.array([1, 2, 3, 4]))
# ttf = TargetThresholdFilter(threshold=3)
# ttf.fit_transform(X1, y1)
# ttf.transform(X2, y2)
# print X1
# print y1
# print X2
# print y2
#
# # sklearn pipe compatability
# print "==================="
# X1 = pd.DataFrame(np.array([[1, 2], [3, 4], [5, 6], [7, 8]]))
# X2 = pd.DataFrame(np.array([[1, 2], [3, 4], [5, 6], [7, 8]]))
# y1 = pd.Series(np.array([1, 2, 3, 4]))
# y2 = pd.Series(np.array([1, 2, 3, 4]))
# steps = [('ttf', TargetThresholdFilter(threshold=1)), ('lr', LinearRegression())]
# pip = Pipeline(steps)
# pip.fit(X1, y1)
# print 'X1'
# print X1
# print 'y1'
# print y1
# print 'X2'
# print X2
# print 'predict2'
# print pip.predict(X2)
# log(1+y)
X1 = pd.DataFrame(np.array([[1, 2], [3, 4], [5, 6], [7, 8]]))
y1 = pd.Series(np.array([1, 2, 3, 4]))
y2 = pd.Series(np.array([1, 2, 3, 4]))
lpo = LogPlusOne()
lpo.fit_transform(X1, y1)
print X1
print y1
print lpo.transform(X1)
print lpo.metric(y2, y1)
| [
"[email protected]"
] | |
f83f9b3a5f09f2fd6c191894b8d4e267df122003 | 8be217fe977aa0bcd9e375c75b0fb522f5bf0101 | /univaluedbinarytree965.py | d9f96d60ffccf5ba0e578cdfe667701be77a564f | [] | no_license | blueones/LeetcodePractices | c63a5e773bebea17e988e8bb4962e012d7d402ba | 194375ba0c07e420f420aafec98aede2f9f5d8fa | refs/heads/master | 2021-07-14T14:21:55.389334 | 2021-01-24T22:13:21 | 2021-01-24T22:13:21 | 230,814,709 | 0 | 1 | null | 2020-02-25T02:58:04 | 2019-12-29T23:18:25 | Python | UTF-8 | Python | false | false | 1,585 | py | # Definition for a binary tree node.
# class TreeNode:
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
class Solution:
def isUnivalTree(self, root: TreeNode) -> bool:
def dfs(node,value):
if node == None:
return True
if node.val == value:
return dfs(node.left,value) and dfs(node.right,value)
else:
return False
if root == None:
return True
else:
valueN = root.val
return dfs(root,valueN)
class Solution2:
    # Pure recursion: for every node, check node.val == node.left.val == node.right.val
def isUnivalTree(self,root):
if root == None:
return True
if root.right:
if root.right.val!= root.val:
return False
if root.left:
if root.left.val!=root.val:
return False
return self.isUnivalTree(root.left) and self.isUnivalTree(root.right)
class Solution3:
#BFS
def isUnivalTree(self,root):
def bfs(node,valueN):
queueList = [node]
while queueList != []:
currentN = queueList.pop(0)
if currentN != None:
if currentN.val != valueN:
return False
queueList.append(currentN.left)
queueList.append(currentN.right)
return True
if root == None:
return True
return bfs(root,root.val) | [
"[email protected]"
] | |
413f02aa60bcb2e786a66cc04ea9474bea452cf6 | 383845f6cd8e2a32f95e5970d2f7e9fb755b6598 | /5days/day3/19-error.py | b8d052bc39972096ef182b3e732bfa1a61d518fd | [] | no_license | ash/python-tut | e553e9e3a4ecb866e87c7ce9f04d7f517244ac01 | f89f079f13dd29eef6ba293b074a0087272dc8a6 | refs/heads/master | 2022-12-17T03:20:40.633262 | 2018-03-21T15:04:09 | 2018-03-21T15:04:09 | 96,119,629 | 0 | 0 | null | 2022-12-08T00:55:59 | 2017-07-03T14:29:02 | Python | UTF-8 | Python | false | false | 39 | py | print('Hello')
print(1/0)
print('Bye')
| [
"[email protected]"
] | |
068b03e6832ce421b83bd23c56f5f42c8e3c05c0 | 9a1f105ce6385633e7da47fb13eb2e8db66dbddb | /awswrangler/__metadata__.py | 8ec6474ad28c7b7000ec80205d2f87d6c15e164d | [
"Apache-2.0"
] | permissive | datacoral/aws-data-wrangler | c47e2d45f2e643b62479f6b0b8f5fdbd8367af9b | bb9eb52baf374c616289daa932dc855dcd384994 | refs/heads/master | 2021-05-27T04:27:23.700657 | 2020-05-18T13:45:30 | 2020-05-18T13:45:30 | 254,217,334 | 0 | 0 | null | 2020-04-08T22:45:28 | 2020-04-08T22:45:27 | null | UTF-8 | Python | false | false | 266 | py | """Metadata Module.
Source repository: https://github.com/awslabs/aws-data-wrangler
Documentation: https://aws-data-wrangler.readthedocs.io/
"""
__title__ = "awswrangler"
__description__ = "Pandas on AWS."
__version__ = "1.1.2"
__license__ = "Apache License 2.0"
| [
"[email protected]"
] | |
1de069266182493d06adf2a86d6e505eff242889 | 2b5dfacdb7389aefff64c67fac863e3f82d3723e | /source/tygame-sdk/src/tysdk/entity/paythird/paycattuyouweixin.py | 049b97dab1b518acc7e1109f6d54bd64be7e7a9e | [] | no_license | hi-noikiy/hall0 | 54ef76c715f7ac7fec4c9ca175817e12f60fbd6a | 21ea94c5b048bc611fb1557ac0b6e3ef4fdbbc09 | refs/heads/master | 2020-04-08T21:58:55.239106 | 2018-01-15T14:58:32 | 2018-01-15T14:58:32 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 920 | py | # -*- coding=utf-8 -*-
import copy
from tyframework.context import TyContext
class TuYouPayCatTuyouWeixin(object):
@classmethod
def charge_data(cls, chargeinfo):
try:
del chargeinfo['chargeType']
except:
pass
more_categories = TyContext.Configure.get_global_item_json('more_categories_tuyou_weixin')
charge_cats = copy.deepcopy(more_categories)
price = chargeinfo['chargeTotal']
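        # apparent intent: phone-charge cards cannot cover orders above 500,
        # so drop that payment category for large charges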
if price > 500:
for i in xrange(len(charge_cats)):
if 'CAT_PHONECHARGE_CARD' == charge_cats[i]['category']:
del charge_cats[i]
break
for cat in charge_cats:
# cat['desc'] = ''
cat['summary'] = chargeinfo['diamondName']
if 'ali' in cat['paytype']:
cat['tag'] = 'TAG_CHAOZHI'
chargeinfo['chargeCategories'] = charge_cats
| [
"[email protected]"
] | |
8de1cdc5e429d2262b6e0aa7345e4f26efe3ec7e | ff68cde9ba7196dee310d8e0a62810cbaf285e08 | /fresh_shop/user/views.py | d5adde24c627a4552ac824bd9fe75c80e537c6a1 | [] | no_license | guilinxians/fresh_shop | 1fc3d269212652f229c51385ca654c94ce0c580e | 8e6692c9737643bc7202ece9054a95cde88435ab | refs/heads/master | 2020-04-17T21:41:46.670901 | 2019-01-22T09:27:37 | 2019-01-22T09:27:37 | 166,961,820 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,389 | py | from django.contrib.auth.hashers import make_password
from django.http import HttpResponseRedirect
from django.urls import reverse
from django.shortcuts import render
from user.forms import RegisterForm, LoginForm, AddressForm
from user.models import User, UserAddress
def register(request):
if request.method == 'GET':
return render(request, 'register.html')
if request.method == 'POST':
        # validate the submission with RegisterForm
form = RegisterForm(request.POST)
if form.is_valid():
            # username not already in the database, password matches its
            # confirmation, and the email format is valid
username = form.cleaned_data['user_name']
password = make_password(form.cleaned_data['pwd'])
email = form.cleaned_data['email']
User.objects.create(username=username,
password=password,
email=email)
return HttpResponseRedirect(reverse('user:login'))
else:
            # collect the error messages from the failed form validation
errors = form.errors
return render(request, 'register.html', {'errors': errors})
def login(request):
if request.method == 'GET':
return render(request, 'login.html')
if request.method == 'POST':
form = LoginForm(request.POST)
if form.is_valid():
            # username exists and the password matches
username = form.cleaned_data['username']
user = User.objects.filter(username=username).first()
request.session['user_id'] = user.id
return HttpResponseRedirect(reverse('goods:index'))
else:
errors = form.errors
return render(request, 'login.html', {'errors': errors})
def logout(request):
if request.method == 'GET':
        # remove the user_id key from the session
del request.session['user_id']
        # remove the cart goods info from the session
if request.session.get('goods'):
del request.session['goods']
return HttpResponseRedirect(reverse('goods:index'))
def user_site(request):
if request.method == 'GET':
user_id = request.session.get('user_id')
user_address = UserAddress.objects.filter(user_id=user_id)
activate = 'site'
return render(request, 'user_center_site.html', {'user_address':user_address, 'activate': activate})
if request.method == 'POST':
form = AddressForm(request.POST)
if form.is_valid():
username = form.cleaned_data['username']
address = form.cleaned_data['address']
postcode = form.cleaned_data['postcode']
mobile = form.cleaned_data['mobile']
user_id = request.session.get('user_id')
UserAddress.objects.create(user_id=user_id,
address=address,
signer_name=username,
signer_mobile=mobile,
signer_postcode=postcode)
return HttpResponseRedirect(reverse('user:user_site'))
else:
errors = form.errors
return render(request, 'user_center_site.html', {'errors': errors})
def user_info(request):
if request.method == 'GET':
activate = 'info'
return render(request, 'user_center_info.html', {'activate':activate})
| [
"[email protected]"
] | |
fb42871c985ed699ec54105fb0e712bffac27ea8 | da853ef2c9946344ae34829355a507052f1af411 | /PycharmProjects/assgn7.py | fbb4d1e9b999d3295845ff7ab7f15710e0c8ea63 | [] | no_license | SubhamSingh1/star | c4f3d2ac0470e81847fef4436c0cbd3e1ea9bf6c | 33531c1f224e0a553d93d877724db673bf6941db | refs/heads/master | 2022-12-21T13:27:03.969571 | 2021-10-01T07:31:17 | 2021-10-01T07:31:17 | 235,774,208 | 0 | 0 | null | 2022-12-14T11:40:12 | 2020-01-23T10:43:20 | Python | UTF-8 | Python | false | false | 118 | py | yr= int(input("Enter the Year."))
if yr%4==0:
print("It's a leap year.")
else:
print("It's not a leap year.") | [
"[email protected]"
] | |
47a4a9784f1ae38914bd692d9743d32514d93d2c | 08136ad3a77e6be56c50a64ffddd88bd2f2b2931 | /Standalone/gevent/pool.py | ab505f205e5da719a63f738946404f9ee1f18b5e | [] | no_license | handwriter/ufo | 2f9ae4a999bcbe532643ce564a9b33cb5b97aca6 | 28af9ef19b525343bda8c149334b16b0b02c4968 | refs/heads/master | 2021-04-05T01:18:45.130347 | 2020-03-19T13:34:00 | 2020-03-19T13:34:00 | 248,509,434 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 26,259 | py | # Copyright (c) 2009-2011 Denis Bilenko. See LICENSE for details.
"""
Managing greenlets in a group.
The :class:`Group` class in this module abstracts a group of running
greenlets. When a greenlet dies, it's automatically removed from the
group. All running greenlets in a group can be waited on with
:meth:`Group.join`, or all running greenlets can be killed with
:meth:`Group.kill`.
The :class:`Pool` class, which is a subclass of :class:`Group`,
provides a way to limit concurrency: its :meth:`spawn <Pool.spawn>`
method blocks if the number of greenlets in the pool has already
reached the limit, until there is a free slot.
"""
from __future__ import print_function, absolute_import, division
from gevent.hub import GreenletExit, getcurrent, kill as _kill
from gevent.greenlet import joinall, Greenlet
from gevent.queue import Full as QueueFull
from gevent.timeout import Timeout
from gevent.event import Event
from gevent.lock import Semaphore, DummySemaphore
from gevent._compat import izip
from gevent._imap import IMap
from gevent._imap import IMapUnordered
__all__ = [
'Group',
'Pool',
'PoolFull',
]
class GroupMappingMixin(object):
# Internal, non-public API class.
# Provides mixin methods for implementing mapping pools. Subclasses must define:
def spawn(self, func, *args, **kwargs):
"""
A function that runs *func* with *args* and *kwargs*, potentially
asynchronously. Return a value with a ``get`` method that blocks
until the results of func are available, and a ``rawlink`` method
that calls a callback when the results are available.
        If this object has an upper bound on how many asynchronously executing
tasks can exist, this method may block until a slot becomes available.
"""
raise NotImplementedError()
def _apply_immediately(self):
"""
should the function passed to apply be called immediately,
synchronously?
"""
raise NotImplementedError()
def _apply_async_use_greenlet(self):
"""
Should apply_async directly call Greenlet.spawn(), bypassing
`spawn`?
Return true when self.spawn would block.
"""
raise NotImplementedError()
def _apply_async_cb_spawn(self, callback, result):
"""
Run the given callback function, possibly
asynchronously, possibly synchronously.
"""
raise NotImplementedError()
def apply_cb(self, func, args=None, kwds=None, callback=None):
"""
:meth:`apply` the given *func(\\*args, \\*\\*kwds)*, and, if a *callback* is given, run it with the
results of *func* (unless an exception was raised.)
The *callback* may be called synchronously or asynchronously. If called
asynchronously, it will not be tracked by this group. (:class:`Group` and :class:`Pool`
call it asynchronously in a new greenlet; :class:`~gevent.threadpool.ThreadPool` calls
it synchronously in the current greenlet.)
"""
result = self.apply(func, args, kwds)
if callback is not None:
self._apply_async_cb_spawn(callback, result)
return result
def apply_async(self, func, args=None, kwds=None, callback=None):
"""
A variant of the :meth:`apply` method which returns a :class:`~.Greenlet` object.
When the returned greenlet gets to run, it *will* call :meth:`apply`,
passing in *func*, *args* and *kwds*.
If *callback* is specified, then it should be a callable which
accepts a single argument. When the result becomes ready
callback is applied to it (unless the call failed).
This method will never block, even if this group is full (that is,
even if :meth:`spawn` would block, this method will not).
.. caution:: The returned greenlet may or may not be tracked
as part of this group, so :meth:`joining <join>` this group is
not a reliable way to wait for the results to be available or
for the returned greenlet to run; instead, join the returned
greenlet.
.. tip:: Because :class:`~.ThreadPool` objects do not track greenlets, the returned
greenlet will never be a part of it. To reduce overhead and improve performance,
:class:`Group` and :class:`Pool` may choose to track the returned
greenlet. These are implementation details that may change.
"""
if args is None:
args = ()
if kwds is None:
kwds = {}
if self._apply_async_use_greenlet():
# cannot call self.spawn() directly because it will block
# XXX: This is always the case for ThreadPool, but for Group/Pool
# of greenlets, this is only the case when they are full...hence
# the weasely language about "may or may not be tracked". Should we make
# Group/Pool always return true as well so it's never tracked by any
# implementation? That would simplify that logic, but could increase
# the total number of greenlets in the system and add a layer of
# overhead for the simple cases when the pool isn't full.
return Greenlet.spawn(self.apply_cb, func, args, kwds, callback)
greenlet = self.spawn(func, *args, **kwds)
if callback is not None:
greenlet.link(pass_value(callback))
return greenlet
def apply(self, func, args=None, kwds=None):
"""
        Rough equivalent of the :func:`apply()` builtin function, blocking until
the result is ready and returning it.
The ``func`` will *usually*, but not *always*, be run in a way
that allows the current greenlet to switch out (for example,
in a new greenlet or thread, depending on implementation). But
if the current greenlet or thread is already one that was
spawned by this pool, the pool may choose to immediately run
the `func` synchronously.
Any exception ``func`` raises will be propagated to the caller of ``apply`` (that is,
this method will raise the exception that ``func`` raised).
"""
if args is None:
args = ()
if kwds is None:
kwds = {}
if self._apply_immediately():
return func(*args, **kwds)
return self.spawn(func, *args, **kwds).get()
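    # Illustrative sketch only (``slow_add`` is hypothetical): the caller blocks
    # on the result, but other greenlets keep running in the meantime.
    #
    #   result = pool.apply(slow_add, args=(2, 3))   # -> 5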
def __map(self, func, iterable):
return [g.get() for g in
[self.spawn(func, i) for i in iterable]]
def map(self, func, iterable):
"""Return a list made by applying the *func* to each element of
the iterable.
.. seealso:: :meth:`imap`
"""
# We can't return until they're all done and in order. It
# wouldn't seem to much matter what order we wait on them in,
# so the simple, fast (50% faster than imap) solution would be:
# return [g.get() for g in
# [self.spawn(func, i) for i in iterable]]
# If the pool size is unlimited (or more than the len(iterable)), this
# is equivalent to imap (spawn() will never block, all of them run concurrently,
# we call get() in the order the iterable was given).
        # Now let's imagine the pool is of limited size. Suppose the
# func is time.sleep, our pool is limited to 3 threads, and
# our input is [10, 1, 10, 1, 1] We would start three threads,
# one to sleep for 10, one to sleep for 1, and the last to
# sleep for 10. We would block starting the fourth thread. At
# time 1, we would finish the second thread and start another
# one for time 1. At time 2, we would finish that one and
# start the last thread, and then begin executing get() on the first
# thread.
# Because it's spawn that blocks, this is *also* equivalent to what
# imap would do.
# The one remaining difference is that imap runs in its own
# greenlet, potentially changing the way the event loop runs.
# That's easy enough to do.
g = Greenlet.spawn(self.__map, func, iterable)
return g.get()
def map_cb(self, func, iterable, callback=None):
result = self.map(func, iterable)
if callback is not None:
callback(result)
return result
def map_async(self, func, iterable, callback=None):
"""
A variant of the map() method which returns a Greenlet object that is executing
the map function.
If callback is specified then it should be a callable which accepts a
single argument.
"""
return Greenlet.spawn(self.map_cb, func, iterable, callback)
def __imap(self, cls, func, *iterables, **kwargs):
# Python 2 doesn't support the syntax that lets us mix varargs and
# a named kwarg, so we have to unpack manually
maxsize = kwargs.pop('maxsize', None)
if kwargs:
raise TypeError("Unsupported keyword arguments")
return cls.spawn(func, izip(*iterables), spawn=self.spawn,
_zipped=True, maxsize=maxsize)
def imap(self, func, *iterables, **kwargs):
"""
imap(func, *iterables, maxsize=None) -> iterable
An equivalent of :func:`itertools.imap`, operating in parallel.
The *func* is applied to each element yielded from each
iterable in *iterables* in turn, collecting the result.
If this object has a bound on the number of active greenlets it can
contain (such as :class:`Pool`), then at most that number of tasks will operate
in parallel.
:keyword int maxsize: If given and not-None, specifies the maximum number of
finished results that will be allowed to accumulate awaiting the reader;
more than that number of results will cause map function greenlets to begin
to block. This is most useful if there is a great disparity in the speed of
the mapping code and the consumer and the results consume a great deal of resources.
.. note:: This is separate from any bound on the number of active parallel
tasks, though they may have some interaction (for example, limiting the
number of parallel tasks to the smallest bound).
.. note:: Using a bound is slightly more computationally expensive than not using a bound.
.. tip:: The :meth:`imap_unordered` method makes much better
use of this parameter. Some additional, unspecified,
number of objects may be required to be kept in memory
to maintain order by this function.
:return: An iterable object.
.. versionchanged:: 1.1b3
Added the *maxsize* keyword parameter.
.. versionchanged:: 1.1a1
Accept multiple *iterables* to iterate in parallel.
"""
return self.__imap(IMap, func, *iterables, **kwargs)
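    # Hedged sketch of the *maxsize* bound (``producer`` and ``slow_consumer``
    # are hypothetical names):
    #
    #   for item in pool.imap(producer, data, maxsize=3):
    #       slow_consumer(item)   # at most ~3 finished results wait in memory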
def imap_unordered(self, func, *iterables, **kwargs):
"""
imap_unordered(func, *iterables, maxsize=None) -> iterable
The same as :meth:`imap` except that the ordering of the results
from the returned iterator should be considered in arbitrary
order.
This is lighter weight than :meth:`imap` and should be preferred if order
doesn't matter.
.. seealso:: :meth:`imap` for more details.
"""
return self.__imap(IMapUnordered, func, *iterables, **kwargs)
class Group(GroupMappingMixin):
"""
Maintain a group of greenlets that are still running, without
limiting their number.
Links to each item and removes it upon notification.
Groups can be iterated to discover what greenlets they are tracking,
they can be tested to see if they contain a greenlet, and they know the
number (len) of greenlets they are tracking. If they are not tracking any
greenlets, they are False in a boolean context.
.. attribute:: greenlet_class
Either :class:`gevent.Greenlet` (the default) or a subclass.
These are the type of
object we will :meth:`spawn`. This can be
changed on an instance or in a subclass.
"""
greenlet_class = Greenlet
def __init__(self, *args):
assert len(args) <= 1, args
self.greenlets = set(*args)
if args:
for greenlet in args[0]:
greenlet.rawlink(self._discard)
# each item we kill we place in dying, to avoid killing the same greenlet twice
self.dying = set()
self._empty_event = Event()
self._empty_event.set()
def __repr__(self):
return '<%s at 0x%x %s>' % (self.__class__.__name__, id(self), self.greenlets)
def __len__(self):
"""
Answer how many greenlets we are tracking. Note that if we are empty,
we are False in a boolean context.
"""
return len(self.greenlets)
def __contains__(self, item):
"""
Answer if we are tracking the given greenlet.
"""
return item in self.greenlets
def __iter__(self):
"""
Iterate across all the greenlets we are tracking, in no particular order.
"""
return iter(self.greenlets)
def add(self, greenlet):
"""
Begin tracking the *greenlet*.
If this group is :meth:`full`, then this method may block
until it is possible to track the greenlet.
Typically the *greenlet* should **not** be started when
it is added because if this object blocks in this method,
then the *greenlet* may run to completion before it is tracked.
"""
try:
rawlink = greenlet.rawlink
except AttributeError:
pass # non-Greenlet greenlet, like MAIN
else:
rawlink(self._discard)
self.greenlets.add(greenlet)
self._empty_event.clear()
def _discard(self, greenlet):
self.greenlets.discard(greenlet)
self.dying.discard(greenlet)
if not self.greenlets:
self._empty_event.set()
def discard(self, greenlet):
"""
Stop tracking the greenlet.
"""
self._discard(greenlet)
try:
unlink = greenlet.unlink
except AttributeError:
pass # non-Greenlet greenlet, like MAIN
else:
unlink(self._discard)
def start(self, greenlet):
"""
Add the **unstarted** *greenlet* to the collection of greenlets
this group is monitoring, and then start it.
"""
self.add(greenlet)
greenlet.start()
def spawn(self, *args, **kwargs): # pylint:disable=arguments-differ
"""
Begin a new greenlet with the given arguments (which are passed
to the greenlet constructor) and add it to the collection of greenlets
this group is monitoring.
:return: The newly started greenlet.
"""
greenlet = self.greenlet_class(*args, **kwargs)
self.start(greenlet)
return greenlet
# def close(self):
# """Prevents any more tasks from being submitted to the pool"""
# self.add = RaiseException("This %s has been closed" % self.__class__.__name__)
def join(self, timeout=None, raise_error=False):
"""
Wait for this group to become empty *at least once*.
If there are no greenlets in the group, returns immediately.
.. note:: By the time the waiting code (the caller of this
method) regains control, a greenlet may have been added to
this group, and so this object may no longer be empty. (That
is, ``group.join(); assert len(group) == 0`` is not
guaranteed to hold.) This method only guarantees that the group
reached a ``len`` of 0 at some point.
:keyword bool raise_error: If True (*not* the default), if any
greenlet that finished while the join was in progress raised
an exception, that exception will be raised to the caller of
this method. If multiple greenlets raised exceptions, which
one gets re-raised is not determined. Only greenlets currently
in the group when this method is called are guaranteed to
be checked for exceptions.
:return bool: A value indicating whether this group became empty.
If the timeout is specified and the group did not become empty
during that timeout, then this will be a false value. Otherwise
it will be a true value.
.. versionchanged:: 1.2a1
Add the return value.
"""
greenlets = list(self.greenlets) if raise_error else ()
result = self._empty_event.wait(timeout=timeout)
for greenlet in greenlets:
if greenlet.exception is not None:
if hasattr(greenlet, '_raise_exception'):
greenlet._raise_exception()
raise greenlet.exception
return result
def kill(self, exception=GreenletExit, block=True, timeout=None):
"""
Kill all greenlets being tracked by this group.
"""
timer = Timeout._start_new_or_dummy(timeout)
try:
while self.greenlets:
for greenlet in list(self.greenlets):
if greenlet in self.dying:
continue
try:
kill = greenlet.kill
except AttributeError:
_kill(greenlet, exception)
else:
kill(exception, block=False)
self.dying.add(greenlet)
if not block:
break
joinall(self.greenlets)
except Timeout as ex:
if ex is not timer:
raise
finally:
timer.cancel()
def killone(self, greenlet, exception=GreenletExit, block=True, timeout=None):
"""
If the given *greenlet* is running and being tracked by this group,
kill it.
"""
if greenlet not in self.dying and greenlet in self.greenlets:
greenlet.kill(exception, block=False)
self.dying.add(greenlet)
if block:
greenlet.join(timeout)
def full(self):
"""
Return a value indicating whether this group can track more greenlets.
In this implementation, because there are no limits on the number of
tracked greenlets, this will always return a ``False`` value.
"""
return False
def wait_available(self, timeout=None):
"""
Block until it is possible to :meth:`spawn` a new greenlet.
In this implementation, because there are no limits on the number
of tracked greenlets, this will always return immediately.
"""
# MappingMixin methods
def _apply_immediately(self):
# If apply() is called from one of our own
# worker greenlets, don't spawn a new one---if we're full, that
# could deadlock.
return getcurrent() in self
def _apply_async_cb_spawn(self, callback, result):
Greenlet.spawn(callback, result)
def _apply_async_use_greenlet(self):
# cannot call self.spawn() because it will block, so
# use a fresh, untracked greenlet that when run will
# (indirectly) call self.spawn() for us.
return self.full()
class PoolFull(QueueFull):
"""
Raised when a Pool is full and an attempt was made to
add a new greenlet to it in non-blocking mode.
"""
class Pool(Group):
def __init__(self, size=None, greenlet_class=None):
"""
Create a new pool.
A pool is like a group, but the maximum number of members
is governed by the *size* parameter.
:keyword int size: If given, this non-negative integer is the
maximum count of active greenlets that will be allowed in
this pool. A few values have special significance:
* `None` (the default) places no limit on the number of
greenlets. This is useful when you want to track, but not limit,
greenlets. In general, a :class:`Group`
may be a more efficient way to achieve the same effect, but some things
need the additional abilities of this class (one example being the *spawn*
parameter of :class:`gevent.baseserver.BaseServer` and
its subclass :class:`gevent.pywsgi.WSGIServer`).
* ``0`` creates a pool that can never have any active greenlets. Attempting
to spawn in this pool will block forever. This is only useful
if an application uses :meth:`wait_available` with a timeout and checks
:meth:`free_count` before attempting to spawn.
"""
if size is not None and size < 0:
raise ValueError('size must not be negative: %r' % (size, ))
Group.__init__(self)
self.size = size
if greenlet_class is not None:
self.greenlet_class = greenlet_class
if size is None:
factory = DummySemaphore
else:
factory = Semaphore
self._semaphore = factory(size)
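        # Sketch of the *size* semantics (``fetch`` and ``urls`` hypothetical):
        #
        #   pool = Pool(5)                  # 6th spawn() blocks until a slot frees
        #   for url in urls:
        #       pool.spawn(fetch, url)
        #   pool.join()
        #
        #   tracker = Pool()                # size=None: tracks but never blocks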
def wait_available(self, timeout=None):
"""
Wait until it's possible to spawn a greenlet in this pool.
:param float timeout: If given, only wait the specified number
of seconds.
.. warning:: If the pool was initialized with a size of 0, this
method will block forever unless a timeout is given.
:return: A number indicating how many new greenlets can be put into
the pool without blocking.
.. versionchanged:: 1.1a3
Added the ``timeout`` parameter.
"""
return self._semaphore.wait(timeout=timeout)
def full(self):
"""
Return a boolean indicating whether this pool is full, e.g. if
:meth:`add` would block.
:return: False if there is room for new members, True if there isn't.
"""
return self.free_count() <= 0
def free_count(self):
"""
Return a number indicating *approximately* how many more members
can be added to this pool.
"""
if self.size is None:
return 1
return max(0, self.size - len(self))
def start(self, greenlet, *args, **kwargs): # pylint:disable=arguments-differ
"""
start(greenlet, blocking=True, timeout=None) -> None
Add the **unstarted** *greenlet* to the collection of greenlets
this group is monitoring and then start it.
Parameters are as for :meth:`add`.
"""
self.add(greenlet, *args, **kwargs)
greenlet.start()
def add(self, greenlet, blocking=True, timeout=None): # pylint:disable=arguments-differ
"""
Begin tracking the given **unstarted** greenlet, possibly blocking
until space is available.
Usually you should call :meth:`start` to track and start the greenlet
instead of using this lower-level method, or :meth:`spawn` to
also create the greenlet.
:keyword bool blocking: If True (the default), this function
will block until the pool has space or a timeout occurs. If
False, this function will immediately raise a Timeout if the
pool is currently full.
:keyword float timeout: The maximum number of seconds this
method will block, if ``blocking`` is True. (Ignored if
``blocking`` is False.)
:raises PoolFull: if either ``blocking`` is False and the pool
was full, or if ``blocking`` is True and ``timeout`` was
exceeded.
.. caution:: If the *greenlet* has already been started and
*blocking* is true, then the greenlet may run to completion
while the current greenlet blocks waiting to track it. This would
enable higher concurrency than desired.
.. seealso:: :meth:`Group.add`
.. versionchanged:: 1.3.0 Added the ``blocking`` and
``timeout`` parameters.
"""
if not self._semaphore.acquire(blocking=blocking, timeout=timeout):
# We failed to acquire the semaphore.
# If blocking was True, then there was a timeout. If blocking was
# False, then there was no capacity. Either way, raise PoolFull.
raise PoolFull()
try:
Group.add(self, greenlet)
except:
self._semaphore.release()
raise
def _discard(self, greenlet):
Group._discard(self, greenlet)
self._semaphore.release()
class pass_value(object):
__slots__ = ['callback']
def __init__(self, callback):
self.callback = callback
def __call__(self, source):
if source.successful():
self.callback(source.value)
def __hash__(self):
return hash(self.callback)
def __eq__(self, other):
return self.callback == getattr(other, 'callback', other)
def __str__(self):
return str(self.callback)
def __repr__(self):
return repr(self.callback)
def __getattr__(self, item):
assert item != 'callback'
return getattr(self.callback, item)

# ===== repo: veromejia/holbertonschool-higher_level_programming | file: /0x03-python-data_structures/0-print_list_integer.py =====
#!/usr/bin/python3
def print_list_integer(my_list=[]):
for i in my_list:
print("{:d}".format(i))

# ===== repo: hi-noikiy/ScrapyLearning | file: /czaSpider/dump/socket/client_for_test2.py =====
import socket, json, time
HOST = '127.0.0.1'  # server address (localhost)
PORT = 8022
tcpCliSock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
tcpCliSock.connect((HOST, PORT))
tcpCliSock.send(json.dumps({'cookie': 'test', 'user':'cza'}).encode())
while True:
print(tcpCliSock.recv(1024).decode())
    # message = input('Enter a chat message: ')
# tcpCliSock.send(json.dumps({'state': 11, 'message': message, 'to': 'test'}).encode())

# ===== repo: uberman4740/Practical-Deep-Reinforcement-Learning | file: /ch17/02_imag.py =====
#!/usr/bin/env python3
import os
import gym
import ptan
import argparse
import numpy as np
from tensorboardX import SummaryWriter
import torch
import torch.nn.functional as F
import torch.optim as optim
from torch.autograd import Variable
from lib import common, i2a
LEARNING_RATE = 5e-4
NUM_ENVS = 16
BATCH_SIZE = 64
SAVE_EVERY_BATCH = 1000
OBS_WEIGHT = 10.0
REWARD_WEIGHT = 1.0
def get_obs_diff(prev_obs, cur_obs):
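    # Environment-model target: the per-pixel difference between the newest
    # frame of the next observation and the newest frame of the current one,
    # with both frames scaled from [0, 255] to [0.0, 1.0].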
prev = np.array(prev_obs)[-1]
cur = np.array(cur_obs)[-1]
prev = prev.astype(np.float32) / 255.0
cur = cur.astype(np.float32) / 255.0
return cur - prev
def iterate_batches(envs, net, cuda=False):
act_selector = ptan.actions.ProbabilityActionSelector()
mb_obs = np.zeros((BATCH_SIZE, ) + common.IMG_SHAPE, dtype=np.uint8)
mb_obs_next = np.zeros((BATCH_SIZE, ) + i2a.EM_OUT_SHAPE, dtype=np.float32)
mb_actions = np.zeros((BATCH_SIZE, ), dtype=np.int32)
mb_rewards = np.zeros((BATCH_SIZE, ), dtype=np.float32)
obs = [e.reset() for e in envs]
total_reward = [0.0] * NUM_ENVS
total_steps = [0] * NUM_ENVS
batch_idx = 0
done_rewards = []
done_steps = []
while True:
obs_v = ptan.agent.default_states_preprocessor(obs, cuda=cuda)
logits_v, values_v = net(obs_v)
        probs_v = F.softmax(logits_v, dim=1)  # explicit dim avoids the implicit-dim deprecation
probs = probs_v.data.cpu().numpy()
actions = act_selector(probs)
for e_idx, e in enumerate(envs):
o, r, done, _ = e.step(actions[e_idx])
mb_obs[batch_idx] = obs[e_idx]
mb_obs_next[batch_idx] = get_obs_diff(obs[e_idx], o)
mb_actions[batch_idx] = actions[e_idx]
mb_rewards[batch_idx] = r
total_reward[e_idx] += r
total_steps[e_idx] += 1
batch_idx = (batch_idx + 1) % BATCH_SIZE
if batch_idx == 0:
yield mb_obs, mb_obs_next, mb_actions, mb_rewards, done_rewards, done_steps
done_rewards.clear()
done_steps.clear()
if done:
o = e.reset()
done_rewards.append(total_reward[e_idx])
done_steps.append(total_steps[e_idx])
total_reward[e_idx] = 0.0
total_steps[e_idx] = 0
obs[e_idx] = o
if __name__ == "__main__":
parser = argparse.ArgumentParser()
parser.add_argument("--cuda", default=False, action="store_true", help="Enable cuda")
parser.add_argument("-n", "--name", required=True, help="Name of the run")
parser.add_argument("-m", "--model", required=True, help="File with model to load")
args = parser.parse_args()
saves_path = os.path.join("saves", "02_env_" + args.name)
os.makedirs(saves_path, exist_ok=True)
envs = [common.make_env() for _ in range(NUM_ENVS)]
writer = SummaryWriter(comment="-02_env_" + args.name)
net = common.AtariA2C(envs[0].observation_space.shape, envs[0].action_space.n)
net_em = i2a.EnvironmentModel(envs[0].observation_space.shape, envs[0].action_space.n)
net.load_state_dict(torch.load(args.model, map_location=lambda storage, loc: storage))
if args.cuda:
net.cuda()
net_em.cuda()
print(net_em)
optimizer = optim.Adam(net_em.parameters(), lr=LEARNING_RATE)
step_idx = 0
best_loss = np.inf
with ptan.common.utils.TBMeanTracker(writer, batch_size=100) as tb_tracker:
for mb_obs, mb_obs_next, mb_actions, mb_rewards, done_rewards, done_steps in iterate_batches(envs, net, cuda=args.cuda):
if len(done_rewards) > 0:
m_reward = np.mean(done_rewards)
m_steps = np.mean(done_steps)
print("%d: done %d episodes, mean reward=%.2f, steps=%.2f" % (
step_idx, len(done_rewards), m_reward, m_steps))
tb_tracker.track("total_reward", m_reward, step_idx)
tb_tracker.track("total_steps", m_steps, step_idx)
obs_v = Variable(torch.from_numpy(mb_obs))
obs_next_v = Variable(torch.from_numpy(mb_obs_next))
actions_t = torch.LongTensor(mb_actions.tolist())
rewards_v = Variable(torch.from_numpy(mb_rewards))
if args.cuda:
obs_v = obs_v.cuda()
actions_t = actions_t.cuda()
obs_next_v = obs_next_v.cuda()
rewards_v = rewards_v.cuda()
optimizer.zero_grad()
out_obs_next_v, out_reward_v = net_em(obs_v.float()/255, actions_t)
loss_obs_v = F.mse_loss(out_obs_next_v, obs_next_v)
loss_rew_v = F.mse_loss(out_reward_v, rewards_v)
loss_total_v = OBS_WEIGHT * loss_obs_v + REWARD_WEIGHT * loss_rew_v
loss_total_v.backward()
optimizer.step()
tb_tracker.track("loss_em_obs", loss_obs_v, step_idx)
tb_tracker.track("loss_em_reward", loss_rew_v, step_idx)
tb_tracker.track("loss_em_total", loss_total_v, step_idx)
loss = loss_total_v.data.cpu().numpy()
if loss < best_loss:
print("Best loss updated: %.4e -> %.4e" % (best_loss, loss))
best_loss = loss
fname = os.path.join(saves_path, "best_%.4e_%05d.dat" % (loss, step_idx))
torch.save(net_em.state_dict(), fname)
step_idx += 1
if step_idx % SAVE_EVERY_BATCH == 0:
fname = os.path.join(saves_path, "em_%05d_%.4e.dat" % (step_idx, loss))
torch.save(net_em.state_dict(), fname)

# ===== repo: jincurry/standard_Library_Learn | file: /data_persistence_and_exchange/sqlite3/sqlite3_argument_named.py =====
import sqlite3
import sys
db_filename = 'todo.db'
project_name = sys.argv[1]
with sqlite3.connect(db_filename) as conn:
cursor = conn.cursor()
query = """
select id, priority, details, status, deadline from task
where project = :project_name
order by deadline, priority
"""
cursor.execute(query, {'project_name': project_name})
for row in cursor.fetchall():
task_id, priority, details, status, deadline = row
print('{:2d}[{:d}] {:<25}[{:<8}]({})'.format(
task_id, priority, details, status, deadline))
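# sqlite3 also accepts positional "qmark" placeholders; the named style used
# above is equivalent to this illustrative variant:
#
#   cursor.execute("select ... from task where project = ?", (project_name,))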

# ===== repo: b3nab/appcenter-sdks | file: /sdks/python/test/test_BillingResourceUsage.py =====
# coding: utf-8
"""
App Center Client
Microsoft Visual Studio App Center API # noqa: E501
OpenAPI spec version: preview
Contact: [email protected]
Project Repository: https://github.com/b3nab/appcenter-sdks
"""
from __future__ import absolute_import
import unittest
import appcenter_sdk
from BillingResourceUsage.clsBillingResourceUsage import BillingResourceUsage # noqa: E501
from appcenter_sdk.rest import ApiException
class TestBillingResourceUsage(unittest.TestCase):
"""BillingResourceUsage unit test stubs"""
def setUp(self):
pass
def tearDown(self):
pass
def testBillingResourceUsage(self):
"""Test BillingResourceUsage"""
# FIXME: construct object with mandatory attributes with example values
# model = appcenter_sdk.models.clsBillingResourceUsage.BillingResourceUsage() # noqa: E501
pass
if __name__ == '__main__':
unittest.main()

# ===== repo: DankMickey/PORSRC | file: /pirates/world/GameTypeGlobals.py =====
from panda3d.core import ConfigVariable, ConfigVariableBool
import types
from pirates.piratesbase import PiratesGlobals
from pirates.piratesbase import PLocalizer
from pirates.piratesgui import PiratesGuiGlobals
from pirates.uberdog.UberDogGlobals import InventoryType
from pirates.uberdog import DistributedInventoryBase
from otp.otpbase import OTPGlobals
import __builtin__
GAME_DURATION_SHORT = 0
GAME_DURATION_MED = 1
GAME_DURATION_LONG = 2
GAME_OPTION_DURATION = 0
GAME_OPTION_MATCH_COUNT = 1
GAME_OPTION_PASSWORD = 2
GAME_OPTION_NPC_PLAYERS = 3
GAME_OPTION_LOCATION = 4
GAME_OPTION_USE_CURR_CREW = 5
GAME_OPTION_MIN_BET = 6
GAME_OPTION_MIN_PLAYERS = 7
GAME_OPTION_DESIRED_PLAYERS = 8
GAME_OPTION_MAX_PLAYERS = 9
GAME_OPTION_MAX_CREW_SIZE = 10
GAME_OPTION_MAX_CREW_SHIP = 11
GAME_OPTION_VIP_PASS = 12
GAME_OPTION_CREW_INFO = 13
GAME_OPTION_TM_ID = 14
GAME_OPTION_SOLO_PLAY = 15
MATCH_CHANCE_LOW = 1
MATCH_CHANCE_MODERATE = 25
MATCH_CHANCE_HIGH = 75
GAME_TYPE_2_INSTANCE_TYPE = {
PiratesGlobals.GAME_TYPE_PG: PiratesGlobals.INSTANCE_PG,
PiratesGlobals.GAME_TYPE_PVP: PiratesGlobals.INSTANCE_PVP,
PiratesGlobals.GAME_TYPE_HSA: PiratesGlobals.INSTANCE_MAIN,
PiratesGlobals.GAME_TYPE_TM: PiratesGlobals.INSTANCE_TM,
PiratesGlobals.GAME_TYPE_CREW: PiratesGlobals.INSTANCE_MAIN,
PiratesGlobals.GAME_TYPE_PRIV: PiratesGlobals.INSTANCE_MAIN,
PiratesGlobals.GAME_TYPE_QUEST: PiratesGlobals.INSTANCE_MAIN }
def gameType2InstanceType(gameType):
instanceType = GAME_TYPE_2_INSTANCE_TYPE.get(gameType)
return instanceType
GameTypeRanking = {
PiratesGlobals.GAME_STYLE_CTF: InventoryType.CTFGame,
PiratesGlobals.GAME_STYLE_CTL: InventoryType.CTLGame,
PiratesGlobals.GAME_STYLE_PIRATEER: InventoryType.PTRGame,
PiratesGlobals.GAME_STYLE_BATTLE: InventoryType.BTLGame,
PiratesGlobals.GAME_STYLE_TEAM_BATTLE: InventoryType.TBTGame,
PiratesGlobals.GAME_STYLE_SHIP_BATTLE: InventoryType.SBTGame,
PiratesGlobals.GAME_STYLE_ARMADA: InventoryType.ARMGame,
PiratesGlobals.GAME_STYLE_TKP: InventoryType.TKPGame,
PiratesGlobals.GAME_STYLE_BTB: InventoryType.BTBGame,
PiratesGlobals.GAME_STYLE_BLACKJACK: InventoryType.BlackjackGame,
PiratesGlobals.GAME_STYLE_POKER: InventoryType.PokerGame }
GameTypeStrings = {
'type': {
PiratesGlobals.GAME_TYPE_PVP: PLocalizer.PVPGame,
PiratesGlobals.GAME_TYPE_PG: PLocalizer.ParlorGame,
PiratesGlobals.GAME_TYPE_HSA: PLocalizer.HSAGame,
PiratesGlobals.GAME_TYPE_TM: PLocalizer.TMGame,
PiratesGlobals.GAME_TYPE_CREW: PLocalizer.CrewGame,
PiratesGlobals.GAME_TYPE_PRIV: PLocalizer.PrivGame,
PiratesGlobals.GAME_TYPE_QUEST: PLocalizer.QuestGame },
'typeBrief': {
PiratesGlobals.GAME_TYPE_PVP: PLocalizer.PVPGameBrief,
PiratesGlobals.GAME_TYPE_PG: PLocalizer.ParlorGameBrief,
PiratesGlobals.GAME_TYPE_HSA: PLocalizer.HSAGameBrief,
PiratesGlobals.GAME_TYPE_TM: PLocalizer.TMGameBrief,
PiratesGlobals.GAME_TYPE_CREW: PLocalizer.CrewGameBrief,
PiratesGlobals.GAME_TYPE_PRIV: PLocalizer.PrivGameBrief,
PiratesGlobals.GAME_TYPE_QUEST: PLocalizer.QuestGameBrief },
'description': {
PiratesGlobals.GAME_TYPE_PVP: PLocalizer.PVPGameDesc,
PiratesGlobals.GAME_TYPE_PG: PLocalizer.ParlorGameDesc,
PiratesGlobals.GAME_TYPE_HSA: PLocalizer.HSAGameDesc,
PiratesGlobals.GAME_TYPE_TM: PLocalizer.TMGameDesc,
PiratesGlobals.GAME_TYPE_CREW: PLocalizer.CrewGameDesc,
PiratesGlobals.GAME_TYPE_PRIV: PLocalizer.PrivGameDesc,
PiratesGlobals.GAME_TYPE_QUEST: PLocalizer.QuestGameDesc },
'descriptionStyle': {
PiratesGlobals.GAME_STYLE_BATTLE: PLocalizer.GameStyleBattleDesc,
PiratesGlobals.GAME_STYLE_TEAM_BATTLE: PLocalizer.GameStyleTeamBattleDesc,
PiratesGlobals.GAME_STYLE_SHIP_BATTLE: PLocalizer.GameStyleShipBattleDesc,
PiratesGlobals.GAME_STYLE_CTF: PLocalizer.GameStyleCTFDesc,
PiratesGlobals.GAME_STYLE_CTL: PLocalizer.GameStyleCTLDesc,
PiratesGlobals.GAME_STYLE_PIRATEER: PLocalizer.GameStylePirateer,
PiratesGlobals.GAME_STYLE_POKER: PLocalizer.GameStylePoker,
PiratesGlobals.GAME_STYLE_BLACKJACK: PLocalizer.GameStyleBlackjack,
PiratesGlobals.CREW_STYLE_FIND_A_CREW: PLocalizer.CrewStyleFindACrewDesc,
PiratesGlobals.CREW_STYLE_FIND_A_PVP_CREW: PLocalizer.CrewStyleFindAPVPCrewDesc,
PiratesGlobals.CREW_STYLE_RECRUIT_MEMBERS: PLocalizer.CrewStyleRecruitMembersDesc },
'icon': {
PiratesGlobals.GAME_TYPE_PVP: 'lookout_win_pvp_game_icon',
PiratesGlobals.GAME_TYPE_PG: 'lookout_win_parlor_game_icon',
PiratesGlobals.GAME_TYPE_HSA: None,
PiratesGlobals.GAME_TYPE_TM: 'lookout_win_treasuremap_icon',
PiratesGlobals.GAME_TYPE_CREW: 'friend_button',
PiratesGlobals.GAME_TYPE_PRIV: 'pir_t_gui_lok_shipPvp',
PiratesGlobals.GAME_TYPE_QUEST: None },
'iconStyle': {
PiratesGlobals.GAME_STYLE_BATTLE: None,
PiratesGlobals.GAME_STYLE_TEAM_BATTLE: None,
PiratesGlobals.GAME_STYLE_SHIP_BATTLE: None,
PiratesGlobals.GAME_STYLE_CTF: None,
PiratesGlobals.GAME_STYLE_CTL: None,
PiratesGlobals.GAME_STYLE_PIRATEER: None,
PiratesGlobals.GAME_STYLE_POKER: None,
PiratesGlobals.GAME_STYLE_BLACKJACK: None },
'style': {
PiratesGlobals.GAME_STYLE_ANY: PLocalizer.AnyGame,
PiratesGlobals.GAME_STYLE_CTF: PLocalizer.CTFGame,
PiratesGlobals.GAME_STYLE_CTL: PLocalizer.CTLGame,
PiratesGlobals.GAME_STYLE_PIRATEER: PLocalizer.PTRGame,
PiratesGlobals.GAME_STYLE_BATTLE: PLocalizer.BTLGame,
PiratesGlobals.GAME_STYLE_TEAM_BATTLE: PLocalizer.TBTGame,
PiratesGlobals.GAME_STYLE_SHIP_BATTLE: PLocalizer.SBTGame,
PiratesGlobals.GAME_STYLE_ARMADA: PLocalizer.ARMGame,
PiratesGlobals.GAME_STYLE_TKP: PLocalizer.TKPGame,
PiratesGlobals.GAME_STYLE_BTB: PLocalizer.BTBGame,
PiratesGlobals.GAME_STYLE_BLACKJACK: PLocalizer.BlackjackGame,
PiratesGlobals.GAME_STYLE_POKER: PLocalizer.PokerGame,
PiratesGlobals.CREW_STYLE_FIND_A_CREW: PLocalizer.FindACrew,
PiratesGlobals.CREW_STYLE_FIND_A_PVP_CREW: PLocalizer.FindAPVPCrew,
PiratesGlobals.CREW_STYLE_RECRUIT_MEMBERS: PLocalizer.RecruitCrewMembers },
'option': {
GAME_OPTION_DURATION: PLocalizer.GameDuration,
GAME_OPTION_MATCH_COUNT: PLocalizer.GameMatchCount,
GAME_OPTION_PASSWORD: PLocalizer.GamePassword,
GAME_OPTION_MIN_BET: PLocalizer.GameMinBet,
GAME_OPTION_NPC_PLAYERS: PLocalizer.GameNPCPlayers,
GAME_OPTION_LOCATION: PLocalizer.GameLocation,
GAME_OPTION_USE_CURR_CREW: PLocalizer.GameUseCrew,
GAME_OPTION_MIN_PLAYERS: PLocalizer.GameMinPlayers,
GAME_OPTION_DESIRED_PLAYERS: PLocalizer.GameDesPlayers,
GAME_OPTION_MAX_PLAYERS: PLocalizer.GameMaxPlayers,
GAME_OPTION_MAX_CREW_SIZE: PLocalizer.GameMaxCrew,
GAME_OPTION_MAX_CREW_SHIP: PLocalizer.GameMaxShip,
GAME_OPTION_VIP_PASS: PLocalizer.GameVIPPass,
GAME_OPTION_SOLO_PLAY: PLocalizer.GameSoloPlay },
'optionVal': {
GAME_DURATION_SHORT: PLocalizer.GameDurationShort,
GAME_DURATION_MED: PLocalizer.GameDurationMed,
GAME_DURATION_LONG: PLocalizer.GameDurationLong } }
def gatherGameStyleInfo(gameType, gameStyle, callback):
requestId = None
options = { }
if gameType == PiratesGlobals.GAME_TYPE_TM:
def gatherTMInfo(inventory):
if inventory:
treasureMaps = inventory.getTreasureMapsList()
else:
treasureMaps = []
tmsOwned = { }
for currTM in treasureMaps:
tmsOwned[currTM.mapId] = currTM.getOptions()
callback(tmsOwned)
if callback:
if hasattr(__builtin__, 'base'):
requestId = DistributedInventoryBase.DistributedInventoryBase.getInventory(localAvatar.inventoryId, gatherTMInfo)
else:
tmsAvailable = { }
if gameStyle != None:
numPlayers = PiratesGlobals.DYNAMIC_GAME_STYLE_PROPS[PiratesGlobals.GAME_TYPE_TM][gameStyle].get('NumPlayers')
if numPlayers and len(numPlayers) > 1:
options = {
GAME_OPTION_MAX_PLAYERS: numPlayers }
tmsAvailable[gameStyle] = options
styleInfo = {
'options': options }
callback(tmsAvailable)
return (requestId, options)
GameTypes = {
PiratesGlobals.GAME_TYPE_PRIV: {
'options': {
'execute': 'findPvp' } },
PiratesGlobals.GAME_TYPE_PVP: {
'style': {
PiratesGlobals.GAME_STYLE_CTL: {
'options': {
GAME_OPTION_MIN_PLAYERS: [
PiratesGuiGlobals.UIItemType_ListItem,
[
'2',
'3',
'4',
'5',
'6']] } },
PiratesGlobals.GAME_STYLE_PIRATEER: {
'options': {
GAME_OPTION_MIN_PLAYERS: [
PiratesGuiGlobals.UIItemType_ListItem,
[
'2',
'3',
'4',
'5',
'6']] } },
PiratesGlobals.GAME_STYLE_TEAM_BATTLE: {
'options': {
GAME_OPTION_MIN_PLAYERS: [
PiratesGuiGlobals.UIItemType_ListItem,
[
'4',
'6']],
GAME_OPTION_MAX_PLAYERS: [
PiratesGuiGlobals.UIItemType_Hidden,
'8'] } },
PiratesGlobals.GAME_STYLE_BATTLE: {
'options': {
GAME_OPTION_MIN_PLAYERS: [
PiratesGuiGlobals.UIItemType_ListItem,
[
'2',
'3',
'4',
'5',
'6']],
GAME_OPTION_MAX_PLAYERS: [
PiratesGuiGlobals.UIItemType_Hidden,
'8'] } },
PiratesGlobals.GAME_STYLE_SHIP_BATTLE: {
'options': {
GAME_OPTION_MAX_CREW_SIZE: [
PiratesGuiGlobals.UIItemType_Hidden],
GAME_OPTION_MAX_CREW_SHIP: [
PiratesGuiGlobals.UIItemType_Hidden] } } } },
PiratesGlobals.GAME_TYPE_CREW: {
'options': {
'execute': 'find' } },
PiratesGlobals.GAME_TYPE_PG: {
'style': {
PiratesGlobals.GAME_STYLE_BLACKJACK: {
'options': {
GAME_OPTION_MAX_PLAYERS: [
PiratesGuiGlobals.UIItemType_Hidden,
'6'] } },
PiratesGlobals.GAME_STYLE_POKER: {
'options': {
GAME_OPTION_MAX_PLAYERS: [
PiratesGuiGlobals.UIItemType_Hidden,
'6'] } } } },
PiratesGlobals.GAME_TYPE_TM: {
'style': gatherGameStyleInfo,
'hidden': True } }
pvpMode = ConfigVariableBool('pvp-testing-level', 0).getWord(0)
if pvpMode < 1:
del GameTypes[PiratesGlobals.GAME_TYPE_PVP]['style'][PiratesGlobals.GAME_STYLE_CTL]
if pvpMode < 2:
del GameTypes[PiratesGlobals.GAME_TYPE_PVP]['style'][PiratesGlobals.GAME_STYLE_PIRATEER]
if pvpMode < 3:
del GameTypes[PiratesGlobals.GAME_TYPE_PVP]['style'][PiratesGlobals.GAME_STYLE_SHIP_BATTLE]
def getGameTypes():
return GameTypes.keys()
def getGameStyles(gameType, gameStyle = None, callback = None):
if gameType in GameTypes and 'style' in GameTypes[gameType]:
styleInfo = GameTypes[gameType]['style']
if _styleInfoIsDynamic(styleInfo):
return styleInfo(gameType, gameStyle, callback)
callback(styleInfo.keys())
return (None, styleInfo.keys())
def styleInfoIsDynamic(gameType):
styleInfo = GameTypes[gameType]['style']
return _styleInfoIsDynamic(styleInfo)
def _styleInfoIsDynamic(styleInfo):
    # Dead `if ... pass` branch removed (reads as a decompiler artifact of an
    # `or` expression): dynamic style info is a plain function or a bound method.
    return type(styleInfo) in (types.MethodType, types.FunctionType)
def getGameOptions(gameType, gameStyle = None, callback = None):
requestId = None
gameOptions = { }
if gameType in GameTypes:
if 'options' in GameTypes[gameType]:
return GameTypes[gameType]['options']
elif gameStyle != None and 'style' in GameTypes[gameType]:
styleInfo = GameTypes[gameType]['style']
if _styleInfoIsDynamic(styleInfo):
def extractOptions(tmsOwned):
if callback:
foundOptions = { }
if tmsOwned:
foundOptions = tmsOwned[gameStyle].get('options', { })
callback(foundOptions)
(requestId, gameOptions) = styleInfo(gameType, gameStyle, extractOptions)
elif gameStyle in styleInfo:
gameOptions = styleInfo[gameStyle]['options']
if callback:
callback(gameOptions)
return (requestId, gameOptions)
def getGameTypeString(value, type, category = None):
if category != None and category in PiratesGlobals.DYNAMIC_GAME_STYLE_PROPS:
typeInfo = PiratesGlobals.DYNAMIC_GAME_STYLE_PROPS[category].get(value)
if typeInfo:
if type == 'style':
return typeInfo.get('Name')
elif type == 'descriptionStyle':
return typeInfo.get('Desc')
values = GameTypeStrings.get(type)
foundStr = None
if values:
foundStr = values.get(value)
return foundStr
def getGameTypeRanking(value):
foundIt = GameTypeRanking.get(value)
return foundIt
def gameTypeAccessable(gameCat, gameType, paidStatus):
if paidStatus:
return True
    elif ((gameCat == PiratesGlobals.GAME_TYPE_PVP and
           gameType in (PiratesGlobals.GAME_STYLE_BATTLE, PiratesGlobals.GAME_STYLE_TEAM_BATTLE)) or
          (gameCat == PiratesGlobals.GAME_TYPE_PG and gameType == PiratesGlobals.GAME_STYLE_BLACKJACK)):
        # Grouping reconstructed from an apparent decompiler precedence artifact:
        # free players may access PVP battle/team battle and the parlor blackjack game.
        return True
return False

# ===== repo: jhonex1234/ugestor | file: /ugestor_dto/EmpleadoDTO.py =====
# -*- coding: utf-8 -*-
from wtforms import Form
from wtforms import StringField, IntegerField
from wtforms import validators
from wtforms.fields.html5 import DateField
from wtforms.fields import SelectField
from wtforms import BooleanField
from wtforms.validators import DataRequired
from com_dao import ConnectionDB
def validateNotNull(form, field):
    # `len(field.data)` would raise TypeError for IntegerField values; check for missing data instead
    if field.data is None or field.data == '':
        raise validators.ValidationError('The field must not be null')
class EmpleadoDTO(Form):
    idpersona = IntegerField('', [validateNotNull])
    salario = IntegerField('', [validateNotNull])
    cargo = SelectField(label='Cargo', choices=[('Seleccione', 'Seleccione'), ('Gerente', 'Gerente'), ('Auxiliar', 'Auxiliar'), ('Contratista', 'contratista')])
fechaIngreso = DateField('Fecha de Registro', [])

# ===== repo: mychristopher/test | file: /pyfirstweek/第一课时/栈.py =====
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Simulate a stack structure
stack = []
# Push (store data onto the stack)
stack.append("A")
print(stack)
stack.append("b")
print(stack)
stack.append("c")
print(stack)
# Pop (take data off the stack): last in, first out
res1 = stack.pop()
print("res1 = ",res1)
print(stack)
res2 = stack.pop()
print("res2 = ",res2)
print(stack)
res3 = stack.pop()
print("res3 = ",res3)
print(stack)

# ===== repo: owlguy/pattern | file: /pattern/web/api.py =====
#--- API LICENSE CONFIGURATION -----------------------------------------------------------------------
# Default license keys used by pattern.web.SearchEngine to contact different API's.
# Google and Yahoo are paid services for which you need a personal license + payment method.
# The default Google license is for testing purposes (= 100 daily queries).
# Wikipedia, Twitter and Facebook are free.
# Bing, Flickr and ProductsWiki use licenses shared among all Pattern users.
license = {}
license["Google"] = \
"AIzaSyBxe9jC4WLr-Rry_5OUMOZ7PCsEyWpiU48"
license["Bing"] = \
"VnJEK4HTlntE3SyF58QLkUCLp/78tkYjV1Fl3J7lHa0="
license["Yahoo"] = \
("", "") # OAuth (key, secret)
license["Wikipedia"] = \
None
license["Twitter"] = (
"p7HUdPLlkKaqlPn6TzKkA", # OAuth (key, secret, token)
"R7I1LRuLY27EKjzulutov74lKB0FjqcI2DYRUmsu7DQ", (
"14898655-TE9dXQLrzrNd0Zwf4zhK7koR5Ahqt40Ftt35Y2qY",
"q1lSRDOguxQrfgeWWSJgnMHsO67bqTd5dTElBsyTM"))
license["Facebook"] = \
"332061826907464|jdHvL3lslFvN-s_sphK1ypCwNaY"
license["Flickr"] = \
"787081027f43b0412ba41142d4540480"
license["Products"] = \
"64819965ec784395a494a0d7ed0def32"

# ===== repo: michalkasiarz/automate-the-boring-stuff-with-python | file: /more-about-strings/isOnlyLikeMethods.py =====
# Is only-like methods
import pyperclip
text = """
Then that knight, whom neither captivity nor wounds nor grief nor the
terrible Burdabut could bring down, was brought down by happiness. His
hands dropped at his side, great drops of sweat came out on his
forehead; he fell on his knees, covered his face with his hands, and
leaning his head against the wall of the ravine, remained in silence,
evidently thanking God."""
only_whitespace = " "
only_numbers = "21143"
only_text = "Hello"
title = "The Title Style"
# isalpha method
print(only_text.isalpha()) # True
print(text.isalpha()) # False
# isspace method
print(only_whitespace.isspace()) # True
print("The surely are some spaces".isspace()) # True
# isdecimal method
print(only_numbers.isdecimal()) # True, although it is a String
# istitle method
print(title.istitle()) # True
# isalnum method
print("razdwatrzy123".isalnum()) # True
# startswith and endswith methods
print("Hello World".startswith("Hello")) # True
print("Hello World".endswith("World")) # True
# join method
to_be_joined = ["cats", "dogs", "hot-dogs", "avengers"]
joined_text = ", ".join(to_be_joined)
print(joined_text)
# split method
print(text.split())
# ljust and rjust method
print("Hello".rjust(100, "-"))
print("Hello".ljust(100, "*"))
# center method
print("Hello".center(100, "="))
# strip, rstrip, lstrip methods
rjusted = "Hello".rjust(30, "/")
print(rjusted)
rjusted_stripped = rjusted.strip("/")
print(rjusted_stripped)
text_with_spaces = " Hello "
print(text_with_spaces)
print(text_with_spaces.strip())
# replace method
spam = "Hello there!"
spam = spam.replace("e", "XYZ")
print(spam)
# pyperclip module
pyperclip.copy(spam)
copied_stuff = pyperclip.paste()
print(copied_stuff)

# ===== repo: ostwald/python-lib | file: /uconn/migration_tools.py =====
"""
Collection config and Collection Record Mover
We are migrating all DLESE collections ("adn" and "dlese_anno") to NSDL.
The metadata and dcs_data records can be moved by hand.
This file contains tools to migrate the other collection components:
- collection config file
- collection record
NOTE: this module has nothing to do with the CollectionOfCollections
"""
import sys, os, time, shutil
from masterCollection import MasterCollection
from bscs.collection_tool import CollectionTool
from JloXml import DleseCollectRecord, XmlUtils
dowrites = 0
dlese_base = '/Users/ostwald/Desktop/DLESE_MIGRATION/DLESE' #DLESE
nsdl_base = '/Users/ostwald/Desktop/DLESE_MIGRATION/NSDL' # NSDL
dlese_records = os.path.join (dlese_base, 'records') #DLESE
nsdl_records = os.path.join (nsdl_base, 'records') #NSDL
dlese_collection_configs = os.path.join(dlese_base, 'dcs_conf/collections')
nsdl_collection_configs = os.path.join(nsdl_base, 'dcs_conf/collections')
def get_nsdl_collection_keys():
nsdl_collection_records = get_nsdl_collection_records()
return nsdl_collection_records.getValues('key')
def get_nsdl_collection_records():
"""
returns MasterCollection instance containing NSDL collection records
"""
nsdl_collect_dir = os.path.join (nsdl_records, 'dlese_collect', 'collect')
return MasterCollection(nsdl_collect_dir, DleseCollectRecord)
def findDleseCollectionRecord(field, value):
"""
returns first DleseCollectRecord having the specified value for specified field
"""
dlese_collect_dir = os.path.join (dlese_records, 'dlese_collect', 'collect')
for filename in filter (lambda x:x.endswith('xml'), os.listdir(dlese_collect_dir)):
path = os.path.join (dlese_collect_dir, filename)
rec = DleseCollectRecord (path=path)
if (rec.get(field) == value):
return rec
nsdl_keys = get_nsdl_collection_keys()
def copyDleseCollectionRecord(key):
"""
copies DLESE collection record for specified key
into NSDL collection records
"""
record = findDleseCollectionRecord ('key', key)
if not record:
        raise KeyError, 'dleseCollectionRecord not found for %s' % key
#now we want to rename the record
record.setId(key)
# create the dest path in nsdl collections
nsdl_collect_dir = os.path.join (nsdl_records, 'dlese_collect', 'collect')
dest = os.path.join (nsdl_collect_dir, key+'.xml')
# check to see if file exists
if os.path.exists(dest):
raise KeyError, "nsdl collection record already exists for %s" % key
# check to see if collection key exists!!
if key in nsdl_keys:
raise KeyError, "nsdl key already exists for %s" % key
if dowrites:
record.write (path=dest)
print "wrote to", dest
else:
print 'Would have written record to %s' % dest
# print record
# for EACH collection
# find the collection record
# copy it into nsdl repo
# find collection config
# copy it into nsdl collection config
def findCollectionConfig (key):
"""
finds DLESE collection config for given key
"""
filename = key+'.xml'
path = os.path.join (dlese_collection_configs, filename)
if not os.path.exists(path):
raise KeyError, "dlese collection config not found for %s" % path
return path
def moveCollectionConfig(key):
"""
copies DLESE collection config for given key into NSDL collection configs
"""
filename = key+'.xml'
collection_config = findCollectionConfig (key)
newpath = os.path.join (nsdl_collection_configs, filename)
if os.path.exists(newpath):
raise KeyError, 'nsdl collection config already exists for %s' % key
if dowrites:
return shutil.copyfile (collection_config, newpath)
else:
print 'Would have copied %s to %s' % (filename, newpath)
# copy collection dir into dest rep
# copy collection dcs_data dir into dest rep
def testGet_nsdl_collection_keys():
for key in get_nsdl_collection_keys():
print '-', key
def testFindCollectionRecord():
foo = findDleseCollectionRecord('key', 'dcc')
if foo:
print foo
else:
print 'not found'
def main():
"""
for each collection key for adn and dlese_anno xmlFormats,
- copy the DLESE collection record to NSDL
- copy the DLESE collection config file to NSDL
"""
for xmlFormat in ['adn', 'dlese_anno']:
print '\n', xmlFormat
dlese_format_dir = os.path.join (dlese_records, xmlFormat)
for key in os.listdir(dlese_format_dir):
print '-', key
copyDleseCollectionRecord(key)
moveCollectionConfig(key)
if __name__ == '__main__':
# moveCollectionConfig ("dcc")
# copyDleseCollectionRecord("dcc")
# testGet_nsdl_collection_keys()
main()

# ===== repo: ufal/lsd | file: /attention-analysis/naacl2019/poster_conll/samples/devow.py =====
#!/usr/bin/env python3
#coding: utf-8
import sys
from unidecode import unidecode
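# Reads whitespace-tokenised text from stdin and prints it transliterated to
# ASCII, lower-cased, and with vowels removed, e.g. "Hello Wörld" -> "hll wrld";
# tokens that become empty are replaced by "_".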
for line in sys.stdin:
line = line.strip()
forms = line.split(' ')
forms_devow = list()
for form in forms:
form = unidecode(form)
form = form.lower()
form = form.replace("a", "")
form = form.replace("e", "")
form = form.replace("i", "")
form = form.replace("o", "")
form = form.replace("u", "")
form = form.replace("y", "")
if form == "":
form = "_"
forms_devow.append(form)
print(*forms_devow, sep=' ')

# ===== repo: takumi152/atcoder | file: /jsc2019b.py =====
def main():
n, k = map(int, input().split())
a = list(map(int, input().split()))
ans = 0
for i in range(n):
for j in range(n):
if a[i] > a[j]:
inv = 0
if (i < j):
inv = (((k + 1) * k) // 2) % mod
else:
inv = ((k * (k - 1)) // 2) % mod
ans = (ans + inv) % mod
print(ans)
if __name__ == '__main__':
main()
| [
"[email protected]"
] | |
8b54d1891f5538b15a69956a3913d3edb85dfca3 | 741333ced9ea1b326997dc031e5de27529bad04a | /glue_vispy_viewers/extern/vispy/visuals/collections/collection.py | 14e2eecf740eb08031f0e177b159799b158e1384 | [
"BSD-3-Clause",
"BSD-2-Clause"
] | permissive | jzuhone/glue-vispy-viewers | f1b7f506d3263c4b0c2f4032d4940b931b2c1ada | d940705f4ba95f8d7a9a74d37fb68c71080b490a | refs/heads/master | 2020-06-20T19:10:02.866527 | 2019-06-24T11:40:39 | 2019-06-24T11:40:39 | 197,217,964 | 0 | 0 | BSD-2-Clause | 2019-07-16T15:14:53 | 2019-07-16T15:14:52 | null | UTF-8 | Python | false | false | 8,684 | py | # -*- coding: utf-8 -*-
# -----------------------------------------------------------------------------
# Copyright (c) 2014, Nicolas P. Rougier
# Distributed under the (new) BSD License. See LICENSE.txt for more info.
# -----------------------------------------------------------------------------
"""
A collection is a container for several items having the same data
structure (dtype). Each data type can be declared as local (it is specific to
a vertex), shared (it is shared among an item's vertices) or global (it is shared
by all vertices). It is based on the BaseCollection but offers a more intuitive
interface.
"""
import numpy as np
from ... import gloo
from . util import fetchcode
from . base_collection import BaseCollection
from ..shaders import ModularProgram
from ...util.event import EventEmitter
class Collection(BaseCollection):
"""
A collection is a container for several items having the same data
structure (dtype). Each data type can be declared as local (it is specific
to a vertex), shared (it is shared among item vertices) or global (it is
shared by all items). It is based on the BaseCollection but offers a more
intuitive interface.
Parameters
----------
dtype: list
Data individual types as (name, dtype, scope, default)
itype: np.dtype or None
Indices data type
mode : GL_ENUM
GL_POINTS, GL_LINES, GL_LINE_STRIP, GL_LINE_LOOP,
GL_TRIANGLES, GL_TRIANGLE_STRIP, GL_TRIANGLE_FAN
vertex: str or tuple of str
Vertex shader to use to draw this collection
fragment: str or tuple of str
Fragment shader to use to draw this collection
kwargs: str
Scope can also be specified using keyword argument,
where parameter name must be one of the dtype.
"""
_gtypes = {('float32', 1): "float",
('float32', 2): "vec2",
('float32', 3): "vec3",
('float32', 4): "vec4",
('int32', 1): "int",
('int32', 2): "ivec2",
('int32', 3): "ivec3",
('int32', 4): "ivec4"}
def __init__(self, dtype, itype, mode, vertex, fragment, program=None,
**kwargs):
"""
"""
self._uniforms = {}
self._attributes = {}
self._varyings = {}
self._mode = mode
vtype = []
utype = []
self.update = EventEmitter(source=self, type='collection_update')
# Build vtype and utype according to parameters
declarations = {"uniforms": "",
"attributes": "",
"varyings": ""}
defaults = {}
for item in dtype:
name, (basetype, count), scope, default = item
basetype = np.dtype(basetype).name
if scope[0] == "!":
scope = scope[1:]
else:
scope = kwargs.pop(name, scope)
defaults[name] = default
gtype = Collection._gtypes[(basetype, count)]
if scope == "local":
vtype.append((name, basetype, count))
declarations[
"attributes"] += "attribute %s %s;\n" % (gtype, name)
elif scope == "shared":
utype.append((name, basetype, count))
declarations["varyings"] += "varying %s %s;\n" % (gtype, name)
else:
declarations["uniforms"] += "uniform %s %s;\n" % (gtype, name)
self._uniforms[name] = None
if len(kwargs) > 0:
raise NameError("Invalid keyword argument(s): %s" %
list(kwargs.keys()))
vtype = np.dtype(vtype)
itype = np.dtype(itype) if itype else None
utype = np.dtype(utype) if utype else None
BaseCollection.__init__(self, vtype=vtype, utype=utype, itype=itype)
self._declarations = declarations
self._defaults = defaults
# Build program (once base collection is built)
saved = vertex
vertex = ""
if self.utype is not None:
vertex += fetchcode(self.utype) + vertex
else:
vertex += "void fetch_uniforms(void) { }\n" + vertex
vertex += self._declarations["uniforms"]
vertex += self._declarations["attributes"]
vertex += saved
self._vertex = vertex
self._fragment = fragment
if program is None:
program = ModularProgram(vertex, fragment)
else:
program.vert = vertex
program.frag = fragment
if hasattr(program, 'changed'):
program.changed.connect(self.update)
self._programs.append(program)
# Initialize uniforms
for name in self._uniforms.keys():
self._uniforms[name] = self._defaults.get(name)
program[name] = self._uniforms[name]
def view(self, transform, viewport=None):
""" Return a view on the collection using provided transform """
return CollectionView(self, transform, viewport)
# program = gloo.Program(self._vertex, self._fragment)
# if "transform" in program.hooks:
# program["transform"] = transform
# if "viewport" in program.hooks:
# if viewport is not None:
# program["viewport"] = viewport
# else:
# program["viewport"] = Viewport()
# self._programs.append(program)
# program.bind(self._vertices_buffer)
# for name in self._uniforms.keys():
# program[name] = self._uniforms[name]
# #if self._uniforms_list is not None:
# # program["uniforms"] = self._uniforms_texture
# # program["uniforms_shape"] = self._ushape
# # Piggy backing
# def draw():
# if self._need_update:
# self._update()
# program.bind(self._vertices_buffer)
# if self._uniforms_list is not None:
# program["uniforms"] = self._uniforms_texture
# program["uniforms_shape"] = self._ushape
# if self._indices_list is not None:
# Program.draw(program, self._mode, self._indices_buffer)
# else:
# Program.draw(program, self._mode)
# program.draw = draw
# return program
def __getitem__(self, key):
program = self._programs[0]
for name, (storage, _, _) in program._code_variables.items():
if name == key and storage == 'uniform':
return program[key]
return BaseCollection.__getitem__(self, key)
def __setitem__(self, key, value):
try:
BaseCollection.__setitem__(self, key, value)
except IndexError:
for program in self._programs:
program[key] = value
def draw(self, mode=None):
""" Draw collection """
if self._need_update:
self._update()
program = self._programs[0]
mode = mode or self._mode
if self._indices_list is not None:
program.draw(mode, self._indices_buffer)
else:
program.draw(mode)
class CollectionView(object):
def __init__(self, collection, transform=None, viewport=None):
vertex = collection._vertex
fragment = collection._fragment
program = gloo.Program(vertex, fragment)
# if "transform" in program.hooks and transform is not None:
# program["transform"] = transform
# if "viewport" in program.hooks and viewport is not None:
# program["viewport"] = viewport
program.bind(collection._vertices_buffer)
for name in collection._uniforms.keys():
program[name] = collection._uniforms[name]
collection._programs.append(program)
self._program = program
self._collection = collection
def __getitem__(self, key):
return self._program[key]
def __setitem__(self, key, value):
self._program[key] = value
def draw(self):
program = self._program
collection = self._collection
mode = collection._mode
if collection._need_update:
collection._update()
# self._program.bind(self._vertices_buffer)
if collection._uniforms_list is not None:
program["uniforms"] = collection._uniforms_texture
program["uniforms_shape"] = collection._ushape
if collection._indices_list is not None:
program.draw(mode, collection._indices_buffer)
else:
program.draw(mode)
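# --- Illustrative sketch (not part of the original module) ---
# A minimal, hypothetical dtype declaration for a Collection, following the
# (name, (base, count), scope, default) convention parsed in __init__ above.
# The field names and the shader strings are assumptions for illustration:
#
#     dtype = [("position",  (np.float32, 3), "!local", (0, 0, 0)),
#              ("color",     (np.float32, 4), "shared", (0, 0, 0, 1)),
#              ("linewidth", (np.float32, 1), "global", 1.0)]
#     coll = Collection(dtype, itype=None, mode="points",
#                       vertex=vertex_shader, fragment=fragment_shader)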
| [
"[email protected]"
] | |
00191c1a773e4d66c15685de87cbbd915c2b92a5 | 801510e45d9aebe5c5b8b09a3ce4453a3a11a3ca | /django/django_ORM/single_model_orm/single_model_orm/urls.py | db0d1d94d822231e050fbf74c79095da52263139 | [] | no_license | michelleshan/coding_dojo_python_course | 5581ebca0a645ba7231a2da2d2d64d6c3735bfc4 | e20e8195950004ef0aa09e6b0f84e7f05bd355e8 | refs/heads/master | 2022-11-21T01:34:54.309175 | 2020-07-16T03:29:45 | 2020-07-16T03:29:45 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 736 | py | """single_model_orm URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/2.2/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.urls import path, include
urlpatterns = [
path('',include('users_app.urls'))
]
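# A hypothetical sketch of the included app URLconf referenced above
# ('users_app/urls.py'); the view name ``index`` is an assumption, not part
# of this project:
#
#     from django.urls import path
#     from . import views
#
#     urlpatterns = [
#         path('', views.index),
#     ]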
| [
"[email protected]"
] | |
fc392e3854daabe6445ca4420543b0deb2a18396 | f0d713996eb095bcdc701f3fab0a8110b8541cbb | /6DppMcokmzJ3TtNNB_18.py | 4f78b679d2153a5d1ab0cf45e1470064d88fea3d | [] | no_license | daniel-reich/turbo-robot | feda6c0523bb83ab8954b6d06302bfec5b16ebdf | a7a25c63097674c0a81675eed7e6b763785f1c41 | refs/heads/main | 2023-03-26T01:55:14.210264 | 2021-03-23T16:08:01 | 2021-03-23T16:08:01 | 350,773,815 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 756 | py | """
Create a function which takes every letter in every word, and puts it in
alphabetical order. Note how the **original word lengths must stay the same**.
### Examples
true_alphabetic("hello world") ➞ "dehll loorw"
true_alphabetic("edabit is awesome") ➞ "aabdee ei imosstw"
true_alphabetic("have a nice day") ➞ "aaac d eehi nvy"
### Notes
* All sentences will be in lowercase.
* No punctuation or numbers will be included in the **Tests**.
"""
def true_alphabetic(txt):
s = ''
for x in txt:
if x != ' ':
s += x
s = sorted(s)
ans = ''
j = 0
for x in txt:
if x == ' ':
ans += x
else:
ans += s[j]
j += 1
return ans
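# Quick sanity checks added for illustration; these mirror the examples in
# the docstring above.
if __name__ == "__main__":
    assert true_alphabetic("hello world") == "dehll loorw"
    assert true_alphabetic("edabit is awesome") == "aabdee ei imosstw"
    assert true_alphabetic("have a nice day") == "aaac d eehi nvy"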
| [
"[email protected]"
] | |
a1a946620d9626abff33659e812cd8405867a69b | 0fa55a3150ebda33cf252e1915b0d3a1fd9474b2 | /setup.py | 089382e864463f497a51304a51e2d89a68848fdc | [
"MIT"
] | permissive | michalc/lowhaio-aws-sigv4 | 8dff5e58faceae4b892e8fa29f1331cd3378bbcf | c802126f2ce13cb88e7f695b86484637840fd464 | refs/heads/master | 2020-05-24T22:45:13.397252 | 2019-06-15T18:08:16 | 2019-06-15T18:08:16 | 187,502,903 | 0 | 0 | MIT | 2019-05-19T18:17:33 | 2019-05-19T16:44:34 | Python | UTF-8 | Python | false | false | 828 | py | import setuptools
def long_description():
with open('README.md', 'r') as file:
return file.read()
setuptools.setup(
name='lowhaio_aws_sigv4',
version='0.0.4',
author='Michal Charemza',
author_email='[email protected]',
description='AWS Signature Version 4 signing for lowhaio',
long_description=long_description(),
long_description_content_type='text/markdown',
url='https://github.com/michalc/lowhaio-aws-sigv4',
py_modules=[
'lowhaio_aws_sigv4',
],
python_requires='>=3.6.0',
test_suite='test',
tests_require=[
'lowhaio~=0.0.61',
],
classifiers=[
'Programming Language :: Python :: 3',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent',
'Framework :: AsyncIO',
],
)
| [
"[email protected]"
] | |
9bc366600d2d561b6f0b040a3d7c62a0d11fb15f | e8bf00dba3e81081adb37f53a0192bb0ea2ca309 | /domains/explore/problems/training/problem274_EE.py | c63ddf6da5997ae0f79a3d1c91553002a27dcf11 | [
"BSD-3-Clause"
] | permissive | patras91/rae_release | 1e6585ee34fe7dbb117b084df982ca8a8aed6795 | 0e5faffb7eb732fdb8e3bbf2c6d2f2cbd520aa30 | refs/heads/master | 2023-07-13T20:09:41.762982 | 2021-08-11T17:02:58 | 2021-08-11T17:02:58 | 394,797,515 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,694 | py | __author__ = 'patras'
from domain_exploreEnv import *
from timer import DURATION
from state import state, rv
DURATION.TIME = {
'survey': 5,
'monitor': 5,
'screen': 5,
'sample': 5,
'process': 5,
'fly': 3,
'deposit': 1,
'transferData': 1,
'take': 2,
'put': 2,
'move': 10,
'charge': 5,
'negotiate': 5,
'handleAlien': 5,
}
DURATION.COUNTER = {
'survey': 5,
'monitor': 5,
'screen': 5,
'sample': 5,
'process': 5,
'fly': 3,
'deposit': 1,
'transferData': 1,
'take': 2,
'put': 2,
'move': 10,
'charge': 5,
'negotiate': 5,
'handleAlien': 5,
}
rv.TYPE = {'e1': 'survey', 'e2': 'monitor', 'e3': 'screen', 'e4': 'sample', 'e5':'process'}
rv.EQUIPMENT = {'survey': 'e1', 'monitor': 'e2', 'screen': 'e3', 'sample': 'e4', 'process': 'e5'}
rv.EQUIPMENTTYPE = {'e1': 'survey', 'e2': 'monitor', 'e3': 'screen', 'e4': 'sample', 'e5':'process'}
rv.LOCATIONS = ['base', 'z1', 'z2', 'z3', 'z4']
rv.EDGES = {'base': {'z1': 20, 'z2': 50, 'z3': 20, 'z4': 50}, 'z1': {'base': 20, 'z2': 30, 'z4': 50}, 'z2': {'base': 50, 'z1': 30, 'z3': 30}, 'z3': {'base': 20, 'z2': 30, 'z4': 30}, 'z4': {'base': 50, 'z3': 30, 'z1': 50}}
def ResetState():
state.loc = {'r1': 'base', 'r2': 'base', 'UAV': 'base'}
state.charge = { 'UAV': 50, 'r1': 80, 'r2': 50}
state.data = { 'UAV': 3, 'r1': 1, 'r2': 1}
state.pos = {'c1': 'base', 'e1': 'r2', 'e2': 'base', 'e3': 'base', 'e4': 'base', 'e5': 'base'}
state.load = {'r1': NIL, 'r2': 'e1', 'UAV': NIL}
state.storm = {'active': False}
tasks = {
6: [['doActivities', 'UAV', [['survey', 'z2'], ['survey', 'z3'], ['survey', 'base']]]],
}
eventsEnv = {
} | [
"[email protected]"
] | |
0de58022fd098cfc9447970c42cfb2c2a68d63d3 | 383a974b225b3d5decf311e6224a14f0e86a14c9 | /affiliates/banners/models.py | 00f4f8e7026897464b9c19b05c51285e76522a56 | [
"MIT",
"BSD-3-Clause"
] | permissive | tub216/affiliates | d7b465d1dc9a5d3bcf041cf96741028e9c67625c | ffce6c42a6caf73bbedaca429ec0aa9ad70fc7e5 | refs/heads/master | 2020-05-29T11:06:33.555351 | 2014-02-18T21:06:24 | 2014-03-20T21:47:26 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,603 | py | import hashlib
import os
from django.core.exceptions import ValidationError
from django.db import models
from django.template.loader import render_to_string
from mptt.models import MPTTModel, TreeForeignKey
from affiliates.banners import COLOR_CHOICES
from affiliates.links.models import Link
from affiliates.shared.models import LocaleField
class Category(MPTTModel):
"""
Category that groups together either subcategories or banners.
A category tree can only be 2 layers deep, including the roots. This
is only enforced by model validation, so site code could
theoretically create Categories that violate this rule, but in
practice the only place that Categories should be created is the
admin interface.
"""
name = models.CharField(max_length=255)
parent = TreeForeignKey('self', null=True, blank=True, related_name='children')
class MPTTMeta:
order_insertion_by = ['name']
def clean(self):
"""
Validate that this category isn't more than one layer deep.
"""
if self.get_level() > 1:
raise ValidationError('Categories cannot be more than one level deep.')
class Banner(models.Model):
"""A type of banner that a user can generate links from."""
category = TreeForeignKey(Category)
name = models.CharField(max_length=255)
destination = models.URLField(max_length=255)
visible = models.BooleanField(default=False)
created = models.DateTimeField(auto_now_add=True)
modified = models.DateTimeField(auto_now=True)
class Meta:
abstract = True
def generate_banner_code(self, *args, **kwargs):
"""
Generate the HTML that users will copy onto their website to
display this banner. Arguments will vary based on the subclass.
"""
raise NotImplementedError()
def create_link(self, user, *args, **kwargs):
"""
Create a Link based off of this banner. Extra arguments are
passed on to generate_banner_code.
"""
html = self.generate_banner_code(*args, **kwargs)
return Link(user=user, destination=self.destination, html=html)
class ImageBanner(Banner):
"""Banner displayed as an image link."""
def generate_banner_code(self, variation):
return render_to_string('banners/banner_code/image_banner.html', {
'href': self.destination,
'variation': variation
})
class ImageBannerVariation(models.Model):
"""
Variation of an image banner that a user can choose to use for their
link.
"""
banner = models.ForeignKey(ImageBanner)
color = models.CharField(max_length=32, choices=COLOR_CHOICES)
locale = LocaleField()
def _filename(self, filename):
props = '{id}_{width}_{height}_{color}_{locale}'.format(
id=self.banner_id,
width=self.image.width,
height=self.image.height,
color=self.color,
locale=self.locale
)
props_hash = hashlib.sha1(props).hexdigest()
extension = os.path.splitext(filename)[1]
return os.path.join('uploads/banners', props_hash + extension)
image = models.ImageField(upload_to=_filename, max_length=255)
class TextBanner(Banner):
"""
Banner displayed as a string of text with a link.
Text should use Python format syntax to include the link. For
example:
> Value privacy? <a href="{href}">Download Firefox!</a>
"""
text = models.TextField()
def generate_banner_code(self):
return self.text.format(href=self.destination)
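# --- Illustrative note (not part of the original module) ---
# generate_banner_code above is plain ``str.format``: a TextBanner whose
# ``text`` is 'Value privacy? <a href="{href}">Download Firefox!</a>' and
# whose ``destination`` is 'https://example.org' (a placeholder URL) renders:
#     'Value privacy? <a href="https://example.org">Download Firefox!</a>'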
| [
"[email protected]"
] | |
940914c8add27ca2a6dc0ce9414a4b1d69b2bdc8 | ef10c3da3b15bfdec0d9b88de753ae3540f72120 | /utils/inputs/segmentation.py | 175d7a02a4b9885e9bb5d403134b39cc2dd884b4 | [
"MIT"
] | permissive | Qoboty/asr_preprocessing | 3554456364b9ee751298b3378a1a109737c473d4 | d9cfda36edd9155ef45c6eb9626c42d1ba105bfd | refs/heads/master | 2021-07-06T18:08:59.336598 | 2017-09-26T08:30:53 | 2017-09-26T08:30:53 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,594 | py | #! /usr/bin/env python
# -*- coding: utf-8 -*-
"""Segment a htk file into each utterance."""
import numpy as np
from struct import unpack
from utils.inputs.wav2feature_python_speech_features import wav2feature as w2f_psf
from utils.inputs.wav2feature_librosa import wav2feature as w2f_librosa
def segment_htk(audio_path, speaker, utterance_dict, is_training,
sil_duration=0., tool='htk', config=None, mean=None,
dtype=np.float64):
"""Segment each HTK or WAV file into utterances. Normalization will not be
conducted here.
Args:
audio_path (string): path to a HTK or WAV file
speaker (string): speaker name
utterance_dict (dict): dictionary of utterance information
key (string) => utterance index
value (list) => [start_frame, end_frame, transcript (, transcript2)]
sil_duration (float): duration of silence at both ends. Default is 0.
tool (string): htk or python_speech_features or librosa
config (dict): a configuration for feature extraction
mean (np.ndarray): A mean vector over the file
dtype (optional): default is np.float64
Returns:
input_data_dict (dict):
key (string) => utt_index
            value (np.ndarray) => a feature array of shape
`(frame_num, feature_dim)`
input_data_utt_sum (np.ndarray): A sum of feature vectors of a speaker
mean (np.ndarray): A mean vector over the file
stddev (np.ndarray): A stddev vector over the file
total_frame_num_file (int): total frame num of the target speaker's utterances
"""
if tool != 'htk' and config is None:
raise ValueError('Set config dict.')
# Read the HTK or WAV file
if tool == 'htk':
input_data = read_htk(audio_path)
elif tool == 'python_speech_features':
input_data = w2f_psf(audio_path,
feature_type=config['feature_type'],
feature_dim=config['channels'],
use_energy=config['energy'],
use_delta1=config['delta'],
use_delta2=config['deltadelta'],
window=config['window'],
slide=config['slide'])
elif tool == 'librosa':
input_data = w2f_librosa(audio_path,
feature_type=config['feature_type'],
feature_dim=config['channels'],
use_energy=config['energy'],
use_delta1=config['delta'],
use_delta2=config['deltadelta'],
window=config['window'],
slide=config['slide'])
feature_dim = input_data.shape[1]
# Divide into each utterance
input_data_dict = {}
total_frame_num_file = 0
end_frame_pre = 0
utt_num = len(utterance_dict.keys())
utt_dict_sorted = sorted(utterance_dict.items(), key=lambda x: x[0])
input_data_utt_sum = np.zeros((feature_dim,), dtype=dtype)
stddev = np.zeros((feature_dim,), dtype=dtype)
for i, (utt_index, utt_info) in enumerate(utt_dict_sorted):
start_frame, end_frame = utt_info[0], utt_info[1]
# Check timestamp
if start_frame > end_frame:
print(utterance_dict)
print('Warning: time stamp is reversed.')
print('speaker index: %s' % speaker)
print('utterance index: %s & %s' %
(str(utt_index), utt_dict_sorted[i + 1][0]))
# Check the first utterance
if i == 0:
if start_frame >= sil_duration:
start_frame_extend = start_frame - sil_duration
else:
start_frame_extend = 0
start_frame_next = utt_dict_sorted[i + 1][1][0]
if end_frame > start_frame_next:
print('Warning: utterances are overlapping.')
print('speaker index: %s' % speaker)
print('utterance index: %s & %s' %
(str(utt_index), utt_dict_sorted[i + 1][0]))
if start_frame_next - end_frame >= sil_duration * 2:
end_frame_extend = end_frame + sil_duration
else:
end_frame_extend = end_frame + \
int((start_frame_next - end_frame) / 2)
# Check the last utterance
elif i == utt_num - 1:
if start_frame - end_frame_pre >= sil_duration * 2:
start_frame_extend = start_frame - sil_duration
else:
start_frame_extend = start_frame - \
int((start_frame - end_frame_pre) / 2)
if input_data.shape[0] - end_frame >= sil_duration:
end_frame_extend = end_frame + sil_duration
else:
end_frame_extend = input_data.shape[0] # last frame
# Check other utterances
else:
if start_frame - end_frame_pre >= sil_duration * 2:
start_frame_extend = start_frame - sil_duration
else:
start_frame_extend = start_frame - \
int((start_frame - end_frame_pre) / 2)
start_frame_next = utt_dict_sorted[i + 1][1][0]
if end_frame > start_frame_next:
print('Warning: utterances are overlapping.')
print('speaker: %s' % speaker)
print('utt index: %s & %s' %
(str(utt_index), utt_dict_sorted[i + 1][0]))
if start_frame_next - end_frame >= sil_duration * 2:
end_frame_extend = end_frame + sil_duration
else:
end_frame_extend = end_frame + \
int((start_frame_next - end_frame) / 2)
input_data_utt = input_data[start_frame_extend:end_frame_extend]
input_data_utt_sum += np.sum(input_data_utt, axis=0)
total_frame_num_file += (end_frame_extend - start_frame_extend)
input_data_dict[str(utt_index)] = input_data_utt
# For computing stddev over the file
if mean is not None:
stddev += np.sum(
np.abs(input_data_utt - mean) ** 2, axis=0)
# Update
end_frame_pre = end_frame
if is_training:
if mean is not None:
# Compute stddev over the file
stddev = np.sqrt(stddev / (total_frame_num_file - 1))
else:
# Compute mean over the file
mean = input_data_utt_sum / total_frame_num_file
stddev = None
else:
mean, stddev = None, None
return input_data_dict, input_data_utt_sum, mean, stddev, total_frame_num_file
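# Illustrative shape of ``utterance_dict`` as consumed above (the frame
# indices and transcripts are hypothetical):
#     {'0001': [12, 340, 'hello'], '0002': [360, 702, 'world']}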
def read_htk(audio_path):
"""Read each HTK file.
Args:
audio_path (string): path to a HTK file
Returns:
input_data (np.ndarray): A tensor of size (frame_num, feature_dim)
"""
with open(audio_path, "rb") as fh:
spam = fh.read(12)
frame_num, sampPeriod, sampSize, parmKind = unpack(">IIHH", spam)
# print(frame_num) # frame num
# print(sampPeriod) # 10ms
# print(sampSize) # feature dim * 4 (byte)
# print(parmKind)
veclen = int(sampSize / 4)
fh.seek(12, 0)
input_data = np.fromfile(fh, 'f')
# input_data = input_data.reshape(int(len(input_data) / veclen),
# veclen)
input_data = input_data.reshape(-1, veclen)
input_data.byteswap(True)
return input_data
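# A minimal round-trip sketch (illustrative): write a synthetic HTK file with
# a big-endian 12-byte header followed by float32 frames, then read it back.
# The 39-dim feature shape, the parmKind value 6 (MFCC) and the little-endian
# host (an assumption read_htk itself makes via byteswap) are assumptions of
# this example only.
if __name__ == "__main__":
    import tempfile
    from struct import pack
    frames, dim = 5, 39
    feats = np.arange(frames * dim, dtype='>f4').reshape(frames, dim)
    with tempfile.NamedTemporaryFile(suffix='.htk', delete=False) as f:
        f.write(pack(">IIHH", frames, 100000, dim * 4, 6))
        f.write(feats.tobytes())
        path = f.name
    data = read_htk(path)
    assert data.shape == (frames, dim)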
| [
"[email protected]"
] | |
edd7fe691c2cef36ba433f32c16a17394a35791b | 254e35ed13abb5670eb664c1b17cb77d6b2d6289 | /LeetCode/python/_486.PredicttheWinner.py | 1d78412cd25a05e9eb784c866f68ac48259c106b | [] | no_license | bobby20180331/Algorithms | 475f7b29efcab829bc97b18a088600d406850fc7 | c56967e292b34162438f86bfc4c76925329105dd | refs/heads/master | 2023-04-23T04:36:26.977179 | 2021-02-04T06:47:41 | 2021-02-04T06:47:41 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,774 | py | #利用dp..只能写到这一步了...
#16行有问题,因为这个并不是一直让你选的。A选了后,B选,B同样会最大化收益的去选择
#突然又有想法了,改了下,分别为求min和max,然后交替迭代即可
#但是还是没过[0,0,7,6,5,6,1],不知道怎么错了...
class Solution(object):
def minA(self,nums1):
if len(nums1)==3:
n0 = nums1[:]
n0.sort()
return n0[2]+min(nums1[0],nums1[2])
if len(nums1)==4:
n1 = nums1[1:]
n1.sort()
n2 = nums1[:-1]
n2.sort()
left2min = n1[1]
right2min =n2[1]
return min((nums1[0]+left2min),(nums1[0]+nums1[2]),(nums1[-1]+right2min),(nums1[-1]+nums1[1]))
minASum = min((nums1[0]+self.maxA(nums1[1:])),(nums1[-1]+self.maxA(nums1[:-1])))
return minASum
def maxA(self,nums2):
if len(nums2)==3:
n0 = nums2[:]
n0.sort()
return n0[0]+max(nums2[0],nums2[2])
if len(nums2)==4:
n1 = nums2[1:]
n1.sort()
n2 = nums2[:-1]
n2.sort()
left2max = n1[1]
right2max =n2[1]
return max((nums2[0]+left2max),(nums2[0]+nums2[2]),(nums2[-1]+right2max),(nums2[-1]+nums2[1]))
maxASum = max((nums2[0]+self.minA(nums2[1:])),(nums2[-1]+self.minA(nums2[:-1])))
return maxASum
def PredictTheWinner(self, nums):
"""
:type nums: List[int]
:rtype: bool
"""
        # Idea: use DP to first find the maximum value A can obtain, then
        # compare it with B's total to decide.
if len(nums)<3:
return True
sumA = self.maxA(nums)
if sumA >sum(nums)-sumA:
return True
else:
return False
# Accepted (AC) version
class Solution(object):
def PredictTheWinner(self, nums):
        def check(left, right, memo):  # recurse on the two end indices, convenient for memoizing and comparing
if left > right:
return 0
            if left == right:  # stop when the left and right indices meet
return nums[left]
if not (left, right) in memo:
ss = sum(nums[left: right + 1])
l, r = ss - check(left + 1, right, memo) + nums[left], ss - check(left, right - 1, memo) + nums[right]
#因为两人交替选,所以这里迭代时用总和减去选择某端的最大值(相当于对方选的)
memo[(left, right)] = max(l, r) #保存一堆索引号能取得的最大值
return memo[(left, right)]
s = sum(nums)
c1 = check(0, len(nums) - 1, {})
        return c1 >= s - c1  # no need for an if; the comparison itself yields a boolean
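# Quick illustration added: the second ``Solution`` definition above shadows
# the first, so this exercises the accepted version; expected results follow
# the LeetCode examples.
if __name__ == "__main__":
    s = Solution()
    assert not s.PredictTheWinner([1, 5, 2])   # player 1 loses
    assert s.PredictTheWinner([1, 5, 233, 7])  # player 1 wins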
| [
"[email protected]"
] |