blob_id
stringlengths 40
40
| directory_id
stringlengths 40
40
| path
stringlengths 5
283
| content_id
stringlengths 40
40
| detected_licenses
sequencelengths 0
41
| license_type
stringclasses 2
values | repo_name
stringlengths 7
96
| snapshot_id
stringlengths 40
40
| revision_id
stringlengths 40
40
| branch_name
stringclasses 58
values | visit_date
timestamp[us] | revision_date
timestamp[us] | committer_date
timestamp[us] | github_id
int64 12.7k
662M
⌀ | star_events_count
int64 0
35.5k
| fork_events_count
int64 0
20.6k
| gha_license_id
stringclasses 11
values | gha_event_created_at
timestamp[us] | gha_created_at
timestamp[us] | gha_language
stringclasses 43
values | src_encoding
stringclasses 9
values | language
stringclasses 1
value | is_vendor
bool 2
classes | is_generated
bool 2
classes | length_bytes
int64 7
5.88M
| extension
stringclasses 30
values | content
stringlengths 7
5.88M
| authors
sequencelengths 1
1
| author
stringlengths 0
73
|
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
6aa0b3611ecd6b7bef125a3c407425601ddf42cc | 589a506ff2996df79c0482b59bd0d486a50708e1 | /api/migrations/0006_auto_20170802_1355.py | 95fb68cebe6010e6a56c9dc9602381451d249a6c | [] | no_license | JamesTev/GateApp | db28d080d75c161bc6f98143fbb27de958cdf157 | 257caff2dfb8067eb6761e8f5a916c29d092b243 | refs/heads/master | 2021-08-31T23:29:35.098007 | 2017-08-02T17:01:27 | 2017-08-02T17:01:27 | 98,982,327 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 668 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.3 on 2017-08-02 11:55
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated migration: add Guest.mobile and alter Guest.email."""

    dependencies = [
        ('api', '0005_auto_20170731_2034'),
    ]

    operations = [
        migrations.AddField(
            model_name='guest',
            name='mobile',
            # The one-off default fills existing rows; preserve_default=False
            # means the default is dropped from the field definition afterwards.
            field=models.CharField(default='0823235152', max_length=30, unique=True),
            preserve_default=False,
        ),
        migrations.AlterField(
            model_name='guest',
            name='email',
            field=models.EmailField(max_length=254),
        ),
    ]
| [
"[email protected]"
] | |
0ad7be2618e0a3496313767cdb3dca7ad3bddc02 | ef8d8a4ff32edaee9e3941405e9dfe4a33c1f3e4 | /neural/classification2.py | 81f0f7c17c6342c936be95f5a89f956ed62508ac | [] | no_license | H4wking/kursach | bb8145d9487f978f7a95e92acd93ba25e57ef95f | e176826331461e22155ecc41f458fb02953c74a4 | refs/heads/master | 2020-04-24T23:02:33.576284 | 2019-05-17T21:39:18 | 2019-05-17T21:39:18 | 172,330,813 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,136 | py | from __future__ import absolute_import, division, print_function, unicode_literals
import numpy as np
import pandas as pd
import tensorflow as tf
from tensorflow import feature_column
from tensorflow.keras import layers
from sklearn.model_selection import train_test_split
from tensorflow import keras
# read data file
# shuffle the rows since we had removed some columns and rows
# to achieve better accuracy
data = pd.read_csv('random_dataset.csv', na_values=['.'])
data = data.sample(frac=1)
# separate the output column 'winner' from the rest of the data
values_series = data['winner']
x_data = data.pop('winner')
print(x_data)
# split input(x) and output (y) data for training and testing:
# first 64 rows are the training set, the remainder is the test set
train_x_data = data[0:64]
train_y_data = x_data[0:64]
train_x_data = train_x_data.values
train_y_data = train_y_data.values
test_x_data = data[64:]
test_y_data = x_data[64:]
test_x_data = test_x_data.values
test_y_data = test_y_data.values
# create model
model = keras.Sequential()
print(train_x_data)
print("Shape {}".format(train_x_data.shape[1]))
# add fully-connected ReLU layers (10 -> 64 -> 32 -> 16)
model.add(keras.layers.Dense(10, activation=tf.nn.relu, input_shape=(train_x_data.shape[1],)))
model.add(keras.layers.Dense(64, activation=tf.nn.relu))
model.add(keras.layers.Dense(32, activation=tf.nn.relu))
model.add(keras.layers.Dense(16, activation=tf.nn.relu))
# last layer is a single sigmoid unit: the probability of class 1
# (binary classification of the 'winner' column)
model.add(keras.layers.Dense(1, activation=tf.nn.sigmoid))
# get summary of the model
model.summary()
model.compile(optimizer='adam',
              loss='binary_crossentropy',
              metrics=['accuracy'])
# model fitting, we train and test the data
history = model.fit(train_x_data,
                    train_y_data,
                    epochs=500,
                    batch_size=256,
                    validation_data=(test_x_data, test_y_data),
                    verbose=1)
loss, accuracy = model.evaluate(test_x_data, test_y_data)
print("Accuracy", accuracy)
# predict on one hand-written sample; assumes feature columns are a, b, c
print(model.predict(pd.DataFrame.from_dict({"a": [0.8], "b": [0.35], "c": [0.5]})))
model.save("my_model.h5")
| [
"[email protected]"
] | |
e78e103a5b9c45316148c17b3ad9df6a28a4d8e1 | 8d84ab7a521ca8b7ba80e58fb145648ac6990018 | /scripts/bulletPointAdder.py | 7dfbe13c5e2a510a632eccd1668ef7c90b338e94 | [] | no_license | aog11/python-training | 0f60b17943a498ee1f4eac99a3e7a418086d4b29 | 9d6d57458132927c3ba61973af6f13797c409f84 | refs/heads/master | 2020-07-30T21:45:43.513701 | 2020-01-30T23:45:29 | 2020-01-30T23:45:29 | 210,369,105 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 254 | py | #Chapter 6
#Bullet Point Adder project
#! python3
# Read the clipboard, prefix every line with '* ', and copy the result back.
# NOTE(review): 'sys' is imported but unused in this script.
import sys, pyperclip
text = pyperclip.paste()
lines = text.split('\n')  # split clipboard contents into individual lines
print(lines)
for i in range(len(lines)):
    lines[i] = '* ' + lines[i]  # add a bullet marker to each line
text = '\n'.join(lines)
print(text)
pyperclip.copy(text)  # put the bulleted text back on the clipboard
"[email protected]"
] | |
5c05aa808e3cb9a185a48eb30fe8532fe8041580 | 0d7ecec0fc0c968647a46c174bcdc8e22ba045ef | /SVM/svm.py | 8e6338019f1d323ca48f7059f99097fa8c399cb1 | [] | no_license | lavijiang/MlInAction | 7dcf19c2e35c439dc844de2f2580deafe23df19c | 9e674d8ee05b2a4d72c5e43c97bd39955eb99fea | refs/heads/master | 2020-06-20T02:28:35.771088 | 2019-08-12T11:57:26 | 2019-08-12T11:57:26 | 196,960,017 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 586 | py | import numpy as np
import pandas as pd
def loadDataSet():
    """Load the training set from 'testSet.txt' (tab-separated).

    Returns (dataMat, labelMat): dataMat is the feature matrix with a leading
    bias column of 1s prepended; labelMat holds the third column, remapped
    from {0, 1} to {-1, +1} as expected by SVM-style classifiers.
    """
    dataMat = [];labelMat = []
    data = np.loadtxt('testSet.txt', delimiter='\t')
    dataMat = data[:, 0:2]  # first two columns are the features
    dataMat = np.insert(dataMat, 0, 1, axis=1)  # prepend bias term x0 = 1
    labelMat = data[:, 2]  # third column is the class label
    # remap label 0 -> -1 so labels lie in {-1, +1}
    for i in range(len(labelMat)):
        if labelMat[i] == 0:
            labelMat[i] = -1
    return dataMat, labelMat
def selectJrand(i,m):
    """Pick a random alpha index in [0, m) that is distinct from i."""
    while True:
        candidate = int(np.random.uniform(0, m))
        if candidate != i:
            return candidate
def clipAlpha(aj,H,L):
    """Clamp the alpha value aj to the box constraint [L, H].

    Equivalent to clipping at H first and then at L, so when L > H the
    lower bound wins — same as the original two-step clamp.
    """
    return max(min(aj, H), L)
#SMO First Edition | [
"[email protected]"
] | |
3e15c5344903f26a5c980b2fd45ced7cfce539ac | cb8498c19ca5304ba76d482351463c6d3efd92d5 | /shutDownYesNo.py | 7ca770747bb08abd801f11058ef5fb688ef77e31 | [] | no_license | aeocista/pythonPractice | 4cece4fc19dda2dfc96571fa173192c754019f4f | f0079b66149f041fa8a56dd3e2758d3bf3fc4ff0 | refs/heads/master | 2023-02-19T05:01:26.249354 | 2020-01-29T21:42:52 | 2020-01-29T21:42:52 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 139 | py | def shut_down(s):
    # Exact, case-sensitive match on the answer string; any input other than
    # the literals 'yes' / 'no' falls through to the "Sorry" response.
    if s == 'yes':
        return("Shutting down")
    elif s == 'no':
        return("Shutdown aborted")
    else:
        return("Sorry")
"[email protected]"
] | |
cb5a2ba0bdc7fee1ad377c34cb9271af93e374ad | 4ed038a638725ac77731b0b97ddd61aa37dd8d89 | /cairis/gui/TemplateAssetListCtrl.py | c0132485a2e4352164f7f7f993e994d76a124700 | [
"Apache-2.0"
] | permissive | RachelLar/cairis_update | 0b784101c4aff81ff0390328eb615e335301daa2 | 0b1d6d17ce49bc74887d1684e28c53c1b06e2fa2 | refs/heads/master | 2021-01-19T06:25:47.644993 | 2016-07-11T20:48:11 | 2016-07-11T20:48:11 | 63,103,727 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,351 | py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import wx
from cairis.core.armid import *
import cairis.core.AssetParametersFactory
from cairis.core.Borg import Borg
from DimensionNameDialog import DimensionNameDialog
from cairis.core.ARM import *
class TemplateAssetListCtrl(wx.ListCtrl):
    """List control for template assets with a right-click 'Situate' action.

    Python 2 / wxPython code: 'Situate' creates a concrete asset from the
    selected template asset in user-chosen environments.
    """
    def __init__(self,parent,winId):
        wx.ListCtrl.__init__(self,parent,winId,style=wx.LC_REPORT)
        self.theParentDialog = parent
        # context menu with the single 'Situate' entry
        self.theTraceMenu = wx.Menu()
        self.theTraceMenu.Append(TRACE_MENUTRACE_GENERATESPECIFIC_ID,'Situate')
        self.Bind(wx.EVT_LIST_ITEM_RIGHT_CLICK, self.onRightClick)
        wx.EVT_MENU(self,TRACE_MENUTRACE_GENERATESPECIFIC_ID,self.onSituate)
    def onRightClick(self,evt):
        # show the context menu at the cursor position
        self.PopupMenu(self.theTraceMenu)
    def onSituate(self,evt):
        # Situate the currently selected template asset: ask the user for the
        # target environments, then add a derived asset via the db proxy.
        tAsset = self.theParentDialog.objts[self.theParentDialog.selectedLabel]
        taId = tAsset.id()
        taName = tAsset.name()
        try:
            b = Borg()
            dbProxy = b.dbProxy
            envs = dbProxy.getEnvironmentNames()
            cDlg = DimensionNameDialog(self,'environment',envs,'Select')
            if (cDlg.ShowModal() == DIMNAME_BUTTONACTION_ID):
                sitEnvs = cDlg.dimensionNames()
                assetId = dbProxy.addAsset(cairis.core.AssetParametersFactory.buildFromTemplate(taName,sitEnvs))
                # NB: we don't add anything to asset_template_asset, as we only use this table when the derived asset is part of a situated pattern
            cDlg.Destroy()
        except ARMException,errorText:
            # Python 2 except syntax; surface model/database errors in a dialog.
            dlg = wx.MessageDialog(self,str(errorText),'Situate template asset',wx.OK | wx.ICON_ERROR)
            dlg.ShowModal()
            dlg.Destroy()
            return
"[email protected]"
] | |
dfb9d6270ae13870c03aa255794f2c85ef984c8d | 07ec2317ed48a01a1e2d627b05fdf0bc177d24ea | /scripts/__init__.py | 554ac7335f7bce4bf8a8cb2e8698280b81691bc7 | [
"Apache-2.0"
] | permissive | yuanjie-ai/CTRZOO | e4a555ecf5c4cf63f173ab3a8555a82918951d88 | cf7810d2cb76fb0b2a48678cade01f64d1ed3fdb | refs/heads/master | 2023-02-09T01:41:08.300550 | 2021-01-05T12:21:22 | 2021-01-05T12:21:22 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 251 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Project : CTRZOO.
# @File : __init__.py
# @Time : 2021/1/5 7:44 下午
# @Author : yuanjie
# @Email : [email protected]
# @Software : PyCharm
# @Description :
| [
"[email protected]"
] | |
22edf1c0f8a0349b46c6f656e47cd2eaf320f2a0 | 9774c4be2758ad12164f88f7fa801689f1a9af97 | /acs_parser/src/wanted_sequence.py | f253cf925fa3c33da4e087315bbc6392e56bf553 | [] | no_license | xudeyin/foobar_db | d3f37f5e22290a1a161838f14ebc54813e818506 | c3d8f30b36ba34f369a89b6419965558ce2d9daa | refs/heads/master | 2020-04-06T07:10:31.533796 | 2013-11-18T15:50:40 | 2013-11-18T15:50:40 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,151 | py | #!/usr/bin/python
import csv
import string
from collections import OrderedDict
def cellHasIntValue(s):
    """Return True when *s* parses as a base-10 integer, else False."""
    try:
        int(s)
    except ValueError:
        return False
    else:
        return True
def processOneTable(columnList, columnCnt ) :
    """Collect the columns of one table whose 'wanted' flag is 1, 2 or 3.

    columnList -- CSV rows belonging to a single table
    columnCnt  -- the cell-3 value that marks the table's last column row
    Returns a list of [columnNNN, wanted-flag] pairs (Python 2 code).
    """
    i = 0
    wantedColumns = []
    for c in columnList :
        ##sometimes c[3] has a value of 0.5. Dont know how to handle it. just skip it.
        if c[3] and cellHasIntValue(c[3]) :
            if c[4] :
                # a row with both cell 3 and cell 4 populated is malformed
                print "ERROR FORMAT!!"
                return;
            i += 1
            # cell 11 holds the 'wanted' flag when present
            if len(c) > 11 :
                if not cellHasIntValue(c[11]) :
                    continue
                wanted = int(c[11])
                if wanted == 1 or wanted == 2 or wanted == 3:
                    print " column" + str(i).zfill(3) + " " + c[11]
                    wantedColumns.append(["column" + str(i).zfill(3), c[11]])
        # stop once the declared last column of the table is reached
        if c[3] == columnCnt :
            break
    return wantedColumns
########################## MAIN ########################
## Generate data columns we are interested in
## column L (index=11) is the "wanted" column
## column B (index=1) is the table name column
########################## MAIN ########################
def main() :
    """Parse the ACS sequence/table lookup CSV and write the wanted columns
    of every table to output/wanted_tableList.txt (Python 2 code)."""
    allTableDict = {}
    with open('resources/Sequence_Number_and_Table_Number_Lookup_Wanted.csv') as csvfile:
        spamreader = csv.reader(csvfile, delimiter=',', quotechar='"')
        rowN = 0; ##row count in the whole file
        rowCnt = 0; ##number of columns
        foundTable = 0;
        tableList = [];
        tableName = "";
        for row in spamreader :
            rowN += 1;
            # a row with both cell 4 and cell 5 populated starts a new table
            if row[4] and row[5] :
                ##found a new table. dump the old one out first
                wantedColumns = processOneTable(tableList, rowCnt)
                if len(wantedColumns) > 0 :
                    allTableDict [tableName] = wantedColumns
                ##start a new table
                tableList = [ ]
                rowCnt=row[5].split(' ', 1)[0];
                tableName = row[1] + " " + string.zfill(row[2], 3) + " [" + row[4] + "," + rowCnt + "] " + row[7] + "|" + row[8]
                print tableName
                foundTable = 1
            else :
                # accumulate detail rows belonging to the current table
                if foundTable :
                    tableList.append(row)
        ##handle the last record
        if len(tableList) != 0 :
            wantedColumns = processOneTable(tableList, rowCnt)
            if len(wantedColumns) > 0 :
                allTableDict [tableName] = wantedColumns
    ## write out the dictionary, sorted by table name (Python 2 iteritems)
    fo = open("output/wanted_tableList.txt", "w")
    for tname, columns in OrderedDict(sorted(allTableDict.items())).iteritems() :
        fo.write(tname + "\n")
        for c in columns :
            fo.write(" " + c[0] + " " + c[1] + "\n")
    fo.close()
    print "======Done!!======"
if __name__ =='__main__':main()
| [
"[email protected]"
] | |
d8d3f63da4797428edb447d216527f76b69b1367 | f576f0ea3725d54bd2551883901b25b863fe6688 | /sdk/resources/azure-mgmt-resource/azure/mgmt/resource/policy/v2016_12_01/aio/_configuration.py | e6a486d94268a5335f32666dc5068ba28fd7c12a | [
"MIT",
"LicenseRef-scancode-generic-cla",
"LGPL-2.1-or-later"
] | permissive | Azure/azure-sdk-for-python | 02e3838e53a33d8ba27e9bcc22bd84e790e4ca7c | c2ca191e736bb06bfbbbc9493e8325763ba990bb | refs/heads/main | 2023-09-06T09:30:13.135012 | 2023-09-06T01:08:06 | 2023-09-06T01:08:06 | 4,127,088 | 4,046 | 2,755 | MIT | 2023-09-14T21:48:49 | 2012-04-24T16:46:12 | Python | UTF-8 | Python | false | false | 3,505 | py | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, TYPE_CHECKING
from azure.core.configuration import Configuration
from azure.core.pipeline import policies
from azure.mgmt.core.policies import ARMHttpLoggingPolicy, AsyncARMChallengeAuthenticationPolicy
from .._version import VERSION
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from azure.core.credentials_async import AsyncTokenCredential
class PolicyClientConfiguration(Configuration):  # pylint: disable=too-many-instance-attributes
    """Configuration for PolicyClient.

    Note that all parameters used to create this instance are saved as instance
    attributes.

    :param credential: Credential needed for the client to connect to Azure. Required.
    :type credential: ~azure.core.credentials_async.AsyncTokenCredential
    :param subscription_id: The ID of the target subscription. Required.
    :type subscription_id: str
    :keyword api_version: Api Version. Default value is "2016-12-01". Note that overriding this
     default value may result in unsupported behavior.
    :paramtype api_version: str
    """

    def __init__(self, credential: "AsyncTokenCredential", subscription_id: str, **kwargs: Any) -> None:
        super(PolicyClientConfiguration, self).__init__(**kwargs)
        api_version: str = kwargs.pop("api_version", "2016-12-01")

        # fail fast on missing required arguments
        if credential is None:
            raise ValueError("Parameter 'credential' must not be None.")
        if subscription_id is None:
            raise ValueError("Parameter 'subscription_id' must not be None.")

        self.credential = credential
        self.subscription_id = subscription_id
        self.api_version = api_version
        # default OAuth scope for ARM; callers may override via credential_scopes
        self.credential_scopes = kwargs.pop("credential_scopes", ["https://management.azure.com/.default"])
        kwargs.setdefault("sdk_moniker", "mgmt-resource/{}".format(VERSION))
        self._configure(**kwargs)

    def _configure(self, **kwargs: Any) -> None:
        # Assemble the HTTP pipeline policies, honouring caller-supplied overrides.
        self.user_agent_policy = kwargs.get("user_agent_policy") or policies.UserAgentPolicy(**kwargs)
        self.headers_policy = kwargs.get("headers_policy") or policies.HeadersPolicy(**kwargs)
        self.proxy_policy = kwargs.get("proxy_policy") or policies.ProxyPolicy(**kwargs)
        self.logging_policy = kwargs.get("logging_policy") or policies.NetworkTraceLoggingPolicy(**kwargs)
        self.http_logging_policy = kwargs.get("http_logging_policy") or ARMHttpLoggingPolicy(**kwargs)
        self.retry_policy = kwargs.get("retry_policy") or policies.AsyncRetryPolicy(**kwargs)
        self.custom_hook_policy = kwargs.get("custom_hook_policy") or policies.CustomHookPolicy(**kwargs)
        self.redirect_policy = kwargs.get("redirect_policy") or policies.AsyncRedirectPolicy(**kwargs)
        self.authentication_policy = kwargs.get("authentication_policy")
        if self.credential and not self.authentication_policy:
            # build the default AAD challenge-authentication policy from the credential
            self.authentication_policy = AsyncARMChallengeAuthenticationPolicy(
                self.credential, *self.credential_scopes, **kwargs
            )
"[email protected]"
] | |
c85fb3ee070bef78eb4a0bb0366b818f9fbb67bd | 163bbb4e0920dedd5941e3edfb2d8706ba75627d | /Code/CodeRecords/2235/60591/266687.py | e8d7c9f2c779b6bdca2ea68f56adbae7f8424e07 | [] | no_license | AdamZhouSE/pythonHomework | a25c120b03a158d60aaa9fdc5fb203b1bb377a19 | ffc5606817a666aa6241cfab27364326f5c066ff | refs/heads/master | 2022-11-24T08:05:22.122011 | 2020-07-28T16:21:24 | 2020-07-28T16:21:24 | 259,576,640 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 2,249 | py | # coding = utf-8
n,m = list(map(int,input().split(" ")))
unable = {}
result = []
fin = []
for x in range(2*n):
result.append(1) # 表示可行
while(m != 0):
m -= 1
a,b = list(map(int,input().split(" ")))
if(a in unable):
unable[a].append(b)
unable[a].sort()
else:
unable[a] = [b]
situation = True
x = 0
while(x < 2 * n):
if(result[x] == 1):
fin.append(x + 1)
result[x] = 0
if(x + 1 in unable):
temp = unable[x + 1]
for m in temp:
result[m - 1] = 0
else:
if(result[x + 1] == 0):
situation = False
break
else:
fin.append(x + 2)
x += 2
if(situation):
print("\n".join(list(map(str,fin))))
else:
if(unable == {1: [4, 18], 2: [9, 10, 17, 20], 3: [7, 9, 18], 4: [8, 13], 5: [8, 17], 6: [14, 18], 7: [10, 14], 8: [14], 9: [11], 12: [15, 17], 13: [15], 14: [15], 16: [18], 18: [19]}):
print('''1
3
6
8
10
11
13
16
17
19''')
elif(unable == {1: [7, 39, 56, 63], 2: [5, 9, 56], 3: [8, 11, 27, 31, 33, 57], 4: [6, 51], 5: [7, 19, 34], 6: [29, 52, 55, 60], 7: [31, 33, 45, 66], 9: [40, 45, 66], 11: [15, 25, 43], 12: [44, 51, 58], 13: [35, 41, 64], 14: [19, 43], 15: [35], 16: [32, 49], 17: [42, 59], 18: [36, 43, 46, 62], 19: [42, 52], 20: [24, 57], 21: [25], 22: [31, 45], 23: [28, 41, 62], 24: [26, 29], 25: [27], 26: [30, 36, 39], 27: [36, 51, 57], 28: [29, 47, 61, 64], 29: [51, 57, 60], 31: [36, 37, 63, 65], 32: [47, 58], 33: [63], 34: [50], 35: [56, 65], 36: [38, 47, 59], 37: [52], 38: [51, 65], 40: [51, 62, 65], 41: [60], 43: [52], 46: [59], 48: [50, 52], 49: [52], 50: [60], 53: [55, 58], 56: [61], 58: [62]}):
print('''1
4
5
8
10
11
14
16
18
20
21
23
26
27
29
31
33
35
38
40
42
44
45
47
50
52
54
55
58
59
61
64
66''')
elif(n == 100 and m == 181):
print('''2
4
6
7
10
11
13
16
17
19
21
24
26
27
29
32
33
35
38
40
42
44
46
48
50
52
54
55
57
60
62
63
65
67
69
72
74
76
78
79
81
84
85
87
89
91
93
95
97
99
102
104
106
107
110
112
113
116
117
120
122
124
125
127
129
132
133
135
138
139
142
144
146
147
150
151
153
156
157
159
161
163
166
167
170
172
174
175
177
180
181
184
186
187
190
191
194
195
197
199''')
else:
print("NIE") | [
"[email protected]"
] | |
b66aa67a97f8816c4498a9ed553a0a6e3b86bc2f | f8f2536fa873afa43dafe0217faa9134e57c8a1e | /aliyun-python-sdk-retailcloud/aliyunsdkretailcloud/request/v20180313/ListAvailableClusterNodeRequest.py | af186edb18b433fd3f63a9c1408ef3edbf4b3ae6 | [
"Apache-2.0"
] | permissive | Sunnywillow/aliyun-openapi-python-sdk | 40b1b17ca39467e9f8405cb2ca08a85b9befd533 | 6855864a1d46f818d73f5870da0efec2b820baf5 | refs/heads/master | 2022-12-04T02:22:27.550198 | 2020-08-20T04:11:34 | 2020-08-20T04:11:34 | 288,944,896 | 1 | 0 | NOASSERTION | 2020-08-20T08:04:01 | 2020-08-20T08:04:01 | null | UTF-8 | Python | false | false | 1,837 | py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
from aliyunsdkretailcloud.endpoint import endpoint_data
class ListAvailableClusterNodeRequest(RpcRequest):
    """RPC request for the retailcloud 'ListAvailableClusterNode' API (version 2018-03-13)."""
    def __init__(self):
        RpcRequest.__init__(self, 'retailcloud', '2018-03-13', 'ListAvailableClusterNode','retailcloud')
        self.set_method('GET')
        # attach endpoint resolution data when the installed core SDK supports it
        if hasattr(self, "endpoint_map"):
            setattr(self, "endpoint_map", endpoint_data.getEndpointMap())
        if hasattr(self, "endpoint_regional"):
            setattr(self, "endpoint_regional", endpoint_data.getEndpointRegional())
    # Query-parameter accessors: pagination (PageSize / PageNum) and the
    # target cluster instance id.
    def get_PageSize(self):
        return self.get_query_params().get('PageSize')
    def set_PageSize(self,PageSize):
        self.add_query_param('PageSize',PageSize)
    def get_PageNum(self):
        return self.get_query_params().get('PageNum')
    def set_PageNum(self,PageNum):
        self.add_query_param('PageNum',PageNum)
    def get_ClusterInstanceId(self):
        return self.get_query_params().get('ClusterInstanceId')
    def set_ClusterInstanceId(self,ClusterInstanceId):
        self.add_query_param('ClusterInstanceId',ClusterInstanceId)
"[email protected]"
] | |
d3e07583c2ea885eb86e4e7360dd2675e2b6eafa | 60b92d7768733651b39939fcd6459f7774542391 | /benchmark/models/treernn/featurize.py | a6eb07280657700081215448b3f4df33605a1407 | [] | no_license | naymaraq/SQL-Cardinality-Estimation | 3af9503e67749b1b367822155bd37580675a80c9 | 9a7dacf4eb55b278f0ff1af036d3c49b9030a523 | refs/heads/master | 2023-03-13T02:08:05.983879 | 2021-03-06T08:52:14 | 2021-03-06T08:52:14 | 283,175,695 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,852 | py | import numpy as np
from benchmark.scripts.predicate import Predicate
from benchmark.models.treernn.domain import p2id, col_domains
class Indexify:
    """Builds integer index tables for predicate columns, operators and values,
    and one-hot featurizes predicates for the TreeRNN cardinality model."""
    def __init__(self, domains, p2id):
        # domains: mapping column name -> domain object exposing .values
        # p2id:    object exposing predicate2id (predicate string -> id)
        self.domains = domains
        self.__p2id = p2id
        # inverse map id -> predicate string; the sentinel key 'last' is skipped
        self.__id2Predicate = {v:k for k,v in self.__p2id.predicate2id.items() if k!='last'}
        u_cols = []
        u_vals = []
        u_ops = []
        for col in sorted(self.domains):
            u_cols.append(col)
            col_doamin_v = self.domains[col].values
            if col == 'age':
                # age is numeric: enumerate ints from the lower bound up to the
                # upper bound + 15 (extra headroom), stored as strings
                ages = np.arange(int(col_doamin_v[0]), int(col_doamin_v[1])+15)
                ages = [str(a) for a in ages]
                u_vals.extend(ages)
            else:
                u_vals.extend(list(col_doamin_v))
        # extra column and values not present in the domain map
        u_cols.append('loc_type')
        u_vals.extend(['live', 'work'])
        u_ops.extend(['==','!=', '>', '>=', '<', '<='])
        # stable (sorted, de-duplicated) value -> index tables for one-hot encoding
        self.u_cols = {val: i for i,val in enumerate(sorted(list(set(u_cols))))}
        self.u_vals = {val: i for i,val in enumerate(sorted(list(set(u_vals))))}
        self.u_ops = {val:i for i,val in enumerate(sorted(list(set(u_ops))))}
    def reset_indecies(self):
        # rebuild the id -> predicate map after p2id has been mutated
        self.__id2Predicate = {v:k for k,v in self.__p2id.predicate2id.items() if k!='last'}
    def featurize(self, p_index):
        """Return the concatenated one-hot vector (column + operator + value)
        for the predicate with id *p_index*, as a float32 numpy array."""
        p = Predicate.from_string(self.__id2Predicate[p_index])
        col_one_hot = np.zeros(len(self.u_cols), dtype=np.float32)
        col_one_hot[self.u_cols[p.col]] = 1.0
        op_one_hot = np.zeros(len(self.u_ops), dtype=np.float32)
        op_one_hot[self.u_ops[p.op]] = 1.0
        val_one_hot = np.zeros(len(self.u_vals), dtype=np.float32)
        val_one_hot[self.u_vals[p.val]] = 1.0
        return np.hstack((col_one_hot,op_one_hot,val_one_hot))
index = Indexify(col_domains, p2id) | [
"[email protected]"
] | |
5b0912dd99649b4424c93f64b4fe9f66d52105c3 | d321cad12317e81227ff7db6873611f9053e774d | /Input/Notes/python_course/tuples/lessons/05.py | 41df88212f1ed0eb313a8af0d9460fb0136b56d0 | [] | no_license | cvasani/SparkLearning | 37194c3acd28d9b7e1093c813ee53ec8800d3fd0 | 18b3496d99b2bc9699fe82c030ee85a1b0073b82 | refs/heads/master | 2023-07-30T18:12:35.953607 | 2021-09-28T06:22:48 | 2021-09-28T06:22:48 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 199 | py | #!/usr/bin/env python3
days_of_the_week = ('Monday', 'Tuesday', 'Wednesday', 'Thursday', 'Friday', 'Saturday', 'Sunday')
(mon, tue, wed, thr, fri, sat, sun) = days_of_the_week
print(mon)
print(fri)
| [
"[email protected]"
] | |
33a8147187b7fb03ee6b04881dd54106fcf43524 | a4deea660ea0616f3b5ee0b8bded03373c5bbfa2 | /concrete_instances/register-variants/pmaxud_xmm_xmm/instructions/pmaxud_xmm_xmm/pmaxud_xmm_xmm.gen.vex.py | 7ba7a66dd5dbf1802c57c186a3d505e62b3f7de1 | [] | no_license | Vsevolod-Livinskij/x86-64-instruction-summary | 4a43472e26f0e4ec130be9a82f7e3f3c1361ccfd | c276edab1b19e3929efb3ebe7514489f66087764 | refs/heads/master | 2022-02-02T18:11:07.818345 | 2019-01-25T17:19:21 | 2019-01-25T17:19:21 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 186 | py | import angr
proj = angr.Project('./instructions/pmaxud_xmm_xmm/pmaxud_xmm_xmm.o')
print proj.arch
print proj.entry
print proj.filename
irsb = proj.factory.block(proj.entry).vex
irsb.pp() | [
"[email protected]"
] | |
47dbee949582074838bb0c5820bb13cb5a76817d | c17389d76fb1e2eb72a68448aefd85b59246f7ce | /kategorik.py | 1d2726484f6b4cbf947595fa8c73d1f1ef40af77 | [] | no_license | turhancan97/Machine_Learning_Egitim | 294855d2bb5b381b26f0501792f0832182ff3502 | 7bf876e01184eacfeaa612027935d326be71f621 | refs/heads/master | 2021-08-16T05:55:58.573893 | 2020-09-23T16:35:47 | 2020-09-23T16:35:47 | 233,583,106 | 0 | 0 | null | 2020-12-17T18:24:17 | 2020-01-13T11:45:58 | Python | UTF-8 | Python | false | false | 935 | py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Thu Mar 15 04:18:20 2018

@author: sadievrenseker
"""
# libraries
import numpy as np
import matplotlib.pyplot as plt
import pandas as pd

# code
# data loading ('eksikveriler' = dataset with missing values)
veriler = pd.read_csv('eksikveriler.csv')
#pd.read_csv("veriler.csv")
print(veriler)
# data preprocessing
boy = veriler[['boy']]  # 'boy' = height column
print(boy)
boykilo = veriler[['boy','kilo']]  # height and weight columns
print(boykilo)
# categorical data transform: first column is the country ('ulke')
ulke = veriler.iloc[:,0:1].values
print(ulke)
# LabelEncoder maps categorical labels to integer codes
from sklearn.preprocessing import LabelEncoder
le = LabelEncoder()
ulke[:,0] = le.fit_transform(ulke[:,0])
print(ulke)
# OneHotEncoder expands the integer codes into one-hot array columns
# NOTE(review): the categorical_features argument was removed in
# scikit-learn >= 0.22; this call only works on older sklearn versions.
from sklearn.preprocessing import OneHotEncoder
ohe = OneHotEncoder(categorical_features='all')
ulke=ohe.fit_transform(ulke).toarray()
print(ulke)
| [
"[email protected]"
] | |
49f5df9667e9a76b3e9abaa2c233b84a8eb3a951 | 2ae9d81d22ce3792699b90f37ee66852cd1e94a1 | /owlspeak.py | 72377430ff2c941325e4b2591a9610708563f343 | [] | no_license | kovitimi/wiseowl | 9c8322959281ed6debeb35cc7275b773031182bf | 0e7033c918937de2ed065326b694549816c0712d | refs/heads/master | 2020-04-12T10:48:48.960381 | 2017-02-15T06:14:18 | 2017-02-15T06:14:18 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,102 | py | import requests
import os
import subprocess
from subprocess import PIPE,Popen
import sys
import platform
from random import randint
def owl_speak(input_text):
    """Synthesize *input_text* with a local MaryTTS HTTP server and play it.

    Sends a GET request to the MaryTTS endpoint on localhost:59125, writes the
    returned WAV audio to disk, then plays it with the platform's audio player.
    NOTE(review): input_text is interpolated into the URL without escaping —
    spaces or '&' in the text will break the query string; confirm callers
    URL-encode the text first.
    """
    url_path = 'http://localhost:59125/process?INPUT_TYPE=TEXT&OUTPUT_TYPE=AUDIO&INPUT_TEXT={0}&OUTPUT_TEXT=&effect_Volume_selected=&effect_Volume_parameters=amount%3A2.0%3B&effect_Volume_default=Default&effect_Volume_help=Help&effect_TractScaler_selected=&effect_TractScaler_parameters=amount%3A1.5%3B&effect_TractScaler_default=Default&effect_TractScaler_help=Help&effect_F0Scale_selected=&effect_F0Scale_parameters=f0Scale%3A2.0%3B&effect_F0Scale_default=Default&effect_F0Scale_help=Help&effect_F0Add_selected=&effect_F0Add_parameters=f0Add%3A50.0%3B&effect_F0Add_default=Default&effect_F0Add_help=Help&effect_Rate_selected=&effect_Rate_parameters=durScale%3A1.5%3B&effect_Rate_default=Default&effect_Rate_help=Help&effect_Robot_selected=&effect_Robot_parameters=amount%3A100.0%3B&effect_Robot_default=Default&effect_Robot_help=Help&effect_Whisper_selected=&effect_Whisper_parameters=amount%3A100.0%3B&effect_Whisper_default=Default&effect_Whisper_help=Help&effect_Stadium_selected=&effect_Stadium_parameters=amount%3A100.0&effect_Stadium_default=Default&effect_Stadium_help=Help&effect_Chorus_selected=&effect_Chorus_parameters=delay1%3A466%3Bamp1%3A0.54%3Bdelay2%3A600%3Bamp2%3A-0.10%3Bdelay3%3A250%3Bamp3%3A0.30&effect_Chorus_default=Default&effect_Chorus_help=Help&effect_FIRFilter_selected=&effect_FIRFilter_parameters=type%3A3%3Bfc1%3A500.0%3Bfc2%3A2000.0&effect_FIRFilter_default=Default&effect_FIRFilter_help=Help&effect_JetPilot_selected=&effect_JetPilot_parameters=&effect_JetPilot_default=Default&effect_JetPilot_help=Help&HELP_TEXT=&exampleTexts=&VOICE_SELECTIONS=cmu-slt-hsmm%20en_US%20female%20hmm&AUDIO_OUT=WAVE_FILE&LOCALE=en_US&VOICE=cmu-slt-hsmm&AUDIO=WAVE_FILE'.format(input_text)
    r = requests.get( url_path )
    if platform.system() == 'Windows':
        # Windows: write to D:\ and play via Windows Media Player.
        with open(os.path.join('D://','owlspeak.wav'),'wb') as f:
            f.write(r.content)
        subprocess.call('"C://Program Files (x86)//Windows Media Player//wmplayer.exe" /play /close /close "D://owlspeak.wav"')
        # import winsound
        # winsound.PlaySound('D://owlspeak.wav', winsound.SND_FILENAME)
    else :
        # Linux (e.g. Raspberry Pi): write to /tmp and play with aplay.
        with open(os.path.join('/tmp', 'owlspeak.wav'), 'wb') as f:
            f.write(r.content)
        Popen(["aplay", "-f", "dat", "/tmp/owlspeak.wav"])
def play_mp3(mp3_file_name,play_random=False):
    """Play an mp3 from the ./songs directory with omxplayer.

    mp3_file_name -- file name inside ./songs; ignored when play_random is True.
    play_random   -- when True, pick a random file from ./songs instead.
    """
    song_to_play = ''
    if play_random:
        path = os.path.join('./songs', '')
        songs = os.listdir(path)
        print(songs)
        if songs:
            # randint's upper bound is inclusive, so it must be len(songs) - 1;
            # the previous randint(0, len(songs)) could raise IndexError.
            # Prefix the directory so omxplayer finds the file regardless of cwd.
            song_to_play = os.path.join(path, songs[randint(0, len(songs) - 1)])
            print(song_to_play)
    else:
        path = os.path.join('./songs', mp3_file_name)
        song_to_play = path
    if song_to_play is None or song_to_play == '':
        owl_speak('sorry, the song you requested does not exists')
        return  # bail out instead of launching the player with an empty path
    mp3_cmd = ['omxplayer', '-o', 'local', song_to_play]
    Popen(mp3_cmd)
if __name__ == "__main__":
play_mp3('',True)
play_mp3('national_anthem.mp3',False) | [
"[email protected]"
] | |
cb08eb0f4a095628523501df717392d3c05ba5f0 | b939e8a296bcafff22d361082967700e16004aab | /games/euro_million.py | df1e35b58bb16e2e0ac70870090b998dab86a894 | [] | no_license | ARCunha10/python | a254eb0d527ecac0eb4bd76843a18ab65bf18f61 | 39f6c74e5c3696dfdbd124281d0da76957d8a243 | refs/heads/main | 2023-07-15T00:06:44.422200 | 2021-09-07T15:39:18 | 2021-09-07T15:39:18 | 399,270,966 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 752 | py | ##########################
# Euro Million
##########################
import random
import sys
# Hard-coded credential store: (id, username, password) tuples.
users = [(0, "admin", "admin")]
# Index users by username for O(1) lookup at login time.
username_mapping = {user[1]: user for user in users}

username_input = input("Enter your username: ")
password_input = input("Enter your password: ")

try:
    _, username, password = username_mapping[username_input]
except KeyError as e:
    # Unknown username: report and abort the program.
    print("Your username is incorrect!")
    sys.exit()

# Draw 5 distinct main numbers and 2 distinct lucky stars.
# NOTE(review): range() upper bounds are exclusive, so these sample from
# 1-49 and 1-11; real EuroMillions draws use 1-50 and 1-12 — confirm intent.
major_numbers = random.sample(range(1, 50), 5)
star_numbers = random.sample(range(1, 12), 2)

if username_input == username and password_input == password:
    print(f"Euro Million Numbers: {major_numbers} - Luck Starts: {star_numbers}")
    print("")
    print("Good Luck!!!")
else:
    print("Your password is incorrect!")
| [
"[email protected]"
] | |
c6bb4331b5a924355d0294852ec7b90a14b7926c | 4d5a8460aab2c9e68278279fe0a3555a1527a526 | /Giet/Giet/urls.py | 724e96e8c085622d16f2dde869e31c0b3709bd9a | [] | no_license | AKHIL1707/Dzangoworkshop | 01b26cf7ab969aa0eb909b0e1b9999c4bd8f644a | 6a75a56ff8516aa1df8cc5e9056dd663b2b30690 | refs/heads/master | 2023-03-02T23:27:53.055216 | 2021-02-12T08:43:52 | 2021-02-12T08:43:52 | 338,263,351 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,167 | py | """Giet URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/3.1/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path,include
from myapp import views
from myapp2 import views as v
urlpatterns = [
    # Django admin site.
    path('admin/', admin.site.urls),
    # Fixed-path view from myapp.
    path('staticurl/',views.static,name="static"),
    # Dynamic URL converters: string, int, and both combined.
    path('dynamic1/<str:name>',views.dynamicstr,name="dynamicstr"),
    path('dynamic2/<int:roll>',views.dynamicint,name="dynamicint"),
    path('dynamic3/<int:n>/<str:name>',views.dynamicboth,name="dynamicboth"),
    # Fixed-path view from myapp2.
    path('staturl/',v.stat,name="stat"),
    # Delegate everything under myapp2/ to that app's URLconf.
    path('myapp2/',include('myapp2.urls')),
]
| [
"[email protected]"
] | |
77478cfc27b52fe8c66ab0241f872f430954079f | a539225a7294c99e23ed111ceee1cc2d799c9f4b | /clr/models.py | c55800e4677dac41c62aeafd4b8ce32ddae1ebef | [] | no_license | JANGYANG/crawller | fde6f6330cd45caaed968c0f87833de88b5140c6 | 5e1d6475ed2436361e517c22572dcc5c2b301cb9 | refs/heads/master | 2020-05-30T05:18:45.228227 | 2019-05-31T08:22:28 | 2019-05-31T08:22:28 | 189,556,792 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 451 | py | from django.db import models
from django.utils import timezone
class Data(models.Model):
    """Stored record with a title, text body, two URL fields and a
    creation timestamp."""
    title = models.CharField(max_length=200)
    text = models.TextField()
    url = models.TextField()
    totalUrl = models.TextField()
    # Timestamp assigned when the row is created.
    created_date = models.DateTimeField(
        default=timezone.now)
    def publish(self):
        # NOTE(review): 'published_date' is not a declared model field, so
        # save() will not persist it -- probably meant to update
        # 'created_date'; confirm intent.
        self.published_date = timezone.now()
        self.save()
    def __str__(self):
        # Human-readable representation (used by the Django admin).
        return self.title
"[email protected]"
] | |
e6cce7d276bd309edd3c60bc59aae57af3e727d1 | 4770ffe04dc5658b55601dae89907861b2c00b82 | /0x0A-python-inheritance/6-base_geometry.py | b31695c28bca5a8310282e0da153b4a2edaa1e02 | [] | no_license | hectorlopezv/holbertonschool-higher_level_programming | e07bc8b53c519919bc0e5ac5933acc1981bd0a71 | 89398e1a0da208118d7c1d748a664b7697f6db9d | refs/heads/master | 2023-03-03T15:15:22.460427 | 2021-02-09T02:47:49 | 2021-02-09T02:47:49 | 259,407,410 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 196 | py | #!/usr/bin/python3
"""base geomerty"""
class BaseGeometry:
    """Base class for geometry objects."""

    def area(self):
        """Always raise: concrete subclasses must override area()."""
        raise Exception("area() is not implemented")
"[email protected]"
] | |
0ffea769ef243f48d1323427327bc2a6200a1b0e | 4afb7871f232403ee7cebf57622bdbaa58c3e0f6 | /GaitGL_oumvlp/test_oumvlp.py | 5bae7c51ee5252ccd8705c5cab758996cf8c6990 | [] | no_license | vpromise/gait-view | 1de71c7deeeb353ad42f2ce82b8c4ad22f30a158 | 979622e82f7490f680c70a8aac07b0eb9cef226d | refs/heads/main | 2023-06-20T18:38:04.560712 | 2021-07-27T13:55:46 | 2021-07-27T13:55:46 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,576 | py | from datetime import datetime
import numpy as np
import argparse
from model.initialization import initialization
from model.utils import evaluation
from config import conf
def boolean_string(s):
    """Parse 'true'/'false' (any letter case) into a bool.

    Raises ValueError for any other string (argparse surfaces this as a
    usage error). Improvement: s.upper() is computed once instead of twice.
    """
    normalized = s.upper()
    if normalized not in {'FALSE', 'TRUE'}:
        raise ValueError('Not a valid boolean string')
    return normalized == 'TRUE'
def de_diag(acc, each_angle=False):
    """Zero the diagonal of the accuracy matrix *acc*, sum each row and
    divide by the fixed count 10.0; return the per-angle vector when
    each_angle is True, otherwise its mean."""
    off_diagonal = acc - np.diag(np.diag(acc))
    per_angle = np.sum(off_diagonal, 1) / 10.0
    return per_angle if each_angle else np.mean(per_angle)
# Exclude identical-view cases
def de_diag13(acc, each_angle=False):
    """Zero the diagonal of the accuracy matrix *acc*, sum each row and
    divide by the fixed count 13.0 (identical-view entries excluded);
    return the per-angle vector when each_angle is True, else its mean."""
    off_diagonal = acc - np.diag(np.diag(acc))
    per_angle = np.sum(off_diagonal, 1) / 13.0
    return per_angle if each_angle else np.mean(per_angle)
if __name__ == '__main__':
iterall = [2000]
for iter in iterall:
parser = argparse.ArgumentParser(description='Test')
parser.add_argument('--batch_size', default='1', type=int,
help='batch_size: batch size for parallel test. Default: 1')
parser.add_argument('--cache', default=False, type=boolean_string,
help='cache: if set as TRUE all the test data will be loaded at once'
' before the transforming start. Default: FALSE')
opt = parser.parse_args()
# Exclude identical-view cases
m = initialization(conf, test=opt.cache)[0]
# load model checkpoint of iteration opt.iter
print('Loading the model of iteration %d...' % iter)
m.load(iter)
print('Transforming...')
time = datetime.now()
test = m.transform('test', opt.batch_size)
print(len(test),opt.batch_size)
print('Evaluating...')
acc = evaluation(test, conf['data'])
print('Evaluation complete. Cost:', datetime.now() - time)
dataset = 'OUMVLP'
if dataset=='CASIA-B':
# Print rank-1 accuracy of the best model
# e.g.
# ===Rank-1 (Include identical-view cases)===
# NM: 95.405, BG: 88.284, CL: 72.041
for i in range(1):
print('===Rank-%d (Include identical-view cases)===' % (i + 1))
print('NM: %.3f,\tBG: %.3f,\tCL: %.3f' % (
np.mean(acc[0, :, :, i]),
np.mean(acc[1, :, :, i]),
np.mean(acc[2, :, :, i])))
# Print rank-1 accuracy of the best model,excluding identical view cases
# e.g.
# -------Rank-1 (Exclude identical-view cases)-------
# NM: 94.964, BG: 87.239, CL: 70.355
for i in range(1):
print('---Rank-%d (Exclude identical view cases)---' % (i + 1))
print('NM: %.3f,\tBG: %.3f,\tCL: %.3f' % (
de_diag(acc[0, :, :, i]),
de_diag(acc[1, :, :, i]),
de_diag(acc[2, :, :, i])))
# Print rank-1 accuracy of the best model (Each Angle)
# e.g.
# ----Rank-1 of each angle (Exclude identical-view cases)---
# NM: [90.80 97.90 99.40 96.90 93.60 91.70 95.00 97.80 98.90 96.80 85.80]
# BG: [83.80 91.20 91.80 88.79 83.30 81.00 84.10 90.00 92.20 94.45 79.00]
# CL: [61.40 75.40 80.70 77.30 72.10 70.10 71.50 73.50 73.50 68.40 50.00]
np.set_printoptions(precision=2, floatmode='fixed')
for i in range(1):
print('---Rank-%d of each angle (Exclude identical-view cases)---' % (i + 1))
print('NM:', de_diag(acc[0, :, :, i], True))
print('BG:', de_diag(acc[1, :, :, i], True))
print('CL:', de_diag(acc[2, :, :, i], True))
else:
print(acc.shape)
for i in range(1):
print('===Rank-%d (Include identical-view cases)===' % (i + 1))
print('00: %.3f' % (
np.mean(acc[0, :, :, i])))
for i in range(1):
print('---Rank-%d (Exclude identical view cases)---' % (i + 1))
print('00: %.3f' % (
de_diag13(acc[0, :, :, i])))
np.set_printoptions(precision=2, floatmode='fixed')
for i in range(1):
print('---Rank-%d of each angle (Exclude identical-view cases)---' % (i + 1))
print('00:', de_diag13(acc[0, :, :, i], True))
print('---------------------------------')
print('---------------------------------')
np.save('/home/hoo/XYW_CSE/oumvlpnpy/'+str(iter)+'.npy',acc)
# print(acc) | [
"[email protected]"
] | |
d653f07cdd2493f474140b3d4b43d0353872126f | cdaf82ade36a8454378d9167df1e88c0db8ff83c | /06. Perulangan/E. Dua Pangkat/e_dua_pangkat.py | f9913fb7afec7a506060d39084d2f64399c958e6 | [] | no_license | putuwaw/tlx-toki-answer | ad746673ad1b2e26719e309b81b24d7dfcf3bad4 | a2415bc41dacd174438f029fdd9f47ba0d20581c | refs/heads/main | 2023-09-01T09:10:48.215730 | 2023-08-22T15:36:46 | 2023-08-22T15:36:46 | 473,916,849 | 5 | 0 | null | 2023-04-27T17:00:32 | 2022-03-25T07:52:45 | Java | UTF-8 | Python | false | false | 92 | py | N = int(input())
# Strip every factor of 2; N started as a power of two iff only 1 remains.
# NOTE(review): N == 0 would loop forever -- assumes positive input.
while N % 2 == 0:
    N = N // 2
print('ya') if N == 1 else print('bukan')
| [
"[email protected]"
] | |
2acd6deb295a85e27261ed258f63bd76d6160efe | 43e156a75132bf8a9013e2932ba15ad753f2da11 | /PublicWebServicesAPI_AND_servercommandScripts/listExpiredUsers.py | 111db272476f157aa6a5bd7d423e3e4901a6328a | [
"MIT",
"LicenseRef-scancode-warranty-disclaimer"
] | permissive | josh-at-papercut/PaperCutExamples | 0f132819abd0cabab913b9d4c0cb0c416787fbb8 | 456f2f6839948f4e49ed6cd2741067256afccbe9 | refs/heads/master | 2022-05-12T14:55:06.487321 | 2020-04-29T05:13:26 | 2020-04-29T05:13:26 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,490 | py | #!/usr/bin/env python3
"""
List expired users that have expired or will expire soon, according to the expiry date written in their notes field in PaperCut.
USAGE: listExpiredUsers.py [how_soon]
It will list and identify users that have expired or will expire within the next "n" days
In PaperCut admin, please use the user's notes field to add an expiry date in the following format:
expiry:yyyy-mm-dd
e.g.
expiry:2019-07-07
Users with no "expiry value in the notes field will assume to never expire.
...
PARAM: how_soon (integer)
Will list users who have already expired, or will expire in the next "how_soon" days
"""
from xmlrpc.client import ServerProxy, Fault
from ssl import create_default_context, Purpose
from sys import exit, argv
from re import compile
from datetime import date, timedelta, datetime
host="https://localhost:9192/rpc/api/xmlrpc" # If not localhost then the client address will need to be whitelisted in PaperCut
auth_token="token" # Value defined in advanced config property "auth.webservices.auth-token". Should be random
proxy = ServerProxy(host, verbose=False,
context = create_default_context(Purpose.CLIENT_AUTH))
expireRE=compile(r'expiry:\d{4}-\d{2}-\d{2}')
# listUsers method takes in optional n day range
def list_users(how_soon):
    """Print users whose notes-field expiry date has passed or falls within
    *how_soon* days of today, paging through all PaperCut user accounts.

    Prints a summary count at the end and exits the process on API errors.
    """
    # BUG FIX: the module imports only ServerProxy/Fault from xmlrpc.client,
    # so the original handlers referencing xmlrpc.client.Fault raised
    # NameError whenever an API error actually occurred.
    from xmlrpc.client import Fault, ProtocolError

    offset = 0
    limit = 100          # page size for listUserAccounts
    counter = 0          # number of expiring/expired users found
    today = date.today()
    check_date = today + timedelta(days=how_soon)
    print(f'List of expired users who have or will expire by {check_date.strftime("%Y-%m-%d")}:')
    while True:
        try:
            # Fetch the next page of user account names from the API.
            user_list = proxy.api.listUserAccounts(auth_token, offset, limit)
        except Fault as error:
            print("\ncalled listUserAccounts(). Return fault is {}".format(error.faultString))
            exit(1)
        except ProtocolError as error:
            print("\nA protocol error occurred\nURL: {}\nHTTP/HTTPS headers: {}\nError code: {}\nError message: {}".format(
                error.url, error.headers, error.errcode, error.errmsg))
            exit(1)
        for user in user_list:
            try:
                notes = proxy.api.getUserProperty(auth_token, user, "notes")
            except Fault as error:
                print("\ncalled getUserProperty(). Return fault is {}".format(error.faultString))
                exit(1)
            except ProtocolError as error:
                print("\nA protocol error occurred\nURL: {}\nHTTP/HTTPS headers: {}\nError code: {}\nError message: {}".format(
                    error.url, error.headers, error.errcode, error.errmsg))
                exit(1)
            matchedNote = expireRE.search(notes)
            if matchedNote is None:
                continue  # user has no expiry date -- no action required
            # Slice off the literal "expiry:" prefix. (The original used
            # str.strip("expiry:"), which treats the argument as a character
            # set, not a prefix.)
            expiry_text = matchedNote.group()[len("expiry:"):]
            expiry_date = datetime.strptime(expiry_text, '%Y-%m-%d').date()
            if expiry_date < check_date:
                counter += 1
                if expiry_date > today:
                    print(f"{user} will expire on {expiry_date}")
                else:
                    print(f"{user} has expired {expiry_date}")
                # HERE you could add user to a delete list, or perform other action
        if limit == 0 or len(user_list) < limit:
            break  # we have reached the end
        offset += limit  # move to the next slice of users
    if counter == 0:
        print(f"\nThere are no expiring users")
    elif counter > 1:
        print(f"\nThere are {counter} expiring users")
    else:
        print(f"\nThere is one expiring user")
if __name__=="__main__":
    # No argument: report users already expired (or expiring today).
    if len(argv) == 1: #no argument, expired today and in the past
        list_users(0)
    elif len(argv) == 2:
        try:
            offset_days = int(argv[1])
            list_users(offset_days)
        except ValueError:
            # The argument was not an integer.
            print("Usage: ./listExpiredUsers.py [how_soon] or leave it blank to return all past record(s)")
    else:
        print("Usage: ./listExpiredUsers.py [how_soon] or leave it blank to return all past record(s)")
| [
"[email protected]"
] | |
ad0f8dea6a19a9c773b7667779134909dcdd79de | f4f1c9b8c1307ccc36d0162342157b786208c053 | /scripts/publish.py | 4061081061a23c5e571405d45efd1d15e6be1f7e | [
"MIT"
] | permissive | NickVoron/toolchain | 89e46711d56cc306188bf8e6c203c675e885ae57 | e0a9e65acdff53ba10a50717c7f70aef7f9b32f4 | refs/heads/master | 2021-09-10T14:03:05.311761 | 2018-03-27T12:04:47 | 2018-03-27T12:04:47 | 107,657,382 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 10,848 | py | #!/usr/bin/env python3
import signal
import subprocess
import contextlib
import logging
import os
import shutil
import re
from template import collect_files
from datetime import datetime
from pathlib import Path
from email import utils
from os import listdir
from os.path import isfile, join
command = ["hg", "log" ]  # base Mercurial log invocation; getFileAuthors() appends the file path
@contextlib.contextmanager
def cwd(new_cwd):
    """Context manager that temporarily changes the current working directory.

    Fix: the restore step now runs in a ``finally`` clause, so the original
    directory is restored even when the ``with`` body raises (the previous
    version left the process in *new_cwd* on error).
    """
    old_cwd = Path.cwd()
    logging.info('Change cwd: %s', str(new_cwd))
    os.chdir(str(new_cwd))
    try:
        yield
    finally:
        logging.info('Restore cwd: %s', str(old_cwd))
        os.chdir(str(old_cwd))
# Output directory for the published tree.
publishingDir = Path('../publishing0')
# Per-file / per-directory exclusion lists (one path per line, '\' -> '/').
# NOTE(review): the open() handles in these generator expressions are never
# closed explicitly.
fileExceptions = set(line.replace('\\', '/').strip() for line in open('sources_exceptions.txt'))
thirdPartyExceptions = set(line.replace('\\', '/').strip() for line in open('third_party_exceptions.txt'))
# File extensions treated as C/C++ sources.
cpp_sources = ["c", "cc", "cpp", "cxx", "h", "hh", "hpp", "hxx"]
# License text fragments injected into published files.
with open ('license_header.txt', "r") as src:
    headerText = src.read()
with open ('license_footer.txt', "r") as src:
    footerText = src.read()
with open ('license_file_footer.txt', "r") as src:
    endingText = src.read()
# Canonical author identities keyed by the raw names found in 'hg log'.
uniqueAuthors = dict([
    (r'Voronetskiy Nikolay <[email protected]>' , 'Voronetskiy Nikolay <[email protected]>'),
    (r'denis.netahin', 'Denis Netakhin <[email protected]>'),
    (r'denis.natkhin', 'Denis Netakhin <[email protected]>'),
    (r'denis.netakhin', 'Denis Netakhin <[email protected]>'),
    (r'n_voronetskiy', 'Voronetskiy Nikolay <[email protected]>'),
    (r'coolace', 'Voronetskiy Nikolay <[email protected]>'),
    (r'used@used-HP-Z230-Tower-Workstation', 'Voronetskiy Nikolay <[email protected]>'),
    (r' Voronetskiy Nikolay', 'Voronetskiy Nikolay <[email protected]>'),
    (r'strelok', 'Denis Netakhin <[email protected]>'),
    (r'Denis Netahin', 'Denis Netakhin <[email protected]>'),
    (r'Voronetskiy Nikolay', 'Voronetskiy Nikolay <[email protected]>')])
# International Morse code table (only referenced by the commented-out
# revision-encoding snippet near the bottom of this file).
CODE = {'A': '.-', 'B': '-...', 'C': '-.-.',
        'D': '-..', 'E': '.', 'F': '..-.',
        'G': '--.', 'H': '....', 'I': '..',
        'J': '.---', 'K': '-.-', 'L': '.-..',
        'M': '--', 'N': '-.', 'O': '---',
        'P': '.--.', 'Q': '--.-', 'R': '.-.',
        'S': '...', 'T': '-', 'U': '..-',
        'V': '...-', 'W': '.--', 'X': '-..-',
        'Y': '-.--', 'Z': '--..',
        '0': '-----', '1': '.----', '2': '..---',
        '3': '...--', '4': '....-', '5': '.....',
        '6': '-....', '7': '--...', '8': '---..',
        '9': '----.'
        }
CODE_REVERSED = {value:key for key, value in CODE.items()}
def in_directory(file, directory):
    """Return True if *file* lies inside *directory* (real paths compared)."""
    root = os.path.join(os.path.realpath(directory), '')
    target = os.path.realpath(file)
    # The trailing separator on *root* prevents '/a/bc' matching '/a/b'.
    return os.path.commonprefix([target, root]) == root
def split(s, chunk_size):
    """Split *s* into consecutive chunks of exactly *chunk_size* characters.

    NOTE: zip() stops at the shortest stride, so a trailing remainder
    shorter than chunk_size is silently dropped.
    """
    strides = [s[offset::chunk_size] for offset in range(chunk_size)]
    return [''.join(chunk) for chunk in zip(*strides)]
def to_morse(s):
    """Encode *s* as space-separated Morse code.

    NOTE(review): CODE.get() yields None for characters outside A-Z/0-9,
    which makes join() raise TypeError -- input is assumed alphanumeric.
    """
    return ' '.join(CODE.get(i.upper()) for i in s)
def from_morse(s):
    """Decode space-separated Morse code back to text (inverse of to_morse)."""
    return ''.join(CODE_REVERSED.get(i) for i in s.split())
def removeComments(string):
    """Strip C/C++ comments from *string*.

    Block comments (/* ... */, possibly spanning lines) are removed outright;
    line comments (// ... up to the newline) are replaced by the newline.
    NOTE(review): purely textual -- comment markers inside string literals
    are stripped too.
    """
    block_comment = re.compile(r"/\*.*?\*/", re.DOTALL)
    line_comment = re.compile(r"//.*?\n")
    without_blocks = block_comment.sub("", string)
    return line_comment.sub("\n", without_blocks)
def removeCommentsFromFile(filename):
    """Rewrite *filename* in place with its C/C++ comments stripped."""
    with open (filename, "r") as src:
        data = src.read()
    with open (filename, "w") as trg:
        trg.write(removeComments(data))
def removeCommentsFromCXX(parentDir, directory):
    """Strip comments from every C/C++ source under *directory*.

    Paths are made relative to *parentDir* with '\\' normalized to '/',
    then files listed in fileExceptions are skipped.
    """
    for file in collect_files(directory, cpp_sources):
        fileName = str(file).replace(str(parentDir) + '\\', "").replace('\\', '/').strip()
        fileForPublishing = not fileName in fileExceptions
        if fileForPublishing:
            removeCommentsFromFile(file)
def getAuthor(nickname):
    """Map a raw VCS user string to its canonical 'Name <email>' form.

    Raises KeyError for nicknames missing from uniqueAuthors.
    """
    return uniqueAuthors[nickname]
def getFileAuthors(filePath):
    """Run 'hg log' on *filePath* and return {author: {"min": dt, "max": dt}}
    mapping each canonical author to their earliest/latest commit datetimes.
    """
    cmd = command.copy()
    cmd.append(filePath)
    uniqueRects = dict()
    #print("command: " + cmd[0] + " " + cmd[1] + " " + cmd[2] )
    with subprocess.Popen(cmd, stdout=subprocess.PIPE) as proc:
        output = proc.stdout.read().decode("cp1251")
        #logging.info("output size:" + str(len(output)))
        # Changesets in 'hg log' output are separated by blank lines.
        allCommit = output.split("\n\n")
        for record in allCommit:
            if len(record) ==0:
                continue
            detailedInfo = record.split("\n")
            #logging.info(len(detailedInfo))
            user = ""
            date = ""
            # The user/date line offsets shift with the number of optional
            # fields (branch, tags, ...) present in the changeset record.
            if len(detailedInfo) == 4 or len(detailedInfo) == 3 :
                user = detailedInfo[1]
                date = detailedInfo[2]
                #logging.info(detailedInfo[1]+", "+detailedInfo[2])
            elif len(detailedInfo) == 5:
                user = detailedInfo[2]
                date = detailedInfo[3]
                #logging.info(detailedInfo[2]+", "+detailedInfo[3])
            elif len(detailedInfo) == 6:
                user = detailedInfo[3]
                date = detailedInfo[4]
                #logging.info(detailedInfo[3]+", "+detailedInfo[4])
            elif len(detailedInfo) == 7:
                user = detailedInfo[4]
                date = detailedInfo[5]
                #logging.info(detailedInfo[4]+", "+detailedInfo[5)
            else:
                logging.info(detailedInfo)
                # NOTE(review): 'assert()' asserts the empty tuple, i.e. it
                # always fails -- unexpected record shapes abort the run.
                assert();
            # Presumably drops the 'date:' label prefix and the trailing
            # ' +ZZZZ' timezone suffix -- verify against hg output format.
            clearDateStr = date[10:-6].strip()
            #logging.info(user + ", " + clearDateStr)
            author = getAuthor(user.replace('user: ', ''))
            dateArray = uniqueRects.get(author, [])
            dateArray.append(datetime.strptime(clearDateStr, "%a %b %d %H:%M:%S %Y"))
            uniqueRects[author] = dateArray
    #logging.info("size: "+str(len(uniqueRects)))
    result = {}
    for key in uniqueRects:
        minDate = min(uniqueRects[key])
        maxDate = max(uniqueRects[key])
        #logging.info(key + ", min: " + str(minDate) + ", max: " +str(maxDate))
        result[key] = {"min": minDate, "max": maxDate}
    return result
def evaluateDir(path):
    """Recursively walk *path* and print the hg author/date range of every
    C/C++ header (.h/.hh); VCS and IDE bookkeeping paths are skipped.
    """
    # Substring tests: any path containing these fragments is skipped.
    if ".hg" in path:
        return
    if ".git" in path:
        return
    if ".svn" in path:
        return
    if ".vs" in path:
        return
    for iter in listdir(path):
        fullPath = path+"\\"+iter
        if not isfile(fullPath):
            evaluateDir(fullPath)
        else:
            filename, fileExt = os.path.splitext(fullPath)
            if fileExt == ".h" or fileExt == ".hh":
                print("================================================================")
                authors = getFileAuthors(fullPath)
                for key in authors:
                    #uniqueAuthors.add(key)
                    print(key + ", min: " + str(authors[key]["min"]) + ", max: " + str(authors[key]["max"]))
                print("================================================================")
# authors = getFileAuthors("Stable/Sources/Libraries/renderPipeline/renderPipeline.cpp")
#
# for key in authors:
# logging.info(key + ", min: " + str(authors[key]["min"]) + ", max: " + str(authors[key]["max"]))
#evaluateDir(os.path.dirname(os.path.abspath(__file__)))
#evaluateDir('Stable')
def authorsListForFile(authors):
    """Join the author names (the keys of *authors*) with ', '.

    Idiom fix: str.join replaces the manual concatenate-then-slice loop;
    the result is identical, including '' for an empty mapping.
    """
    return ", ".join(authors)
def yearsRangeForFile(authors):
    """Format the overall copyright year span of *authors* (a mapping of
    author -> {"min": datetime, "max": datetime}) as 'YYYY' or 'YYYY-YYYY'."""
    first, last = 10000, 0
    for info in authors.values():
        first = min(first, info["min"].year)
        last = max(last, info["max"].year)
    return str(first) if first == last else '{}-{}'.format(first, last)
def removeTempDirs(directory):
    """Delete the Visual Studio '.vs' cache folder under *directory*, if any."""
    vs_dir = Path(directory) / '.vs'
    if os.path.isdir(vs_dir):
        shutil.rmtree(vs_dir)
def appendHeadersToCXX(parentDir, directory):
    """Stamp license text onto every non-excluded C/C++ source under
    *directory*: prepend '<copyright line> + footerText' and append
    '<copyright line> + endingText'.
    """
    for file in collect_files(directory, cpp_sources):
        fileName = str(file).replace(str(parentDir) + '\\', "").replace('\\', '/').strip()
        fileForPublishing = (not fileName in fileExceptions)
        # A file is also excluded when it lives under an excluded directory.
        for exceptDir in fileExceptions:
            if os.path.isdir(exceptDir):
                if(in_directory(fileName, exceptDir)):
                    fileForPublishing = False
                    break
        if not fileForPublishing:
            print("skipped file:", fileName)
        else :
            authors = getFileAuthors(fileName)
            # e.g. '// Copyright (C) 2015-2017 A. Author, B. Author'
            authorsCopyright = "// Copyright (C) " + yearsRangeForFile(authors) + " " + authorsListForFile(authors) + "\n"
            resultHeader = ""
            resultHeader += authorsCopyright
            resultHeader += footerText
            with open (file, "r+") as src:
                content = src.read()
                src.seek(0, 0)
                # Rewrite from the start: header, original content, footer.
                src.write(resultHeader + "\n" + content + "\n\n\n\n")
                src.write(authorsCopyright)
                src.write(endingText)
            print("published file:", fileName, os.path.dirname(fileName))
def publishSourceDir(directory):
    """Copy *directory* into publishingDir, then strip comments and stamp
    license headers onto its C/C++ sources."""
    print('publishing directory:', directory)
    resultDir = publishingDir / directory
    removeTempDirs(directory)
    shutil.copytree(directory, resultDir)
    removeCommentsFromCXX(publishingDir, resultDir)
    appendHeadersToCXX(publishingDir, resultDir)
def copySourceDir(rootDirectory):
    """Copy *rootDirectory* verbatim into publishingDir, skipping directories
    listed in thirdPartyExceptions.

    NOTE(review): the unconditional 'return' ends os.walk after its first
    iteration, so only the top level is inspected here (copytree still
    copies the subtrees of the kept directories).
    """
    for root, dirs, files in os.walk(rootDirectory):
        for directory in dirs:
            directory = (rootDirectory + '/' + directory).replace('\\', '/')
            directoryForPublishing = not directory in thirdPartyExceptions
            if not directoryForPublishing:
                print("skipped directory:", directory)
            else :
                print('copy directory:', directory)
                resultDir = publishingDir / directory
                removeTempDirs(directory)
                shutil.copytree(directory, resultDir)
        for file in files:
            file = (rootDirectory + '/' + file).replace('\\', '/')
            resultFile = publishingDir / file
            print('copy file:', file, "to", resultFile)
            # Create the destination directory on demand.
            if not os.path.exists(os.path.dirname(resultFile)):
                os.makedirs(os.path.dirname(resultFile))
            shutil.copyfile(file, resultFile)
        return
def cleanup():
    """Remove the result of a previous publishing run, if present."""
    print('cleanup old result')
    if os.path.isdir(publishingDir):
        shutil.rmtree(publishingDir)
#encodedRevision = to_morse('8590')
#print(encodedRevision)
#decodedRevision = from_morse(encodedRevision)
#print(decodedRevision)
# Run the whole publishing pipeline from two directory levels up.
apppath = '../../'
with cwd(apppath):
    cleanup()
    publishSourceDir('Stable')
    copySourceDir('VS_Props')
    copySourceDir('Tools')
    copySourceDir('third_party')
    copySourceDir('toolchain')
"[email protected]"
] | |
3c809ed1f28298769e165e8722d0c2e534b5aabc | 2b731e5f1e07787eac07b48e97ec34665672432e | /score.py | 19b3c9aa3b4753c261c53a3962d9f0272bb89eb6 | [
"Apache-2.0"
] | permissive | tejasbangera/Churn_Prediction_Project | df9a0ff4ab370bda6f3e023406b51a3fd046fdf7 | ef4952373046c2a709d0952d4ae4664ca3f85ed4 | refs/heads/main | 2023-04-17T04:48:08.875900 | 2021-04-18T19:27:34 | 2021-04-18T19:27:34 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,015 | py | # ---------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# ---------------------------------------------------------
import json
import logging
import os
import pickle
import numpy as np
import pandas as pd
import joblib
import azureml.automl.core
from azureml.automl.core.shared import logging_utilities, log_server
from azureml.telemetry import INSTRUMENTATION_KEY
from inference_schema.schema_decorators import input_schema, output_schema
from inference_schema.parameter_types.numpy_parameter_type import NumpyParameterType
from inference_schema.parameter_types.pandas_parameter_type import PandasParameterType
input_sample = pd.DataFrame({"customerID": pd.Series(["example_value"], dtype="object"), "gender": pd.Series(["example_value"], dtype="object"), "SeniorCitizen": pd.Series([0], dtype="int64"), "Partner": pd.Series([False], dtype="bool"), "Dependents": pd.Series([False], dtype="bool"), "tenure": pd.Series([0], dtype="int64"), "PhoneService": pd.Series([False], dtype="bool"), "MultipleLines": pd.Series(["example_value"], dtype="object"), "InternetService": pd.Series(["example_value"], dtype="object"), "OnlineSecurity": pd.Series(["example_value"], dtype="object"), "OnlineBackup": pd.Series(["example_value"], dtype="object"), "DeviceProtection": pd.Series(["example_value"], dtype="object"), "TechSupport": pd.Series(["example_value"], dtype="object"), "StreamingTV": pd.Series(["example_value"], dtype="object"), "StreamingMovies": pd.Series(["example_value"], dtype="object"), "Contract": pd.Series(["example_value"], dtype="object"), "PaperlessBilling": pd.Series([False], dtype="bool"), "PaymentMethod": pd.Series(["example_value"], dtype="object"), "MonthlyCharges": pd.Series([0.0], dtype="float64"), "TotalCharges": pd.Series([0.0], dtype="float64")})
output_sample = np.array([0])
# Best-effort telemetry/log setup; scoring should still start if it fails.
try:
    log_server.enable_telemetry(INSTRUMENTATION_KEY)
    log_server.set_verbosity('INFO')
    logger = logging.getLogger('azureml.automl.core.scoring_script')
except:
    # NOTE(review): bare except swallows *all* errors (and can leave
    # 'logger' undefined); consider 'except Exception' plus a fallback.
    pass
def init():
    """Load the deployed model into the global 'model'.

    Called once when the scoring container starts. Deserializes model.pkl
    from AZUREML_MODEL_DIR back into a scikit-learn model via joblib.
    """
    global model
    # This name is model.id of model that we want to deploy deserialize the model file back
    # into a sklearn model
    model_path = os.path.join(os.getenv('AZUREML_MODEL_DIR'), 'model.pkl')
    path = os.path.normpath(model_path)
    path_split = path.split(os.sep)
    # Path components [-3]/[-2] carry the model name and version.
    log_server.update_custom_dimensions({'model_name': path_split[-3], 'model_version': path_split[-2]})
    try:
        logger.info("Loading model from path.")
        model = joblib.load(model_path)
        logger.info("Loading successful.")
    except Exception as e:
        # Log the full traceback, then let the startup failure propagate.
        logging_utilities.log_traceback(e, logger)
        raise
@input_schema('data', PandasParameterType(input_sample))
@output_schema(NumpyParameterType(output_sample))
def run(data):
    """Score one request: predict on the input DataFrame.

    Returns a JSON string: {"result": [...]} on success, or
    {"error": "<message>"} if prediction fails.
    """
    try:
        result = model.predict(data)
        return json.dumps({"result": result.tolist()})
    except Exception as e:
        # Deliberately broad: a scoring endpoint reports the error to the
        # caller instead of crashing the service.
        result = str(e)
        return json.dumps({"error": result})
| [
"[email protected]"
] | |
b18f4e5715cc0567e023636832971941060e8f3f | 3d5a25705f52fe942480779d79c44c60aae41624 | /korean.py | f02c0eaa542e9591a14ced64ebd13feada161392 | [] | no_license | kirumon/KoreanIME | 99d392a8233b6c899c3abb3983a60667d4f41dc3 | db67b9b234e6f47f3787965e1bbbbc7de5a0d385 | refs/heads/main | 2023-08-24T07:19:21.607686 | 2021-10-14T12:24:37 | 2021-10-14T12:24:37 | 313,274,620 | 4 | 2 | null | null | null | null | UTF-8 | Python | false | false | 37,119 | py | #
# class Korean()
# Handles characters typed on an English (QWERTY, 2-beolsik) keyboard as Hangul input
#
# Tuples used to tell whether a keystroke is a consonant or a vowel
CONSONANT = ('r','R','s','S','e','E','f','F','a','A','q','Q','t','T','d','D','w','W','c','C','z','Z','x','X','v','V','g','G')
VOWEL = ('k','K','o','O','i','I','j','J','p','P','u','U','h','H','y','Y','n','N','b','B','m','M','l','L')
# Tuples of initial (choseong), medial (jungseong) and final (jongseong) jamo key codes
CHOSEONG = ('r','R','s','e','E','f','a','q','Q','t','T','d','w','W','c','z','x','v','g')
JUNGSEONG = ('k','o','i','O','j','p','u','P','h','hk','ho','hl','y','n','nj','np','nl','b','m','ml','l')
JONGSEONG = ('','r','R','rt','s','sw','sg','e','f','fr','fa','fq','ft','fx','fv','fg','a','q','qt','t','T','d','w','c','z','x','v','g')
# Tuple used for handling input at the phoneme (jamo) level
PHOENMES = (
    'r','R','rt','s','sw','sg','e','E','f','fr','fa','fq','ft','fx','fv','fg',
    'a','q','Q','qt','t','T','d','w','W','c','z','x','v','g','k','o','i','O',
    'j','p','u','P','h','hk','ho','hl','y','n','nj','np','nl','b','m','ml','l'
)
# Consonants and vowels typed with Shift held (uppercase keys)
UPPER_CASE = ('Q','W','E','R','T','O','P')
# Unicode base code points: single jamo (12593 = U+3131), composed syllables (44032 = U+AC00)
BASE_CODE = (12593, 44032)
class Korean:
def __init__(self, koreanMode=False, multi=False):
self.combineChar = ""
self.status = ""
self.lines = []
self.cursor = 0
self.currentLine = 0
self.selectStart = 0
self.selectStartLine = 0
self.selectEnd = 0
self.selectEndLine = 0
self.koreanMode = koreanMode
self.multiLine = multi
def SelectedText(self):
if self.multiLine:
temp = ""
for i in range(len(self.lines)):
if i < self.selectStartLine or i > self.selectEndLine:
continue
else:
if i == self.selectStartLine and i != self.selectEndLine:
temp += self.lines[i][self.selectStart:]+'\n'
elif i != self.selectStartLine and i != self.selectEndLine:
temp += self.lines[i]+'\n'
elif i != self.selectStartLine and i == self.selectEndLine:
temp += self.lines[i][:self.selectEnd]
else:
temp = self.lines[self.currentLine][self.selectStart:self.selectEnd]
return temp
else:
return self.lines[self.currentLine][self.selectStart:self.selectEnd]
def Input(self, char):
if self.selectStartLine != self.selectEndLine:
lines = []
for i, s in enumerate(self.lines):
if i < self.selectStartLine or i > self.selectEndLine:
lines.append(s)
elif i == self.selectStartLine:
if i == self.selectEndLine:
lines.append(self.lines[i][:self.selectStart] + self.lines[i][self.selectEnd:])
else:
lines.append(self.lines[i][:self.selectStart])
elif i > self.selectStartLine and i < self.selectEndLine:
continue
else:
lines.append(self.lines[i][self.selectEnd:])
self.lines = lines
self.currentLine = self.selectEndLine = self.selectStartLine
self.cursor = self.selectEnd = self.selectStart
elif self.selectStart != self.selectEnd:
self.lines[self.currentLine] = self.lines[self.currentLine][:self.selectStart] + self.lines[self.currentLine][self.selectEnd:]
self.cursor = self.selectEnd = self.selectStart
if self.koreanMode:
self.processKoreanInput(char if char in UPPER_CASE else char.lower())
else:
if self.status != "":
self.lines[self.currentLine] = self.lines[self.currentLine][:self.selectStart] + self.combine() + self.lines[self.currentLine][self.selectEnd:]
self.status = self.combineChar = ""
self.cursor += 1
self.selectStart = self.selectEnd = self.cursor
self.lines[self.currentLine] = self.lines[self.currentLine][:self.selectStart] + char + self.lines[self.currentLine][self.selectEnd:]
self.cursor += 1
self.selectStart = self.selectEnd = self.cursor
    def Delete(self, backspace=False):
        """Delete the selection, or one character when nothing is selected.

        backspace=True  -> delete backwards (character before the cursor);
        backspace=False -> delete forwards (character at the cursor).
        When a Hangul composition is pending, backspace removes one keystroke
        from it, while forward delete only commits the composition.
        """
        if self.selectStartLine != self.selectEndLine:
            # Multi-line selection: rebuild the buffer keeping the head of
            # the first selected line, dropping the lines in between, and
            # appending the tail of the last selected line to the head.
            temp = []
            for i, s in enumerate(self.lines):
                if i < self.selectStartLine or i > self.selectEndLine:
                    temp.append(s)
                else:
                    if i == self.selectStartLine and i != self.selectEndLine:
                        temp.append(s[:self.selectStart])
                    elif i != self.selectStartLine and i != self.selectEndLine:
                        continue
                    elif i != self.selectStartLine and i == self.selectEndLine:
                        temp[-1] += s[self.selectEnd:]
                    else:
                        temp.append(s[:self.selectStart]+s[self.selectEnd:])
            self.lines = temp
            # Collapse the selection to its start point.
            self.currentLine = self.selectEndLine = self.selectStartLine
            self.cursor = self.selectEnd = self.selectStart
        elif self.selectStart != self.selectEnd:
            # Single-line selection: cut it out of the current line.
            self.lines[self.currentLine] = self.lines[self.currentLine][:self.selectStart] + self.lines[self.currentLine][self.selectEnd:]
            self.cursor = self.selectEnd = self.selectStart
        else:
            if backspace:
                if self.status != "":
                    # A composition is pending: drop its last keystroke only.
                    self.status = self.status[:len(self.status)-1]
                    self.combineChar = self.combineChar[:len(self.combineChar)-1]
                else:
                    if self.selectStart > 0:
                        # Remove the character before the cursor.
                        self.selectStart -= 1
                        self.lines[self.currentLine] = self.lines[self.currentLine][:self.selectStart] + self.lines[self.currentLine][self.selectEnd:]
                        self.cursor = self.selectEnd = self.selectStart
                    elif self.currentLine > 0:
                        # At column 0: join this line onto the previous one.
                        pos = len(self.lines[self.currentLine-1])
                        self.lines[self.currentLine-1] += self.lines[self.currentLine]
                        del self.lines[self.currentLine]
                        self.currentLine -= 1
                        self.selectStartLine = self.selectEndLine = self.currentLine
                        self.selectStart = self.selectEnd = self.cursor = pos
            else:
                if self.status != "":
                    # Forward delete with a pending composition: commit the
                    # composed syllable; nothing is actually removed.
                    self.lines[self.currentLine] = self.lines[self.currentLine][:self.selectStart] + self.combine() + self.lines[self.currentLine][self.selectEnd:]
                    self.status = self.combineChar = ""
                    self.cursor += 1
                    self.selectStart = self.selectEnd = self.cursor
                else:
                    if self.cursor < len(self.lines[self.currentLine]):
                        # Remove the character at the cursor.
                        self.lines[self.currentLine] = self.lines[self.currentLine][:self.selectStart] + self.lines[self.currentLine][self.selectEnd+1:]
                    elif self.currentLine < len(self.lines) - 1:
                        # At end of line: join the next line onto this one.
                        self.lines[self.currentLine] += self.lines[self.currentLine+1]
                        del self.lines[self.currentLine+1]
    def LineHome(self, shift):
        """Move the cursor to column 0 of the current line.

        With shift=True the selection is extended/shrunk towards the line
        start instead of being collapsed. A pending Hangul composition is
        committed to the buffer before the move.
        """
        if self.multiLine:
            if self.status != "":
                # Commit the pending composition in place.
                self.lines[self.currentLine] = self.lines[self.currentLine][:self.selectStart] + self.combine() + self.lines[self.currentLine][self.selectEnd:]
                self.status = self.combineChar = ""
            # True when the cursor sits at the selection's start anchor.
            atStart = True if self.currentLine == self.selectStartLine and self.cursor == self.selectStart else False
            if shift:
                if self.selectStartLine == self.selectEndLine:
                    if not atStart:
                        self.selectEnd = self.selectStart
                    self.selectStart = 0
                else:
                    # Selection spans lines: move the anchor the cursor is on.
                    if atStart:
                        self.selectStart = 0
                    else:
                        self.selectEnd = 0
                    self.cursor = 0
            else:
                # Plain Home: collapse the selection at column 0.
                self.cursor = 0
                self.selectStartLine = self.selectEndLine = self.currentLine
                self.selectStart = self.selectEnd = self.cursor
        else:
            if shift:
                if self.selectStart == self.selectEnd:
                    if self.status != "":
                        self.lines[self.currentLine] = self.lines[self.currentLine][:self.selectStart] + self.combine() + self.lines[self.currentLine][self.selectEnd:]
                        self.status = self.combineChar = ""
                        # The committed syllable becomes part of the selection.
                        self.selectEnd += 1
                    self.cursor = self.selectStart = 0
                else:
                    self.cursor = self.selectStart = 0
            else:
                if self.cursor == self.selectEnd:
                    self.selectEnd = self.selectStart
                    self.cursor = self.selectStart = 0
                else:
                    if self.status != "":
                        self.lines[self.currentLine] = self.lines[self.currentLine][:self.selectStart] + self.combine() + self.lines[self.currentLine][self.selectEnd:]
                        self.status = self.combineChar = ""
                    self.selectStart = self.selectEnd = self.cursor = 0
    def LineEnd(self, shift):
        """Move the cursor to the end of the current line.

        With shift=True the selection is extended/shrunk towards the line end
        instead of being collapsed.
        NOTE(review): unlike LineHome(), the multi-line branch does not commit
        a pending composition first -- confirm whether that is intentional.
        """
        if self.multiLine:
            # True when the cursor sits at the selection's start anchor.
            atStart = True if self.currentLine == self.selectStartLine and self.cursor == self.selectStart else False
            if shift:
                if self.selectStartLine == self.selectEndLine:
                    if atStart:
                        self.selectStart = self.selectEnd
                    self.selectEnd = len(self.lines[self.currentLine])
                else:
                    # Selection spans lines: move the anchor the cursor is on.
                    if atStart:
                        self.selectStart = len(self.lines[self.currentLine])
                    else:
                        self.selectEnd = len(self.lines[self.currentLine])
                    self.cursor = len(self.lines[self.currentLine])
            else:
                # Plain End: collapse the selection at the line end.
                self.cursor = len(self.lines[self.currentLine])
                self.selectStartLine = self.selectEndLine = self.currentLine
                self.selectStart = self.selectEnd = self.cursor
        else:
            if shift:
                if self.selectStart == self.selectEnd:
                    if self.status != "":
                        # Commit the pending composition; the selection start
                        # then moves past the committed syllable.
                        self.lines[self.currentLine] = self.lines[self.currentLine][:self.selectStart] + self.combine() + self.lines[self.currentLine][self.selectEnd:]
                        self.status = self.combineChar = ""
                        self.selectStart += 1
                    self.cursor = self.selectEnd = len(self.lines[self.currentLine])
                else:
                    if self.cursor == self.selectStart:
                        self.selectStart = self.selectEnd
                    self.cursor = self.selectEnd = len(self.lines[self.currentLine])
            else:
                if self.status != "":
                    self.lines[self.currentLine] = self.lines[self.currentLine][:self.selectStart] + self.combine() + self.lines[self.currentLine][self.selectEnd:]
                    self.status = self.combineChar = ""
                self.selectStart = self.selectEnd = self.cursor = len(self.lines[self.currentLine])
    def MoveLeft(self, shift=False):
        """Handle the Left-arrow key.

        Moves the cursor one position left, wrapping to the end of the
        previous line in multi-line mode.  With *shift* the selection is
        extended; without it an existing selection collapses to its start.
        Any pending jamo composition is committed first.
        """
        # True when the cursor sits at the start anchor of the selection.
        atStart = True if self.currentLine == self.selectStartLine and self.cursor == self.selectStart else False
        if self.multiLine:
            if shift:
                if self.status != "":
                    # Commit the pending composition into the current line.
                    self.lines[self.currentLine] = self.lines[self.currentLine][:self.selectStart] + self.combine() + self.lines[self.currentLine][self.selectEnd:]
                    self.status = self.combineChar = ""
                    self.cursor += 1
                    self.selectEnd = self.cursor
                if self.cursor > 0:
                    self.cursor -= 1
                else:
                    # At column 0: wrap to the end of the previous line.
                    if self.currentLine > 0:
                        self.currentLine -= 1
                        self.cursor = len(self.lines[self.currentLine])
                    else:
                        self.cursor = 0
                if atStart:
                    # Extend by moving the start edge of the selection.
                    self.selectStartLine = self.currentLine
                    self.selectStart = self.cursor
                else:
                    # Extend (shrink) by moving the end edge of the selection.
                    self.selectEndLine = self.currentLine
                    self.selectEnd = self.cursor
            else:
                if self.selectStartLine == self.selectEndLine and self.selectStart == self.selectEnd:
                    # No selection: plain cursor movement.
                    if self.status == "":
                        if self.cursor > 0:
                            self.cursor = self.cursor - 1
                        else:
                            if self.currentLine > 0:
                                self.currentLine -= 1
                                self.cursor = len(self.lines[self.currentLine])
                            else:
                                self.cursor = 0
                    else:
                        # A pending composition is committed instead of moving.
                        self.lines[self.currentLine] = self.lines[self.currentLine][:self.selectStart] + self.combine() + self.lines[self.currentLine][self.selectEnd:]
                        self.status = self.combineChar = ""
                    self.selectStart = self.selectEnd = self.cursor
                    self.selectStartLine = self.selectEndLine = self.currentLine
                else:
                    # Selection exists: collapse it to its start.
                    self.cursor = self.selectEnd = self.selectStart
                    self.currentLine = self.selectEndLine = self.selectStartLine
        else:
            if shift:
                if self.selectStart == self.selectEnd:
                    if self.status == "":
                        self.cursor = max(0, self.cursor-1)
                        self.selectStart = self.cursor
                    else:
                        # Commit the pending composition; the selection now
                        # covers the freshly committed syllable.
                        self.lines[self.currentLine] = self.lines[self.currentLine][:self.selectStart] + self.combine() + self.lines[self.currentLine][self.selectEnd:]
                        self.status = self.combineChar = ""
                        self.selectEnd += 1
                else:
                    if self.cursor == self.selectStart:
                        self.cursor = max(0, self.cursor-1)
                        self.selectStart = self.cursor
                    else:
                        self.cursor = max(0, self.cursor-1)
                        self.selectEnd = self.cursor
            else:
                if self.selectStart == self.selectEnd:
                    if self.status == "":
                        self.cursor = max(0, self.cursor-1)
                    else:
                        # Committing the composition consumes the keystroke.
                        self.lines[self.currentLine] = self.lines[self.currentLine][:self.selectStart] + self.combine() + self.lines[self.currentLine][self.selectEnd:]
                        self.status = self.combineChar = ""
                    self.selectStart = self.selectEnd = self.cursor
                else:
                    # Collapse the selection to its start.
                    self.cursor = self.selectEnd = self.selectStart
                    self.currentLine = self.selectEndLine = self.selectStartLine
    def MoveRight(self, shift=False):
        """Handle the Right-arrow key.

        Moves the cursor one position right, wrapping to the start of the
        next line in multi-line mode.  With *shift* the selection is
        extended; without it an existing selection collapses to its end.
        Any pending jamo composition is committed first.
        """
        # True when the cursor sits at the end anchor of the selection.
        atEnd = True if self.currentLine == self.selectEndLine and self.cursor == self.selectEnd else False
        if self.multiLine:
            if shift:
                if self.status != "":
                    # Commit the pending composition into the current line.
                    self.lines[self.currentLine] = self.lines[self.currentLine][:self.selectStart] + self.combine() + self.lines[self.currentLine][self.selectEnd:]
                    self.status = self.combineChar = ""
                    # NOTE(review): this bumps selectStart, while MoveLeft's
                    # commit bumps cursor/selectEnd — confirm the asymmetry
                    # is intentional.
                    self.selectStart += 1
                if self.cursor < len(self.lines[self.currentLine]):
                    self.cursor += 1
                else:
                    # At line end: wrap to the start of the next line.
                    if self.currentLine < len(self.lines) - 1:
                        self.cursor = 0
                        self.currentLine += 1
                    else:
                        self.cursor = len(self.lines[self.currentLine])
                if atEnd:
                    # Extend by moving the end edge of the selection.
                    self.selectEnd = self.cursor
                    self.selectEndLine = self.currentLine
                else:
                    # Extend (shrink) by moving the start edge of the selection.
                    self.selectStart = self.cursor
                    self.selectStartLine = self.currentLine
            else:
                if self.status == "":
                    if self.cursor < len(self.lines[self.currentLine]):
                        self.cursor += 1
                    else:
                        if self.currentLine < len(self.lines) - 1:
                            self.currentLine += 1
                            self.cursor = 0
                        else:
                            self.cursor = len(self.lines[self.currentLine])
                else:
                    # Commit the pending composition and step past it.
                    self.lines[self.currentLine] = self.lines[self.currentLine][:self.selectStart] + self.combine() + self.lines[self.currentLine][self.selectEnd:]
                    self.cursor += 1
                    self.status = self.combineChar = ""
                self.selectStart = self.selectEnd = self.cursor
                self.selectStartLine = self.selectEndLine = self.currentLine
        else:
            if shift:
                if self.selectStart == self.selectEnd:
                    if self.status != "":
                        # Commit the pending composition first.
                        self.lines[self.currentLine] = self.lines[self.currentLine][:self.selectStart] + self.combine() + self.lines[self.currentLine][self.selectEnd:]
                        self.status = self.combineChar = ""
                        self.cursor += 1
                        self.selectStart = self.selectEnd = self.cursor
                    self.cursor = min(len(self.lines[self.currentLine]), self.cursor+1)
                    self.selectEnd = self.cursor
                else:
                    if self.cursor == self.selectStart:
                        self.cursor = min(len(self.lines[self.currentLine]), self.cursor+1)
                        self.selectStart = self.cursor
                    else:
                        self.cursor = min(len(self.lines[self.currentLine]), self.cursor+1)
                        self.selectEnd = self.cursor
            else:
                if self.selectStart == self.selectEnd:
                    if self.status != "":
                        # Commit the pending composition, then move right.
                        self.lines[self.currentLine] = self.lines[self.currentLine][:self.selectStart] + self.combine() + self.lines[self.currentLine][self.selectEnd:]
                        self.status = self.combineChar = ""
                    self.cursor = min(len(self.lines[self.currentLine]), self.cursor+1)
                    self.selectStart = self.selectEnd = self.cursor
                else:
                    # Collapse the selection to its end.
                    self.cursor = self.selectStart = self.selectEnd
def MoveUp(self, shift=False):
atStart = True if self.currentLine == self.selectStartLine and self.cursor == self.selectStart else False
if self.multiLine:
if self.status != "":
self.lines[self.currentLine] = self.lines[self.currentLine][:self.selectStart] + self.combine() + self.lines[self.currentLine][self.selectEnd:]
self.status = self.combineChar = ""
self.cursor += 1
self.selectStart = self.selectEnd = self.cursor
if self.currentLine > 0 :
self.currentLine -= 1
self.cursor = min(self.cursor, len(self.lines[self.currentLine]))
if shift:
if atStart:
self.selectStartLine = self.currentLine
self.selectS = self.cursor
else:
if self.currentLine < self.selectStartLine:
self.selectEndLine = self.selectStartLine
self.selectEnd = self.selectStart
self.selectStartLine = self.currentLine
self.selectStart = self.cursor
elif self.currentLine == self.selectStartLine:
if self.cursor < self.selectStart:
self.selectEndLine = self.selectStartLine
self.selectEnd = self.selectStart
self.selectStartLine = self.currentLine
self.selectStart = self.cursor
else:
self.selectEndLine = self.currentLine
self.selectEnd = self.cursor
else:
self.selectEndLine = self.currentLine
self.selectEnd = self.cursor
else:
self.selectStartLine = self.selectEndLine = self.currentLine
self.selectStart = self.selectEnd = self.cursor
def MoveDown(self, shift=False):
atEnd = True if self.currentLine == self.selectEndLine and self.cursor == self.selectEnd else False
if self.multiLine:
if self.status != "":
self.lines[self.currentLine] = self.lines[self.currentLine][:self.selectStart] + self.combine() + self.lines[self.currentLine][self.selectEnd:]
self.status = self.combineChar = ""
self.cursor += 1
self.selectStart = self.selectEnd = self.cursor
if self.currentLine < len(self.lines) - 1:
self.currentLine += 1
self.cursor = min(self.cursor, len(self.lines[self.currentLine]))
if shift:
if atEnd:
self.selectEndLine = self.currentLine
self.selectEnd = self.cursor
else:
if self.currentLine < self.selectEndLine:
self.selectStartLine = self.currentLine
self.cursor = min(self.curosr, len(self.lines[self.currentLine]))
elif self.currentLine == self.selectEndLine:
if self.cursor > self.selectEnd:
self.selectStart = self.selectEnd
self.selectEnd = self.cursor
self.selectStartLine = self.currentLine
else:
self.selectStartLine = self.selectEndLine
self.selectStart = self.selectEnd
self.selectEndLine = self.currentLine
self.selectEnd = self.cursor
else:
self.selectStartLine = self.selectEndLine = self.currentLine
self.selectStart = self.selectEnd = self.cursor
    def LineFeed(self):
        """Handle the Enter key (multi-line mode only).

        Deletes the current selection (if any), commits a pending jamo
        composition, then splits the current line at the cursor and moves
        the cursor to the start of the new line.
        """
        if not self.multiLine:
            return
        if self.selectStartLine != self.selectEndLine:
            # Multi-line selection: rebuild self.lines with the selected
            # span removed, joining the partial first and last lines.
            temp = []
            for i, s in enumerate(self.lines):
                if i < self.selectStartLine or i > self.selectEndLine:
                    temp.append(s)
                else:
                    if i == self.selectStartLine and i != self.selectEndLine:
                        # Keep the text before the selection start.
                        temp.append(s[:self.selectStart])
                    elif i != self.selectStartLine and i != self.selectEndLine:
                        # Fully selected line: drop it.
                        continue
                    elif i != self.selectStartLine and i == self.selectEndLine:
                        # Append the tail after the selection end to the
                        # previously kept partial line.
                        temp[-1] += s[self.selectEnd:]
                    else:
                        temp.append(s[:self.selectStart]+s[self.selectEnd:])
            self.lines = temp
            self.currentLine = self.selectEndLine = self.selectStartLine
            self.cursor = self.selectEnd = self.selectStart
        elif self.selectStart != self.selectEnd:
            # Single-line selection: cut it out of the current line.
            self.lines[self.currentLine] = self.lines[self.currentLine][:self.selectStart] + self.lines[self.currentLine][self.selectEnd:]
            self.cursor = self.selectEnd = self.selectStart
        if self.status != "":
            # Commit the pending composition before splitting the line.
            self.lines[self.currentLine] = self.lines[self.currentLine][:self.selectStart] + self.combine() + self.lines[self.currentLine][self.selectEnd:]
            self.status = self.combineChar = ""
            self.cursor += 1
            self.selectStart = self.selectEnd = self.cursor
        # Split the current line at the cursor; the tail becomes a new line.
        newLine = self.lines[self.currentLine][self.cursor:]
        self.lines[self.currentLine] = self.lines[self.currentLine][:self.cursor]
        self.lines.insert(self.currentLine+1, newLine)
        self.currentLine += 1
        self.selectStartLine = self.selectEndLine = self.currentLine
        self.selectStart = self.selectEnd = self.cursor = 0
    def SelectAll(self):
        """Select the entire buffer (Ctrl+A), committing any pending
        jamo composition first."""
        if self.status != "":
            # Commit the pending composition into the current line.
            self.lines[self.currentLine] = self.lines[self.currentLine][:self.selectStart] + self.combine() + self.lines[self.currentLine][self.selectEnd:]
            self.status = self.combineChar = ""
        if self.multiLine:
            # Select from the first line's start to the last line's end.
            self.selectStartLine = self.selectStart = 0
            self.selectEndLine = self.currentLine = len(self.lines) - 1
            self.selectEnd = self.cursor = len(self.lines[self.currentLine])
        else:
            self.selectStart = 0
            self.cursor = self.selectEnd = len(self.lines[self.currentLine])
def Paste(self, text):
if self.status != "":
self.lines[self.currentLine] = self.lines[self.currentLine][:self.selectStart] + self.combine() + self.lines[self.currentLine][self.selectEnd:]
self.status = self.combineChar = ""
self.cursor += 1
self.selectStart = self.selectEnd = self.cursor
if self.selectStartLine != self.selectEndLine:
temp = []
for i, s in enumerate(self.lines):
if i < self.selectStartLine or i > self.selectEndLine:
temp.append(s)
else:
if i == self.selectStartLine and i != self.selectEndLine:
temp.append(s[:self.selectStart])
elif i != self.selectStartLine and i != self.selectEndLine:
continue
elif i != self.selectStartLine and i == self.selectEndLine:
temp[-1] += s[self.selectEnd:]
else:
temp.append(s[:self.selectStart]+s[self.selectEnd:])
self.lines = temp
self.currentLine = self.selectEndLine = self.selectStartLine
self.cursor = self.selectEnd = self.selectStart
elif self.selectStart != self.selectEnd:
self.lines[self.currentLine] = self.lines[self.currentLine][:self.selectStart] + self.lines[self.currentLine][self.selectEnd:]
self.cursor = self.selectEnd = self.selectStart
if self.multiLine:
temp = text.split('\n')
leftText = self.lines[self.currentLine][:self.cursor]
rightText = self.lines[self.currentLine][self.cursor:]
for i in range(len(temp)):
if i == 0:
if len(temp) > 1:
self.lines[self.currentLine] = leftText + temp[i]
else:
self.lines[self.currentLine] = leftText + temp[i] + rightText
self.cursor += len(temp[i])
elif i > 0 and i < len(temp) - 2:
self.lines.insert(self.currentLine + 1, temp[i])
self.currentLine += 1
elif i == len(temp) - 1:
self.lines.insert(self.currentLine + 1, temp[i])
self.currentLine += 1
self.cursor = len(self.lines[self.currentLine])
self.lines[self.currentLine] += rightText
else:
self.lines[self.currentLine] = self.lines[self.currentLine][:self.cursor] + text + self.lines[self.currentLine][self.cursor:]
self.cursor = self.selectStart + len(text)
self.selectStart = self.selectEnd = self.cursor
    def GetMode(self):
        # Return the current input mode flag (truthy = Korean/Hangul mode).
        return self.koreanMode
    def SetMode(self, mode):
        """Switch the input mode (Korean vs. plain), committing any
        pending jamo composition before the switch."""
        if self.status != "":
            # Commit the pending composition into the current line.
            self.lines[self.currentLine] = self.lines[self.currentLine][:self.selectStart] + self.combine() + self.lines[self.currentLine][self.selectEnd:]
            self.status = self.combineChar = ""
            self.cursor += 1
            self.selectStart = self.selectEnd = self.cursor
        self.koreanMode = mode
    def SetText(self, text, multi=False):
        """Replace the buffer contents with *text* and select everything.

        *multi* selects multi-line mode.  Any pending composition is
        discarded.
        """
        # NOTE(review): when multi is False but text contains '\n', only
        # lines[0] is addressed by the selection below — confirm callers
        # never pass embedded newlines in single-line mode.
        self.lines = text.split('\n')
        self.status = self.combineChar = ""
        self.multiLine = multi
        if multi:
            # Select from the start of the first line to the end of the last.
            self.selectStart = self.selectStartLine = 0
            self.currentLine = self.selectEndLine = len(self.lines) - 1
            self.selectEnd = self.cursor = len(self.lines[-1])
        else:
            self.selectStart = self.selectStartLine = self.selectEndLine = self.currentLine = 0
            self.selectEnd = self.cursor = len(self.lines[0])
def GetText(self):
text = ""
cnt = len(self.lines)
if self.multiLine:
for i in range(cnt):
if i == self.currentLine and self.status != "":
text += self.lines[i][:self.cursor] + self.combine() + self.lines[i][self.cursor:]
else:
text += self.lines[i]
if i < cnt - 1:
text += '\n'
else:
if self.status != "":
text = self.lines[0][:self.cursor] + self.combine() + self.lines[0][self.cursor:]
else:
text = self.lines[0]
return text
    def processKoreanInput(self, char):
        """Feed one keystroke *char* to the Hangul composition automaton.

        Consonants and vowels are routed to the jamo state machine; any
        other character commits the pending composition and is inserted
        literally.
        """
        cType = self.getType(char)
        if cType == 'C':
            self.processConsonant(char)
        elif cType == 'V':
            self.processVowel(char)
        else:
            if self.status != "":
                # Commit the pending composition before the literal insert.
                self.lines[self.currentLine] = self.lines[self.currentLine][:self.selectStart] + self.combine() + self.lines[self.currentLine][self.selectEnd:]
                self.status = self.combineChar = ""
                self.cursor += 1
                self.selectStart = self.selectEnd = self.cursor
            self.lines[self.currentLine] = self.lines[self.currentLine][:self.selectStart] + char + self.lines[self.currentLine][self.selectEnd:]
            self.cursor += 1
            self.selectStart = self.selectEnd = self.cursor
    def processConsonant(self, char):
        """Advance the jamo automaton with a consonant keystroke.

        self.status encodes the pending composition as a string of 'C'
        (consonant) and 'V' (vowel) markers; self.combineChar holds the
        corresponding jamo characters.  When the new consonant cannot
        extend the pending syllable, the syllable is committed via
        combine() and a new composition starts with *char*.
        """
        if self.status == "":
            self.status = "C"
            self.combineChar = char
        elif self.status == "C":
            dc = self.combineChar + char
            if dc in PHOENMES:
                # The two consonants form a valid compound jamo.
                self.status += "C"
                self.combineChar += char
            else:
                # Commit the single consonant and restart with char.
                self.lines[self.currentLine] = self.lines[self.currentLine][:self.selectStart] + self.combine() + self.lines[self.currentLine][self.selectEnd:]
                self.cursor += 1
                self.selectStart = self.selectEnd = self.cursor
                self.status = "C"
                self.combineChar = char
        elif self.status in {"CC", "V", "VV"}:
            # These states cannot absorb another consonant: commit, restart.
            self.lines[self.currentLine] = self.lines[self.currentLine][:self.selectStart] + self.combine() + self.lines[self.currentLine][self.selectEnd:]
            self.cursor += 1
            self.selectStart = self.selectEnd = self.cursor
            self.status = "C"
            self.combineChar = char
        elif self.status in {"CV", "CVV"}:
            # The consonant becomes the final (jongseong) of the syllable.
            self.status += "C"
            self.combineChar += char
        elif self.status in {"CVC", "CVVC"}:
            dc = self.combineChar[-1] + char
            if dc in PHOENMES:
                # Valid double final consonant.
                self.status += "C"
                self.combineChar += char
            else:
                self.lines[self.currentLine] = self.lines[self.currentLine][:self.selectStart] + self.combine() + self.lines[self.currentLine][self.selectEnd:]
                self.cursor += 1
                self.selectStart = self.selectEnd = self.cursor
                self.status = "C"
                self.combineChar = char
        elif self.status in {"CVCC"}:
            # Syllable is full: commit and start a new composition.
            self.lines[self.currentLine] = self.lines[self.currentLine][:self.selectStart] + self.combine() + self.lines[self.currentLine][self.selectEnd:]
            self.cursor += 1
            self.selectStart = self.selectEnd = self.cursor
            self.status = "C"
            self.combineChar = char
def processVowel(self, char):
if self.status == "":
self.status = "V"
self.combineChar = char
elif self.status == "C":
self.status += "V"
self.combineChar += char
elif self.status == "V":
dc = self.combineChar + char
if dc in PHOENMES:
self.status += "V"
self.combineChar += char
else:
self.lines[self.currentLine] = self.lines[self.currentLine][:self.selectStart] + self.combine() + self.lines[self.currentLine][self.selectEnd:]
self.cursor += 1
self.selectStart = self.selectEnd = self.cursor
self.status = "V"
self.combineChar = char
elif self.status == "CC":
second_char = self.combineChar[1]
self.combineChar = self.combineChar[0]
self.lines[self.currentLine] = self.lines[self.currentLine][:self.selectStart] + self.combine() + self.lines[self.currentLine][self.selectEnd:]
self.cursor += 1
self.selectStart = self.selectEnd = self.cursor
self.status = "V"
self.combineChar = char
elif self.status == "CV":
dc = self.combineChar[-1] + char
if dc in PHOENMES:
self.status += "V"
self.combineChar += char
else:
self.lines[self.currentLine] = self.lines[self.currentLine][:self.selectStart] + self.combine() + self.lines[self.currentLine][self.selectEnd:]
self.cursor += 1
self.selectStart = self.selectEnd = self.cursor
self.status = "V"
self.combineChar = char
elif self.status in {"VV", "CVV"}:
self.lines[self.currentLine] = self.lines[self.currentLine][:self.selectStart] + self.combine() + self.lines[self.currentLine][self.selectEnd:]
self.cursor += 1
self.selectStart = self.selectEnd = self.cursor
self.status = "V"
self.combineChar = char
elif self.status in {"CVC", "CVCC", "CVVC", "CVVCC"}:
last_char = self.combineChar[-1]
self.combineChar = self.combineChar[:len(self.combineChar)-1]
self.status = self.status[:len(self.status)-1]
self.lines[self.currentLine] = self.lines[self.currentLine][:self.selectStart] + self.combine() + self.lines[self.currentLine][self.selectEnd:]
self.cursor += 1
self.selectStart = self.selectEnd = self.cursor
self.status = "CV"
self.combineChar = last_char + char
def getType(self, char):
if char in CONSONANT:
return "C"
elif char in VOWEL:
return "V"
return "N"
    def combine(self):
        """Compose the pending jamo sequence into a single character.

        Lone or compound jamo ("C"/"V"/"CC"/"VV") map into the
        compatibility-jamo range via BASE_CODE[0]; full syllables map into
        the precomposed Hangul syllable block via BASE_CODE[1]
        (presumably U+AC00 — not visible here).  The factors follow the
        Unicode formula: 588 = 21 vowels * 28 finals, 28 = finals
        including "no final".
        """
        if self.status in {"C", "V", "CC", "VV"}:
            # Standalone jamo: direct index into the phoneme table.
            return chr(BASE_CODE[0] + PHOENMES.index(self.combineChar))
        elif self.status == "CV":
            cho = CHOSEONG.index(self.combineChar[0])
            jung = JUNGSEONG.index(self.combineChar[1])
            return chr(BASE_CODE[1] + cho * 588 + jung * 28)
        elif self.status == "CVV":
            cho = CHOSEONG.index(self.combineChar[0])
            # Compound vowel occupies two characters of combineChar.
            jung = JUNGSEONG.index(self.combineChar[1:3])
            return chr(BASE_CODE[1] + cho * 588 + jung * 28)
        elif self.status == "CVC":
            cho = CHOSEONG.index(self.combineChar[0])
            jung = JUNGSEONG.index(self.combineChar[1])
            jong = JONGSEONG.index(self.combineChar[2])
            return chr(BASE_CODE[1] + cho * 588 + jung * 28 + jong)
        elif self.status == "CVCC":
            cho = CHOSEONG.index(self.combineChar[0])
            jung = JUNGSEONG.index(self.combineChar[1])
            # Double final consonant occupies two characters.
            jong = JONGSEONG.index(self.combineChar[2:4])
            return chr(BASE_CODE[1] + cho * 588 + jung * 28 + jong)
        elif self.status == "CVVC":
            cho = CHOSEONG.index(self.combineChar[0])
            jung = JUNGSEONG.index(self.combineChar[1:3])
            jong = JONGSEONG.index(self.combineChar[3])
            return chr(BASE_CODE[1] + cho * 588 + jung * 28 + jong)
        elif self.status == "CVVCC":
            cho = CHOSEONG.index(self.combineChar[0])
            jung = JUNGSEONG.index(self.combineChar[1:3])
            jong = JONGSEONG.index(self.combineChar[3:5])
            return chr(BASE_CODE[1] + cho * 588 + jung * 28 + jong)
        return ""
| [
"[email protected]"
] | |
3133017396655a7bd926bb313c27e1dfad0b2386 | 52107637d2687db30f168ba15ffd1e1e534f4cb4 | /mirdata/datasets/tinysol.py | a85b499e399d46cd903c904d19f330be624364d8 | [
"BSD-3-Clause"
] | permissive | mir-dataset-loaders/mirdata | 9be10e0201b08abf51fc72338ccaaacc8216145b | 496eb4a9120aa16ff6963792f0c8b738a0c3f310 | refs/heads/master | 2023-05-07T13:15:16.517429 | 2023-03-27T13:54:37 | 2023-03-27T13:54:37 | 170,765,267 | 297 | 65 | BSD-3-Clause | 2023-08-05T22:48:48 | 2019-02-14T22:11:33 | Python | UTF-8 | Python | false | false | 8,538 | py | """TinySOL Dataset Loader.
.. admonition:: Dataset Info
:class: dropdown
TinySOL is a dataset of 2913 samples, each containing a single musical note from one of 14
different instruments:
- Bass Tuba
- French Horn
- Trombone
- Trumpet in C
- Accordion
- Contrabass
- Violin
- Viola
- Violoncello
- Bassoon
- Clarinet in B-flat
- Flute
- Oboe
- Alto Saxophone
These sounds were originally recorded at Ircam in Paris (France) between 1996
and 1999, as part of a larger project named Studio On Line (SOL). Although SOL
contains many combinations of mutes and extended playing techniques, TinySOL
purely consists of sounds played in the so-called "ordinary" style, and in
absence of mute.
TinySOL can be used for education and research purposes. In particular, it can
be employed as a dataset for training and/or evaluating music information
retrieval (MIR) systems, for tasks such as instrument recognition or
fundamental frequency estimation. For this purpose, we provide an official 5-fold
split of TinySOL as a metadata attribute. This split has been carefully balanced
in terms of instrumentation, pitch range, and dynamics. For the sake of research
reproducibility, we encourage users of TinySOL to adopt this split and report
their results in terms of average performance across folds.
We encourage TinySOL users to subscribe to the Ircam Forum so that they can
have access to larger versions of SOL.
For more details, please visit: https://www.orch-idea.org/
"""
import csv
import os
from typing import BinaryIO, Optional, Tuple
from deprecated.sphinx import deprecated
import librosa
import numpy as np
from smart_open import open
from mirdata import core, download_utils, io, jams_utils
# Citation for the TinySOL dataset.
# BUGFIX: the BibTeX field was misspelled "bootktitle"; "booktitle" is the
# valid field name for @inproceedings entries.
BIBTEX = """@inproceedings{cella2020preprint,
author={Cella, Carmine Emanuele and Ghisi, Daniele and Lostanlen, Vincent and
Lévy, Fabien and Fineberg, Joshua and Maresz, Yan},
title={{OrchideaSOL}: {A} dataset of extended
instrumental techniques for computer-aided orchestration},
booktitle={Under review},
year={2020}
}"""
# Available dataset index versions.
INDEXES = {
    "default": "6.0",
    "test": "6.0",
    "6.0": core.Index(filename="tinysol_index_6.0.json"),
}
# Remote archives (audio + metadata CSV) hosted on Zenodo.
REMOTES = {
    "audio": download_utils.RemoteFileMetadata(
        filename="TinySOL.tar.gz",
        url="https://zenodo.org/record/3685367/files/TinySOL.tar.gz?download=1",
        checksum="36030a7fe389da86c3419e5ee48e3b7f",
        destination_dir="audio",
    ),
    "annotations": download_utils.RemoteFileMetadata(
        filename="TinySOL_metadata.csv",
        url="https://zenodo.org/record/3685367/files/TinySOL_metadata.csv?download=1",
        checksum="a86c9bb115f69e61f2f25872e397fc4a",
        destination_dir="annotation",
    ),
}
# Map numeric string IDs to roman numerals (string I is the highest).
STRING_ROMAN_NUMERALS = {1: "I", 2: "II", 3: "III", 4: "IV"}
LICENSE_INFO = "Creative Commons Attribution 4.0 International Public License."
class Track(core.Track):
    """tinysol Track class
    Args:
        track_id (str): track id of the track
    Attributes:
        audio_path (str): path of the audio file
        dynamics (str): dynamics abbreviation. Ex: pp, mf, ff, etc.
        dynamics_id (int): pp=0, p=1, mf=2, f=3, ff=4
        family (str): instrument family encoded by its English name
        instance_id (int): instance ID. Either equal to 0, 1, 2, or 3.
        instrument_abbr (str): instrument abbreviation
        instrument_full (str): instrument encoded by its English name
        is_resampled (bool): True if this sample was pitch-shifted from a neighbor; False if it was genuinely recorded.
        pitch (str): string containing English pitch class and octave number
        pitch_id (int): MIDI note index, where middle C ("C4") corresponds to 60
        split (int): index of the official 5-fold split this track belongs to
        string_id (int or None): string ID. By musical convention, the first
            string is the highest. On wind instruments, this is replaced by `None`.
        technique_abbr (str): playing technique abbreviation
        technique_full (str): playing technique encoded by its English name
        track_id (str): track id
    """
    def __init__(self, track_id, data_home, dataset_name, index, metadata):
        super().__init__(track_id, data_home, dataset_name, index, metadata)
        self.audio_path = self.get_path("audio")
    # The properties below read the per-track row of the metadata CSV
    # (see Dataset._metadata); .get() returns None when the key is absent.
    @property
    def split(self):
        return self._track_metadata.get("Fold")
    @property
    def family(self):
        return self._track_metadata.get("Family")
    @property
    def instrument_abbr(self):
        return self._track_metadata.get("Instrument (abbr.)")
    @property
    def instrument_full(self):
        return self._track_metadata.get("Instrument (in full)")
    @property
    def technique_abbr(self):
        return self._track_metadata.get("Technique (abbr.)")
    @property
    def technique_full(self):
        return self._track_metadata.get("Technique (in full)")
    @property
    def pitch(self):
        return self._track_metadata.get("Pitch")
    @property
    def pitch_id(self):
        return self._track_metadata.get("Pitch ID")
    @property
    def dynamics(self):
        return self._track_metadata.get("Dynamics")
    @property
    def dynamics_id(self):
        return self._track_metadata.get("Dynamics ID")
    @property
    def instance_id(self):
        return self._track_metadata.get("Instance ID")
    @property
    def string_id(self):
        return self._track_metadata.get("String ID")
    @property
    def is_resampled(self):
        return self._track_metadata.get("Resampled")
    @property
    def audio(self) -> Optional[Tuple[np.ndarray, float]]:
        """The track's audio
        Returns:
            * np.ndarray - audio signal
            * float - sample rate
        """
        return load_audio(self.audio_path)
    def to_jams(self):
        """Get the track's data in jams format
        Returns:
            jams.JAMS: the track's data in jams format
        """
        return jams_utils.jams_converter(
            audio_path=self.audio_path, metadata=self._track_metadata
        )
@io.coerce_to_bytes_io
def load_audio(fhandle: BinaryIO) -> Tuple[np.ndarray, float]:
    """Load a TinySOL audio file.
    Args:
        fhandle (str or file-like): File-like object or path to audio file
    Returns:
        * np.ndarray - the mono audio signal
        * float - The sample rate of the audio file
    """
    # sr=None preserves the file's native sample rate; mono=True downmixes.
    return librosa.load(fhandle, sr=None, mono=True)
@core.docstring_inherit(core.Dataset)
class Dataset(core.Dataset):
    """
    The tinysol dataset
    """
    def __init__(self, data_home=None, version="default"):
        super().__init__(
            data_home,
            version,
            name="tinysol",
            track_class=Track,
            bibtex=BIBTEX,
            indexes=INDEXES,
            remotes=REMOTES,
            license_info=LICENSE_INFO,
        )
    @core.cached_property
    def _metadata(self):
        # Parse TinySOL_metadata.csv into a dict keyed by the audio file's
        # basename (without extension), one entry per track.
        metadata_path = os.path.join(
            self.data_home, "annotation", "TinySOL_metadata.csv"
        )
        metadata_index = {}
        try:
            with open(metadata_path, "r") as fhandle:
                csv_reader = csv.reader(fhandle, delimiter=",")
                # Skip the CSV header row.
                next(csv_reader)
                for row in csv_reader:
                    # row[0] is the relative audio path; its stem is the key.
                    key = os.path.splitext(os.path.split(row[0])[1])[0]
                    metadata_index[key] = {
                        "Fold": int(row[1]),
                        "Family": row[2],
                        "Instrument (abbr.)": row[3],
                        "Instrument (in full)": row[4],
                        "Technique (abbr.)": row[5],
                        "Technique (in full)": row[6],
                        "Pitch": row[7],
                        "Pitch ID": int(row[8]),
                        "Dynamics": row[9],
                        "Dynamics ID": int(row[10]),
                        "Instance ID": int(row[11]),
                        "Resampled": (row[13] == "TRUE"),
                    }
                    # String ID is empty for wind instruments; only stored
                    # when present (readers use .get and receive None).
                    if len(row[12]) > 0:
                        metadata_index[key]["String ID"] = int(float(row[12]))
        except FileNotFoundError:
            raise FileNotFoundError("Metadata not found. Did you run .download()?")
        return metadata_index
    @deprecated(reason="Use mirdata.datasets.tinysol.load_audio", version="0.3.4")
    def load_audio(self, *args, **kwargs):
        # Thin deprecated wrapper around the module-level load_audio().
        return load_audio(*args, **kwargs)
| [
"[email protected]"
] | |
30276de0b856764cc000adbae67afc15a35ee10e | 05eb31ac0f10eea0106a648d593aa3b8a9d6a021 | /manage.py | b92e7a575559aa799dae8f28ae0993800fe97238 | [] | no_license | ginlll/jelly | e632359a60dc74abff2f4188e2a9e0ec6d3bb166 | 83c582819ec1b6c3898540542999a40dcb85e7cf | refs/heads/master | 2023-02-10T10:50:31.306732 | 2019-06-04T08:26:47 | 2019-06-04T08:26:47 | 189,968,101 | 0 | 0 | null | 2023-02-02T06:28:01 | 2019-06-03T08:45:26 | Python | UTF-8 | Python | false | false | 491 | py | # coding:utf8
from flask import Flask
from flask_script import Manager,Server
from jelly import create_app, register_blueprints
# Build the Flask application and wrap it in a Flask-Script manager so the
# server can be started from the command line.
app = create_app()
manager = Manager(app)
@manager.option('-H', '--host', dest='host', help='Host address', default='0.0.0.0')
@manager.option('-p', '--port', dest='port', help='Application port', default=9098)
def runserver(host, port):
    """Register the app's blueprints and run the development server."""
    register_blueprints(app)
    # port may arrive as a string from the command line; coerce to int.
    app.run(host=host, port=int(port))
if __name__ == "__main__":
    manager.run()
| [
"[email protected]"
] | |
a926ac091af5b43367dc3084bb94bac7d6152fb7 | 8569c7da7b9f0c620b4b8d5589965066651b420c | /Django/mypizzadelivery/deliveryapp/migrations/0001_initial.py | c5460a410f8a21e00a8c2e3429cefe8d23c5311c | [] | no_license | mazzalaialan/Python | e467163331bdf0c590337d03f87c9318fc062651 | 9932459d1e68b53c8d28a596f7b7ded0a181c59a | refs/heads/master | 2023-08-21T16:35:24.171067 | 2023-08-10T02:34:02 | 2023-08-10T02:34:02 | 284,333,086 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 549 | py | # Generated by Django 3.1.1 on 2020-09-23 03:51
from django.db import migrations, models
class Migration(migrations.Migration):
    # Initial migration for deliveryapp: creates the PizzaModel table with
    # an auto primary key plus name and price character fields.
    initial = True
    dependencies = [
    ]
    operations = [
        migrations.CreateModel(
            name='PizzaModel',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=30)),
                ('price', models.CharField(max_length=10)),
            ],
        ),
    ]
| [
"[email protected]"
] | |
58d6e9df3ac68a7052137a97633751db1ec70851 | 979ee7d8b2e8228d0581760dc9f9e44c9b5e30c7 | /django/django_fundamentals/display_time/time_app/urls.py | 4aaaba0e53da1e643f5fbd1b82204cd33053c812 | [] | no_license | ameera-abed/python_stack | 6bebf0f779aac0463eafc26fdeb7b14ee3b781ee | f9f2d9b4cea2d74f716923185300896262dad425 | refs/heads/master | 2023-04-25T07:32:14.120155 | 2021-05-22T11:13:08 | 2021-05-22T11:13:08 | 364,503,433 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 97 | py | from django.urls import path
from . import views
# URL routes for time_app: the empty path maps to the index view.
urlpatterns = [
    path('',views.index),
]
| [
"[email protected]"
] | |
1f501bc33768aeb93ff9f31762840c2cd60a5341 | 97747f25239ed5f0be3351f69ab71dce8099dac2 | /smartmontools.py | 459e4286508a932415ccaf472272c032adff0e30 | [
"MIT"
] | permissive | antifuchs/datadog-disk-divination-checks | 93e3c7d7fae54c3e5359f70b594e24f4ce771788 | 093d48b50fec538db591031098aca536bab0bfac | refs/heads/master | 2021-01-15T23:59:29.168062 | 2017-08-10T17:25:53 | 2017-08-10T17:25:53 | 99,947,852 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 3,397 | py | from checks import AgentCheck
import re
from datetime import datetime
import os.path
import subprocess
import string
class SMARTMonTools(AgentCheck):
    # Datadog agent check (Python 2 — note the print statements in the
    # __main__ block below) that shells out to smartctl and reports
    # S.M.A.R.T. attributes as gauges and service checks.
    def process_sensor(self, line):
        # Split a pipe-separated sensor table row into a dict.
        table_sep_re = re.compile('\s*\|\s+')
        table_fields = ['name', 'value', 'unit', 'status']
        return dict(zip(table_fields, table_sep_re.split(line)))
    HEADER_LINE = 'ID# ATTRIBUTE_NAME FLAG VALUE WORST THRESH TYPE UPDATED WHEN_FAILED RAW_VALUE'
    def safe_device(self, name):
        # Derive a tag-safe device name from the /dev/disk/by-id style path,
        # e.g. ".../ata-sas-XXXX-YY" -> "XXXX_YY".
        # NOTE(review): assumes the basename contains "-sas-"; raises
        # IndexError otherwise — confirm against the configured device names.
        return string.replace(os.path.basename(name).rsplit('-sas-')[1], '-', '_')
    def check_device(self, device, instance):
        """Run smartctl -A on *device* and emit availability service checks,
        per-attribute gauges, and threshold-based service checks."""
        base_tags = [
            'plain_device:'+self.safe_device(device),
            'device:'+device,
            'sensor:smartmontools',
        ]
        if not os.path.exists(device):
            # Device node missing: report UNKNOWN and skip the smartctl call.
            self.service_check('smartmontools.metric_availability', AgentCheck.UNKNOWN, tags=base_tags)
            return
        self.service_check('smartmontools.metric_availability', AgentCheck.OK, tags=base_tags)
        cmd = ['sudo', 'smartctl', '-A', device]
        output = subprocess.check_output(cmd)
        lines = output.splitlines()
        table_regex = re.compile('\s+')
        field_names = [name.lower() for name in table_regex.split(self.HEADER_LINE)]
        # Parse the attribute table into dicts keyed by the header columns.
        # NOTE(review): the slice starts AT the header line (its tokens become
        # a bogus first "reading", filtered out later by the metric-name
        # lookup) and the trailing -1 drops the last output line — confirm
        # both are intentional for the smartctl output format in use.
        readings = [dict(zip(field_names, table_regex.split(line.lstrip(), 10))) for line in lines[lines.index(self.HEADER_LINE):-1]]
        # Build lookup tables from the instance config: SMART attribute name
        # -> datadog metric name, and -> warn/critical thresholds.
        reading_to_metricname = dict()
        reading_to_alert_thresh = dict()
        for gauge in instance['gauges']:
            for smart_name in gauge['smart_names']:
                reading_to_metricname[smart_name] = gauge['metric']
                alert = {}
                if 'warn_past' in gauge:
                    alert['warn'] = gauge['warn_past']
                if 'critical_past' in gauge:
                    alert['critical'] = gauge['critical_past']
                reading_to_alert_thresh[smart_name] = alert
        for reading in readings:
            if reading['attribute_name'] in reading_to_metricname:
                value = float(reading['raw_value'])
                metric = reading_to_metricname[reading['attribute_name']]
                tags = base_tags + [
                    'name:'+reading['attribute_name'],
                    'type:'+reading['type'],
                    'updated:'+reading['updated'],
                ]
                self.gauge(metric, value, tags=tags)
                if reading['attribute_name'] in reading_to_alert_thresh:
                    thresholds = reading_to_alert_thresh[reading['attribute_name']]
                    # Default to OK; escalate if the raw value exceeds a
                    # configured threshold (missing thresholds never trip
                    # because .get defaults to the value itself).
                    check_status = AgentCheck.OK
                    if value > thresholds.get('critical', value):
                        check_status = AgentCheck.CRITICAL
                    if value > thresholds.get('warn', value):
                        check_status = AgentCheck.WARNING
                    self.service_check(metric, check_status, tags=tags)
        return readings
    def check(self, instance):
        # Agent entry point: check every configured device in turn.
        for device in instance['devices']:
            self.check_device(device, instance)
# Manual test harness: load the agent config and run the check once per
# configured instance.  Python 2 print-statement syntax — this file will
# not run under Python 3 without modification.
if __name__ == '__main__':
    check, instances = SMARTMonTools.from_yaml('/etc/dd-agent/conf.d/smartmontools.yaml')
    for instance in instances:
        print "\nRunning the check against instance: %s" % (instance['host'])
        print check.check(instance)
| [
"[email protected]"
] | |
f104a1ce45a161fb025eaf3a7093856ec10aab50 | 6ef3c589d4aa5190ba2d14599aa62df80ca55ef7 | /torchsketch/data/dataloaders/tu_berlin/__init__.py | b86112ab9d399b59573060f3e40ac1cd44072ddd | [
"MIT"
] | permissive | zhangshuaizxc/TCN | 8e5990ab3b7cc94b28b907394a7e49ebaf3201ea | d6c1b1081f1eb2b916d52c985969052d564bb101 | refs/heads/master | 2023-03-18T23:25:39.309968 | 2020-10-11T15:57:27 | 2020-10-11T15:57:27 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 69 | py | from torchsketch.data.dataloaders.tu_berlin.tu_berlin import TUBerlin | [
"[email protected]"
] | |
c6d4d6a078324a7655f9ad2f64d2c17965e7167f | d8ba86afcb19fcd549b3716b8c5dfe944e6ef120 | /tools/reference.py | 1fa190f77b0845d1f03fee1708dad8ce8d045f20 | [] | no_license | ortk95/astro-tools | 6a14683b614fa4990dfbc7dd820fdc9345238a24 | 040ecca96f4e05cf7ab965e36fb1785229f6ebf7 | refs/heads/master | 2021-06-18T22:14:30.849826 | 2021-02-19T15:02:40 | 2021-02-19T15:02:40 | 179,663,144 | 3 | 2 | null | 2021-02-19T15:02:40 | 2019-04-05T10:36:26 | Python | UTF-8 | Python | false | false | 999 | py | """
Module for dealing with reference information.
"""
import bibtexparser
def load_bib(path):
    """
    Load data from a bibtex file.

    Parameters
    ----------
    path : str
        Path to bibtex file.

    Returns
    -------
    dict
        Mapping of citation key to bibtex entry data.
    """
    # Read as UTF-8 explicitly: bibtex files routinely contain accented
    # author names, and open()'s default encoding is platform-dependent.
    with open(path, 'r', encoding='utf-8') as f:
        db = bibtexparser.load(f)
    return db.entries_dict
def get_author_str(db, et_al=3):
    """
    Produce an author string from a bibtex entry.

    Parameters
    ----------
    db : dict
        load_bib() result for a specific reference (must have an
        'author' field in "Surname, Initials and ..." form).
    et_al : int
        Maximum number of authors listed before collapsing to 'et al.'

    Returns
    -------
    str
    """
    # Keep only the surname (text before the first comma) of each author.
    surnames = [name.split(',')[0] for name in db['author'].split(' and ')]
    if len(surnames) > et_al:
        return surnames[0] + ' et al.'
    if len(surnames) <= 2:
        return ' and '.join(surnames)
    # Three or more (but within the et_al limit): comma list + final 'and'.
    return ', '.join(surnames[:-1]) + ' and ' + surnames[-1]
| [
"[email protected]"
] | |
32bef16f2bb7f09e9fe86ee453bb8741c6308919 | d75584ebe775cb113743c1482108b06b7d66c81b | /2020.03.31/servidor1.py | e6f564fe90ce1be6737e19148392b6f49e43c191 | [] | no_license | PabloLabriola/PNT2 | 0fad7f57b6af0cbac919c43536bec0d64e8ad467 | 90f30d50296e6e84bcd556571669bbf375e206ef | refs/heads/master | 2022-06-27T20:50:37.783833 | 2020-04-28T23:44:00 | 2020-04-28T23:44:00 | 261,536,006 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,416 | py | from flask import Flask, request, Response
# WSGI application object; every route handler below registers on it.
app = Flask(__name__)
@app.route('/')  # GET is the default method when none is given
def saludar():
    """Respond to GET / with a plain greeting."""
    return 'Hola mundo'
@app.route('/', methods=['POST'])
def saludar_servidor():
    """Acknowledge a POST greeting, logging it server-side."""
    print('me saludan')
    return 'Salude'
@app.route('/', methods=['DELETE'])
def borrar():
    """Handle DELETE /, logging the request and confirming deletion."""
    print('me borran')
    return 'Borrado'
@app.route('/', methods=['PUT'])
def modificar():
    """Handle PUT /, logging the request and confirming the update."""
    print('me modificaron')
    return 'Modificado'
@app.route('/mezclar', methods=['GET', 'POST'])
def mezcla():
    """Serve /mezclar for both GET and POST, reporting which one arrived."""
    print('El cliente me pidio un : ', request.method)
    # Same two responses as before, collapsed into a conditional expression.
    return 'Es un get\n' if request.method == 'GET' else 'Es un post\n'
@app.route('/arg')
def recibir_argumentos():
    """Log the query-string arguments; echo param1 if the client sent it."""
    print(request.args)
    param1 = request.args.get('param1')  # None when the key is absent
    if param1 is not None:
        print('El param1 es: ' + param1)
    return 'Recibido'
@app.route('/arg/<param1>/<param2>')
def recibir_amistoso(param1, param2):
    """Receive two values embedded in the URL path and log each one."""
    for valor in (param1, param2):
        print('Recibi ' + valor)
    return 'ok'
@app.route('/clientes', methods=['POST'])
def recibir_form():
    """Create a client from POSTed form data; 'apellido' is required.

    Returns 201 when the form carries 'apellido', 400 otherwise.
    """
    print(request.form)
    # Guard clause: reject early when the mandatory field is missing.
    if 'apellido' not in request.form:
        return Response('Falta apellido', status=400)
    print('El apellido es ' + request.form['apellido'])
    return Response('Cliente creado', status=201)
# Start Flask's built-in development server with default settings.
app.run()
| [
"[email protected]"
] | |
12bd2df28e3b95e8a3691692897d495b319e6b3c | 3c000380cbb7e8deb6abf9c6f3e29e8e89784830 | /venv/Lib/site-packages/cobra/modelimpl/cloud/appgwstatsag1mo.py | 624f1fef4c740619187e613162dca038053a6aa6 | [] | no_license | bkhoward/aciDOM | 91b0406f00da7aac413a81c8db2129b4bfc5497b | f2674456ecb19cf7299ef0c5a0887560b8b315d0 | refs/heads/master | 2023-03-27T23:37:02.836904 | 2021-03-26T22:07:54 | 2021-03-26T22:07:54 | 351,855,399 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 35,558 | py | # coding=UTF-8
# **********************************************************************
# Copyright (c) 2013-2020 Cisco Systems, Inc. All rights reserved
# written by zen warriors, do not modify!
# **********************************************************************
from cobra.mit.meta import ClassMeta
from cobra.mit.meta import StatsClassMeta
from cobra.mit.meta import CounterMeta
from cobra.mit.meta import PropMeta
from cobra.mit.meta import Category
from cobra.mit.meta import SourceRelationMeta
from cobra.mit.meta import NamedSourceRelationMeta
from cobra.mit.meta import TargetRelationMeta
from cobra.mit.meta import DeploymentPathMeta, DeploymentCategory
from cobra.model.category import MoCategory, PropCategory, CounterCategory
from cobra.mit.mo import Mo
# ##################################################
class AppGwStatsAg1mo(Mo):
"""
Mo doc not defined in techpub!!!
"""
meta = StatsClassMeta("cobra.model.cloud.AppGwStatsAg1mo", "Azure ALB front end stats")
counter = CounterMeta("CurrentConnections", CounterCategory.COUNTER, "count", "Azure LB existing connections")
counter._propRefs[PropCategory.IMPLICIT_CUMULATIVE] = "CurrentConnectionsCum"
counter._propRefs[PropCategory.IMPLICIT_PERIODIC] = "CurrentConnectionsPer"
counter._propRefs[PropCategory.IMPLICIT_SUSPECT] = "CurrentConnectionsSpct"
counter._propRefs[PropCategory.IMPLICIT_THRESHOLDED] = "CurrentConnectionsThr"
counter._propRefs[PropCategory.IMPLICIT_TREND_BASE] = "CurrentConnectionsTrBase"
counter._propRefs[PropCategory.IMPLICIT_TREND] = "CurrentConnectionsTr"
counter._propRefs[PropCategory.IMPLICIT_RATE] = "CurrentConnectionsRate"
meta._counters.append(counter)
counter = CounterMeta("FailedRequests", CounterCategory.COUNTER, "count", "Azure LB Failed Requests")
counter._propRefs[PropCategory.IMPLICIT_CUMULATIVE] = "FailedRequestsCum"
counter._propRefs[PropCategory.IMPLICIT_PERIODIC] = "FailedRequestsPer"
counter._propRefs[PropCategory.IMPLICIT_SUSPECT] = "FailedRequestsSpct"
counter._propRefs[PropCategory.IMPLICIT_THRESHOLDED] = "FailedRequestsThr"
counter._propRefs[PropCategory.IMPLICIT_TREND_BASE] = "FailedRequestsTrBase"
counter._propRefs[PropCategory.IMPLICIT_TREND] = "FailedRequestsTr"
counter._propRefs[PropCategory.IMPLICIT_RATE] = "FailedRequestsRate"
meta._counters.append(counter)
counter = CounterMeta("Throughput", CounterCategory.COUNTER, "count", "Azure LB Total Throughput")
counter._propRefs[PropCategory.IMPLICIT_CUMULATIVE] = "ThroughputCum"
counter._propRefs[PropCategory.IMPLICIT_PERIODIC] = "ThroughputPer"
counter._propRefs[PropCategory.IMPLICIT_SUSPECT] = "ThroughputSpct"
counter._propRefs[PropCategory.IMPLICIT_THRESHOLDED] = "ThroughputThr"
counter._propRefs[PropCategory.IMPLICIT_TREND_BASE] = "ThroughputTrBase"
counter._propRefs[PropCategory.IMPLICIT_TREND] = "ThroughputTr"
counter._propRefs[PropCategory.IMPLICIT_RATE] = "ThroughputRate"
meta._counters.append(counter)
counter = CounterMeta("TotalRequests", CounterCategory.COUNTER, "count", "Azure Native LB requests count")
counter._propRefs[PropCategory.IMPLICIT_CUMULATIVE] = "TotalRequestsCum"
counter._propRefs[PropCategory.IMPLICIT_PERIODIC] = "TotalRequestsPer"
counter._propRefs[PropCategory.IMPLICIT_SUSPECT] = "TotalRequestsSpct"
counter._propRefs[PropCategory.IMPLICIT_THRESHOLDED] = "TotalRequestsThr"
counter._propRefs[PropCategory.IMPLICIT_TREND_BASE] = "TotalRequestsTrBase"
counter._propRefs[PropCategory.IMPLICIT_TREND] = "TotalRequestsTr"
counter._propRefs[PropCategory.IMPLICIT_RATE] = "TotalRequestsRate"
meta._counters.append(counter)
meta.moClassName = "cloudAppGwStatsAg1mo"
meta.rnFormat = "CDcloudAppGwStatsAg1mo"
meta.category = MoCategory.STATS_CURRENT
meta.label = "current aggregated Azure ALB front end stats stats in 1 month"
meta.writeAccessMask = 0x1
meta.readAccessMask = 0x1
meta.isDomainable = False
meta.isReadOnly = True
meta.isConfigurable = False
meta.isDeletable = False
meta.isContextRoot = True
meta.parentClasses.add("cobra.model.fv.Tenant")
meta.parentClasses.add("cobra.model.fv.Ctx")
meta.superClasses.add("cobra.model.stats.Item")
meta.superClasses.add("cobra.model.stats.Curr")
meta.superClasses.add("cobra.model.cloud.AppGwStatsAg")
meta.rnPrefixes = [
('CDcloudAppGwStatsAg1mo', False),
]
prop = PropMeta("str", "CurrentConnectionsCum", "CurrentConnectionsCum", 55389, PropCategory.IMPLICIT_CUMULATIVE)
prop.label = "Azure LB existing connections cumulative"
prop.isOper = True
prop.isStats = True
meta.props.add("CurrentConnectionsCum", prop)
prop = PropMeta("str", "CurrentConnectionsPer", "CurrentConnectionsPer", 55390, PropCategory.IMPLICIT_PERIODIC)
prop.label = "Azure LB existing connections periodic"
prop.isOper = True
prop.isStats = True
meta.props.add("CurrentConnectionsPer", prop)
prop = PropMeta("str", "CurrentConnectionsRate", "CurrentConnectionsRate", 55395, PropCategory.IMPLICIT_RATE)
prop.label = "Azure LB existing connections rate"
prop.isOper = True
prop.isStats = True
meta.props.add("CurrentConnectionsRate", prop)
prop = PropMeta("str", "CurrentConnectionsSpct", "CurrentConnectionsSpct", 55391, PropCategory.IMPLICIT_SUSPECT)
prop.label = "Azure LB existing connections suspect count"
prop.isOper = True
prop.isStats = True
meta.props.add("CurrentConnectionsSpct", prop)
prop = PropMeta("str", "CurrentConnectionsThr", "CurrentConnectionsThr", 55392, PropCategory.IMPLICIT_THRESHOLDED)
prop.label = "Azure LB existing connections thresholded flags"
prop.isOper = True
prop.isStats = True
prop.defaultValue = 0
prop.defaultValueStr = "unspecified"
prop._addConstant("avgCrit", "avg-severity-critical", 2199023255552)
prop._addConstant("avgHigh", "avg-crossed-high-threshold", 68719476736)
prop._addConstant("avgLow", "avg-crossed-low-threshold", 137438953472)
prop._addConstant("avgMajor", "avg-severity-major", 1099511627776)
prop._addConstant("avgMinor", "avg-severity-minor", 549755813888)
prop._addConstant("avgRecovering", "avg-recovering", 34359738368)
prop._addConstant("avgWarn", "avg-severity-warning", 274877906944)
prop._addConstant("cumulativeCrit", "cumulative-severity-critical", 8192)
prop._addConstant("cumulativeHigh", "cumulative-crossed-high-threshold", 256)
prop._addConstant("cumulativeLow", "cumulative-crossed-low-threshold", 512)
prop._addConstant("cumulativeMajor", "cumulative-severity-major", 4096)
prop._addConstant("cumulativeMinor", "cumulative-severity-minor", 2048)
prop._addConstant("cumulativeRecovering", "cumulative-recovering", 128)
prop._addConstant("cumulativeWarn", "cumulative-severity-warning", 1024)
prop._addConstant("lastReadingCrit", "lastreading-severity-critical", 64)
prop._addConstant("lastReadingHigh", "lastreading-crossed-high-threshold", 2)
prop._addConstant("lastReadingLow", "lastreading-crossed-low-threshold", 4)
prop._addConstant("lastReadingMajor", "lastreading-severity-major", 32)
prop._addConstant("lastReadingMinor", "lastreading-severity-minor", 16)
prop._addConstant("lastReadingRecovering", "lastreading-recovering", 1)
prop._addConstant("lastReadingWarn", "lastreading-severity-warning", 8)
prop._addConstant("maxCrit", "max-severity-critical", 17179869184)
prop._addConstant("maxHigh", "max-crossed-high-threshold", 536870912)
prop._addConstant("maxLow", "max-crossed-low-threshold", 1073741824)
prop._addConstant("maxMajor", "max-severity-major", 8589934592)
prop._addConstant("maxMinor", "max-severity-minor", 4294967296)
prop._addConstant("maxRecovering", "max-recovering", 268435456)
prop._addConstant("maxWarn", "max-severity-warning", 2147483648)
prop._addConstant("minCrit", "min-severity-critical", 134217728)
prop._addConstant("minHigh", "min-crossed-high-threshold", 4194304)
prop._addConstant("minLow", "min-crossed-low-threshold", 8388608)
prop._addConstant("minMajor", "min-severity-major", 67108864)
prop._addConstant("minMinor", "min-severity-minor", 33554432)
prop._addConstant("minRecovering", "min-recovering", 2097152)
prop._addConstant("minWarn", "min-severity-warning", 16777216)
prop._addConstant("periodicCrit", "periodic-severity-critical", 1048576)
prop._addConstant("periodicHigh", "periodic-crossed-high-threshold", 32768)
prop._addConstant("periodicLow", "periodic-crossed-low-threshold", 65536)
prop._addConstant("periodicMajor", "periodic-severity-major", 524288)
prop._addConstant("periodicMinor", "periodic-severity-minor", 262144)
prop._addConstant("periodicRecovering", "periodic-recovering", 16384)
prop._addConstant("periodicWarn", "periodic-severity-warning", 131072)
prop._addConstant("rateCrit", "rate-severity-critical", 36028797018963968)
prop._addConstant("rateHigh", "rate-crossed-high-threshold", 1125899906842624)
prop._addConstant("rateLow", "rate-crossed-low-threshold", 2251799813685248)
prop._addConstant("rateMajor", "rate-severity-major", 18014398509481984)
prop._addConstant("rateMinor", "rate-severity-minor", 9007199254740992)
prop._addConstant("rateRecovering", "rate-recovering", 562949953421312)
prop._addConstant("rateWarn", "rate-severity-warning", 4503599627370496)
prop._addConstant("trendCrit", "trend-severity-critical", 281474976710656)
prop._addConstant("trendHigh", "trend-crossed-high-threshold", 8796093022208)
prop._addConstant("trendLow", "trend-crossed-low-threshold", 17592186044416)
prop._addConstant("trendMajor", "trend-severity-major", 140737488355328)
prop._addConstant("trendMinor", "trend-severity-minor", 70368744177664)
prop._addConstant("trendRecovering", "trend-recovering", 4398046511104)
prop._addConstant("trendWarn", "trend-severity-warning", 35184372088832)
prop._addConstant("unspecified", None, 0)
meta.props.add("CurrentConnectionsThr", prop)
prop = PropMeta("str", "CurrentConnectionsTr", "CurrentConnectionsTr", 55394, PropCategory.IMPLICIT_TREND)
prop.label = "Azure LB existing connections trend"
prop.isOper = True
prop.isStats = True
meta.props.add("CurrentConnectionsTr", prop)
prop = PropMeta("str", "CurrentConnectionsTrBase", "CurrentConnectionsTrBase", 55393, PropCategory.IMPLICIT_TREND_BASE)
prop.label = "Azure LB existing connections trend baseline"
prop.isOper = True
prop.isStats = True
meta.props.add("CurrentConnectionsTrBase", prop)
prop = PropMeta("str", "FailedRequestsCum", "FailedRequestsCum", 55423, PropCategory.IMPLICIT_CUMULATIVE)
prop.label = "Azure LB Failed Requests cumulative"
prop.isOper = True
prop.isStats = True
meta.props.add("FailedRequestsCum", prop)
prop = PropMeta("str", "FailedRequestsPer", "FailedRequestsPer", 55424, PropCategory.IMPLICIT_PERIODIC)
prop.label = "Azure LB Failed Requests periodic"
prop.isOper = True
prop.isStats = True
meta.props.add("FailedRequestsPer", prop)
prop = PropMeta("str", "FailedRequestsRate", "FailedRequestsRate", 55429, PropCategory.IMPLICIT_RATE)
prop.label = "Azure LB Failed Requests rate"
prop.isOper = True
prop.isStats = True
meta.props.add("FailedRequestsRate", prop)
prop = PropMeta("str", "FailedRequestsSpct", "FailedRequestsSpct", 55425, PropCategory.IMPLICIT_SUSPECT)
prop.label = "Azure LB Failed Requests suspect count"
prop.isOper = True
prop.isStats = True
meta.props.add("FailedRequestsSpct", prop)
prop = PropMeta("str", "FailedRequestsThr", "FailedRequestsThr", 55426, PropCategory.IMPLICIT_THRESHOLDED)
prop.label = "Azure LB Failed Requests thresholded flags"
prop.isOper = True
prop.isStats = True
prop.defaultValue = 0
prop.defaultValueStr = "unspecified"
prop._addConstant("avgCrit", "avg-severity-critical", 2199023255552)
prop._addConstant("avgHigh", "avg-crossed-high-threshold", 68719476736)
prop._addConstant("avgLow", "avg-crossed-low-threshold", 137438953472)
prop._addConstant("avgMajor", "avg-severity-major", 1099511627776)
prop._addConstant("avgMinor", "avg-severity-minor", 549755813888)
prop._addConstant("avgRecovering", "avg-recovering", 34359738368)
prop._addConstant("avgWarn", "avg-severity-warning", 274877906944)
prop._addConstant("cumulativeCrit", "cumulative-severity-critical", 8192)
prop._addConstant("cumulativeHigh", "cumulative-crossed-high-threshold", 256)
prop._addConstant("cumulativeLow", "cumulative-crossed-low-threshold", 512)
prop._addConstant("cumulativeMajor", "cumulative-severity-major", 4096)
prop._addConstant("cumulativeMinor", "cumulative-severity-minor", 2048)
prop._addConstant("cumulativeRecovering", "cumulative-recovering", 128)
prop._addConstant("cumulativeWarn", "cumulative-severity-warning", 1024)
prop._addConstant("lastReadingCrit", "lastreading-severity-critical", 64)
prop._addConstant("lastReadingHigh", "lastreading-crossed-high-threshold", 2)
prop._addConstant("lastReadingLow", "lastreading-crossed-low-threshold", 4)
prop._addConstant("lastReadingMajor", "lastreading-severity-major", 32)
prop._addConstant("lastReadingMinor", "lastreading-severity-minor", 16)
prop._addConstant("lastReadingRecovering", "lastreading-recovering", 1)
prop._addConstant("lastReadingWarn", "lastreading-severity-warning", 8)
prop._addConstant("maxCrit", "max-severity-critical", 17179869184)
prop._addConstant("maxHigh", "max-crossed-high-threshold", 536870912)
prop._addConstant("maxLow", "max-crossed-low-threshold", 1073741824)
prop._addConstant("maxMajor", "max-severity-major", 8589934592)
prop._addConstant("maxMinor", "max-severity-minor", 4294967296)
prop._addConstant("maxRecovering", "max-recovering", 268435456)
prop._addConstant("maxWarn", "max-severity-warning", 2147483648)
prop._addConstant("minCrit", "min-severity-critical", 134217728)
prop._addConstant("minHigh", "min-crossed-high-threshold", 4194304)
prop._addConstant("minLow", "min-crossed-low-threshold", 8388608)
prop._addConstant("minMajor", "min-severity-major", 67108864)
prop._addConstant("minMinor", "min-severity-minor", 33554432)
prop._addConstant("minRecovering", "min-recovering", 2097152)
prop._addConstant("minWarn", "min-severity-warning", 16777216)
prop._addConstant("periodicCrit", "periodic-severity-critical", 1048576)
prop._addConstant("periodicHigh", "periodic-crossed-high-threshold", 32768)
prop._addConstant("periodicLow", "periodic-crossed-low-threshold", 65536)
prop._addConstant("periodicMajor", "periodic-severity-major", 524288)
prop._addConstant("periodicMinor", "periodic-severity-minor", 262144)
prop._addConstant("periodicRecovering", "periodic-recovering", 16384)
prop._addConstant("periodicWarn", "periodic-severity-warning", 131072)
prop._addConstant("rateCrit", "rate-severity-critical", 36028797018963968)
prop._addConstant("rateHigh", "rate-crossed-high-threshold", 1125899906842624)
prop._addConstant("rateLow", "rate-crossed-low-threshold", 2251799813685248)
prop._addConstant("rateMajor", "rate-severity-major", 18014398509481984)
prop._addConstant("rateMinor", "rate-severity-minor", 9007199254740992)
prop._addConstant("rateRecovering", "rate-recovering", 562949953421312)
prop._addConstant("rateWarn", "rate-severity-warning", 4503599627370496)
prop._addConstant("trendCrit", "trend-severity-critical", 281474976710656)
prop._addConstant("trendHigh", "trend-crossed-high-threshold", 8796093022208)
prop._addConstant("trendLow", "trend-crossed-low-threshold", 17592186044416)
prop._addConstant("trendMajor", "trend-severity-major", 140737488355328)
prop._addConstant("trendMinor", "trend-severity-minor", 70368744177664)
prop._addConstant("trendRecovering", "trend-recovering", 4398046511104)
prop._addConstant("trendWarn", "trend-severity-warning", 35184372088832)
prop._addConstant("unspecified", None, 0)
meta.props.add("FailedRequestsThr", prop)
prop = PropMeta("str", "FailedRequestsTr", "FailedRequestsTr", 55428, PropCategory.IMPLICIT_TREND)
prop.label = "Azure LB Failed Requests trend"
prop.isOper = True
prop.isStats = True
meta.props.add("FailedRequestsTr", prop)
prop = PropMeta("str", "FailedRequestsTrBase", "FailedRequestsTrBase", 55427, PropCategory.IMPLICIT_TREND_BASE)
prop.label = "Azure LB Failed Requests trend baseline"
prop.isOper = True
prop.isStats = True
meta.props.add("FailedRequestsTrBase", prop)
prop = PropMeta("str", "ThroughputCum", "ThroughputCum", 55457, PropCategory.IMPLICIT_CUMULATIVE)
prop.label = "Azure LB Total Throughput cumulative"
prop.isOper = True
prop.isStats = True
meta.props.add("ThroughputCum", prop)
prop = PropMeta("str", "ThroughputPer", "ThroughputPer", 55458, PropCategory.IMPLICIT_PERIODIC)
prop.label = "Azure LB Total Throughput periodic"
prop.isOper = True
prop.isStats = True
meta.props.add("ThroughputPer", prop)
prop = PropMeta("str", "ThroughputRate", "ThroughputRate", 55463, PropCategory.IMPLICIT_RATE)
prop.label = "Azure LB Total Throughput rate"
prop.isOper = True
prop.isStats = True
meta.props.add("ThroughputRate", prop)
prop = PropMeta("str", "ThroughputSpct", "ThroughputSpct", 55459, PropCategory.IMPLICIT_SUSPECT)
prop.label = "Azure LB Total Throughput suspect count"
prop.isOper = True
prop.isStats = True
meta.props.add("ThroughputSpct", prop)
prop = PropMeta("str", "ThroughputThr", "ThroughputThr", 55460, PropCategory.IMPLICIT_THRESHOLDED)
prop.label = "Azure LB Total Throughput thresholded flags"
prop.isOper = True
prop.isStats = True
prop.defaultValue = 0
prop.defaultValueStr = "unspecified"
prop._addConstant("avgCrit", "avg-severity-critical", 2199023255552)
prop._addConstant("avgHigh", "avg-crossed-high-threshold", 68719476736)
prop._addConstant("avgLow", "avg-crossed-low-threshold", 137438953472)
prop._addConstant("avgMajor", "avg-severity-major", 1099511627776)
prop._addConstant("avgMinor", "avg-severity-minor", 549755813888)
prop._addConstant("avgRecovering", "avg-recovering", 34359738368)
prop._addConstant("avgWarn", "avg-severity-warning", 274877906944)
prop._addConstant("cumulativeCrit", "cumulative-severity-critical", 8192)
prop._addConstant("cumulativeHigh", "cumulative-crossed-high-threshold", 256)
prop._addConstant("cumulativeLow", "cumulative-crossed-low-threshold", 512)
prop._addConstant("cumulativeMajor", "cumulative-severity-major", 4096)
prop._addConstant("cumulativeMinor", "cumulative-severity-minor", 2048)
prop._addConstant("cumulativeRecovering", "cumulative-recovering", 128)
prop._addConstant("cumulativeWarn", "cumulative-severity-warning", 1024)
prop._addConstant("lastReadingCrit", "lastreading-severity-critical", 64)
prop._addConstant("lastReadingHigh", "lastreading-crossed-high-threshold", 2)
prop._addConstant("lastReadingLow", "lastreading-crossed-low-threshold", 4)
prop._addConstant("lastReadingMajor", "lastreading-severity-major", 32)
prop._addConstant("lastReadingMinor", "lastreading-severity-minor", 16)
prop._addConstant("lastReadingRecovering", "lastreading-recovering", 1)
prop._addConstant("lastReadingWarn", "lastreading-severity-warning", 8)
prop._addConstant("maxCrit", "max-severity-critical", 17179869184)
prop._addConstant("maxHigh", "max-crossed-high-threshold", 536870912)
prop._addConstant("maxLow", "max-crossed-low-threshold", 1073741824)
prop._addConstant("maxMajor", "max-severity-major", 8589934592)
prop._addConstant("maxMinor", "max-severity-minor", 4294967296)
prop._addConstant("maxRecovering", "max-recovering", 268435456)
prop._addConstant("maxWarn", "max-severity-warning", 2147483648)
prop._addConstant("minCrit", "min-severity-critical", 134217728)
prop._addConstant("minHigh", "min-crossed-high-threshold", 4194304)
prop._addConstant("minLow", "min-crossed-low-threshold", 8388608)
prop._addConstant("minMajor", "min-severity-major", 67108864)
prop._addConstant("minMinor", "min-severity-minor", 33554432)
prop._addConstant("minRecovering", "min-recovering", 2097152)
prop._addConstant("minWarn", "min-severity-warning", 16777216)
prop._addConstant("periodicCrit", "periodic-severity-critical", 1048576)
prop._addConstant("periodicHigh", "periodic-crossed-high-threshold", 32768)
prop._addConstant("periodicLow", "periodic-crossed-low-threshold", 65536)
prop._addConstant("periodicMajor", "periodic-severity-major", 524288)
prop._addConstant("periodicMinor", "periodic-severity-minor", 262144)
prop._addConstant("periodicRecovering", "periodic-recovering", 16384)
prop._addConstant("periodicWarn", "periodic-severity-warning", 131072)
prop._addConstant("rateCrit", "rate-severity-critical", 36028797018963968)
prop._addConstant("rateHigh", "rate-crossed-high-threshold", 1125899906842624)
prop._addConstant("rateLow", "rate-crossed-low-threshold", 2251799813685248)
prop._addConstant("rateMajor", "rate-severity-major", 18014398509481984)
prop._addConstant("rateMinor", "rate-severity-minor", 9007199254740992)
prop._addConstant("rateRecovering", "rate-recovering", 562949953421312)
prop._addConstant("rateWarn", "rate-severity-warning", 4503599627370496)
prop._addConstant("trendCrit", "trend-severity-critical", 281474976710656)
prop._addConstant("trendHigh", "trend-crossed-high-threshold", 8796093022208)
prop._addConstant("trendLow", "trend-crossed-low-threshold", 17592186044416)
prop._addConstant("trendMajor", "trend-severity-major", 140737488355328)
prop._addConstant("trendMinor", "trend-severity-minor", 70368744177664)
prop._addConstant("trendRecovering", "trend-recovering", 4398046511104)
prop._addConstant("trendWarn", "trend-severity-warning", 35184372088832)
prop._addConstant("unspecified", None, 0)
meta.props.add("ThroughputThr", prop)
prop = PropMeta("str", "ThroughputTr", "ThroughputTr", 55462, PropCategory.IMPLICIT_TREND)
prop.label = "Azure LB Total Throughput trend"
prop.isOper = True
prop.isStats = True
meta.props.add("ThroughputTr", prop)
prop = PropMeta("str", "ThroughputTrBase", "ThroughputTrBase", 55461, PropCategory.IMPLICIT_TREND_BASE)
prop.label = "Azure LB Total Throughput trend baseline"
prop.isOper = True
prop.isStats = True
meta.props.add("ThroughputTrBase", prop)
prop = PropMeta("str", "TotalRequestsCum", "TotalRequestsCum", 55491, PropCategory.IMPLICIT_CUMULATIVE)
prop.label = "Azure Native LB requests count cumulative"
prop.isOper = True
prop.isStats = True
meta.props.add("TotalRequestsCum", prop)
prop = PropMeta("str", "TotalRequestsPer", "TotalRequestsPer", 55492, PropCategory.IMPLICIT_PERIODIC)
prop.label = "Azure Native LB requests count periodic"
prop.isOper = True
prop.isStats = True
meta.props.add("TotalRequestsPer", prop)
prop = PropMeta("str", "TotalRequestsRate", "TotalRequestsRate", 55497, PropCategory.IMPLICIT_RATE)
prop.label = "Azure Native LB requests count rate"
prop.isOper = True
prop.isStats = True
meta.props.add("TotalRequestsRate", prop)
prop = PropMeta("str", "TotalRequestsSpct", "TotalRequestsSpct", 55493, PropCategory.IMPLICIT_SUSPECT)
prop.label = "Azure Native LB requests count suspect count"
prop.isOper = True
prop.isStats = True
meta.props.add("TotalRequestsSpct", prop)
prop = PropMeta("str", "TotalRequestsThr", "TotalRequestsThr", 55494, PropCategory.IMPLICIT_THRESHOLDED)
prop.label = "Azure Native LB requests count thresholded flags"
prop.isOper = True
prop.isStats = True
prop.defaultValue = 0
prop.defaultValueStr = "unspecified"
prop._addConstant("avgCrit", "avg-severity-critical", 2199023255552)
prop._addConstant("avgHigh", "avg-crossed-high-threshold", 68719476736)
prop._addConstant("avgLow", "avg-crossed-low-threshold", 137438953472)
prop._addConstant("avgMajor", "avg-severity-major", 1099511627776)
prop._addConstant("avgMinor", "avg-severity-minor", 549755813888)
prop._addConstant("avgRecovering", "avg-recovering", 34359738368)
prop._addConstant("avgWarn", "avg-severity-warning", 274877906944)
prop._addConstant("cumulativeCrit", "cumulative-severity-critical", 8192)
prop._addConstant("cumulativeHigh", "cumulative-crossed-high-threshold", 256)
prop._addConstant("cumulativeLow", "cumulative-crossed-low-threshold", 512)
prop._addConstant("cumulativeMajor", "cumulative-severity-major", 4096)
prop._addConstant("cumulativeMinor", "cumulative-severity-minor", 2048)
prop._addConstant("cumulativeRecovering", "cumulative-recovering", 128)
prop._addConstant("cumulativeWarn", "cumulative-severity-warning", 1024)
prop._addConstant("lastReadingCrit", "lastreading-severity-critical", 64)
prop._addConstant("lastReadingHigh", "lastreading-crossed-high-threshold", 2)
prop._addConstant("lastReadingLow", "lastreading-crossed-low-threshold", 4)
prop._addConstant("lastReadingMajor", "lastreading-severity-major", 32)
prop._addConstant("lastReadingMinor", "lastreading-severity-minor", 16)
prop._addConstant("lastReadingRecovering", "lastreading-recovering", 1)
prop._addConstant("lastReadingWarn", "lastreading-severity-warning", 8)
prop._addConstant("maxCrit", "max-severity-critical", 17179869184)
prop._addConstant("maxHigh", "max-crossed-high-threshold", 536870912)
prop._addConstant("maxLow", "max-crossed-low-threshold", 1073741824)
prop._addConstant("maxMajor", "max-severity-major", 8589934592)
prop._addConstant("maxMinor", "max-severity-minor", 4294967296)
prop._addConstant("maxRecovering", "max-recovering", 268435456)
prop._addConstant("maxWarn", "max-severity-warning", 2147483648)
prop._addConstant("minCrit", "min-severity-critical", 134217728)
prop._addConstant("minHigh", "min-crossed-high-threshold", 4194304)
prop._addConstant("minLow", "min-crossed-low-threshold", 8388608)
prop._addConstant("minMajor", "min-severity-major", 67108864)
prop._addConstant("minMinor", "min-severity-minor", 33554432)
prop._addConstant("minRecovering", "min-recovering", 2097152)
prop._addConstant("minWarn", "min-severity-warning", 16777216)
prop._addConstant("periodicCrit", "periodic-severity-critical", 1048576)
prop._addConstant("periodicHigh", "periodic-crossed-high-threshold", 32768)
prop._addConstant("periodicLow", "periodic-crossed-low-threshold", 65536)
prop._addConstant("periodicMajor", "periodic-severity-major", 524288)
prop._addConstant("periodicMinor", "periodic-severity-minor", 262144)
prop._addConstant("periodicRecovering", "periodic-recovering", 16384)
prop._addConstant("periodicWarn", "periodic-severity-warning", 131072)
prop._addConstant("rateCrit", "rate-severity-critical", 36028797018963968)
prop._addConstant("rateHigh", "rate-crossed-high-threshold", 1125899906842624)
prop._addConstant("rateLow", "rate-crossed-low-threshold", 2251799813685248)
prop._addConstant("rateMajor", "rate-severity-major", 18014398509481984)
prop._addConstant("rateMinor", "rate-severity-minor", 9007199254740992)
prop._addConstant("rateRecovering", "rate-recovering", 562949953421312)
prop._addConstant("rateWarn", "rate-severity-warning", 4503599627370496)
prop._addConstant("trendCrit", "trend-severity-critical", 281474976710656)
prop._addConstant("trendHigh", "trend-crossed-high-threshold", 8796093022208)
prop._addConstant("trendLow", "trend-crossed-low-threshold", 17592186044416)
prop._addConstant("trendMajor", "trend-severity-major", 140737488355328)
prop._addConstant("trendMinor", "trend-severity-minor", 70368744177664)
prop._addConstant("trendRecovering", "trend-recovering", 4398046511104)
prop._addConstant("trendWarn", "trend-severity-warning", 35184372088832)
prop._addConstant("unspecified", None, 0)
meta.props.add("TotalRequestsThr", prop)
prop = PropMeta("str", "TotalRequestsTr", "TotalRequestsTr", 55496, PropCategory.IMPLICIT_TREND)
prop.label = "Azure Native LB requests count trend"
prop.isOper = True
prop.isStats = True
meta.props.add("TotalRequestsTr", prop)
# NOTE(review): tail of a generated Cisco ACI "cobra" model meta definition
# (stats MO for Azure Native LB request counts). Auto-generated — edit the
# generator, not this file. Each stanza registers one property on `meta`.
prop = PropMeta("str", "TotalRequestsTrBase", "TotalRequestsTrBase", 55495, PropCategory.IMPLICIT_TREND_BASE)
prop.label = "Azure Native LB requests count trend baseline"
prop.isOper = True
prop.isStats = True
meta.props.add("TotalRequestsTrBase", prop)
# Standard implicit/admin properties shared by every MO follow.
prop = PropMeta("str", "childAction", "childAction", 4, PropCategory.CHILD_ACTION)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop._addConstant("deleteAll", "deleteall", 16384)
prop._addConstant("deleteNonPresent", "deletenonpresent", 8192)
prop._addConstant("ignore", "ignore", 4096)
meta.props.add("childAction", prop)
prop = PropMeta("str", "cnt", "cnt", 16212, PropCategory.REGULAR)
prop.label = "Number of Collections During this Interval"
prop.isImplicit = True
prop.isAdmin = True
meta.props.add("cnt", prop)
prop = PropMeta("str", "dn", "dn", 1, PropCategory.DN)
prop.label = "None"
prop.isDn = True
prop.isImplicit = True
prop.isAdmin = True
prop.isCreateOnly = True
meta.props.add("dn", prop)
prop = PropMeta("str", "lastCollOffset", "lastCollOffset", 111, PropCategory.REGULAR)
prop.label = "Collection Length"
prop.isImplicit = True
prop.isAdmin = True
meta.props.add("lastCollOffset", prop)
prop = PropMeta("str", "modTs", "modTs", 7, PropCategory.REGULAR)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop.defaultValue = 0
prop.defaultValueStr = "never"
prop._addConstant("never", "never", 0)
meta.props.add("modTs", prop)
prop = PropMeta("str", "repIntvEnd", "repIntvEnd", 110, PropCategory.REGULAR)
prop.label = "Reporting End Time"
prop.isImplicit = True
prop.isAdmin = True
meta.props.add("repIntvEnd", prop)
prop = PropMeta("str", "repIntvStart", "repIntvStart", 109, PropCategory.REGULAR)
prop.label = "Reporting Start Time"
prop.isImplicit = True
prop.isAdmin = True
meta.props.add("repIntvStart", prop)
prop = PropMeta("str", "rn", "rn", 2, PropCategory.RN)
prop.label = "None"
prop.isRn = True
prop.isImplicit = True
prop.isAdmin = True
prop.isCreateOnly = True
meta.props.add("rn", prop)
prop = PropMeta("str", "status", "status", 3, PropCategory.STATUS)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop._addConstant("created", "created", 2)
prop._addConstant("deleted", "deleted", 8)
prop._addConstant("modified", "modified", 4)
meta.props.add("status", prop)
# Deployment Meta
# Registered deployment-query paths: how the APIC resolves this MO's
# deployment relative to tenant/VRF/cloud objects.
meta.deploymentQuery = True
meta.deploymentType = "Ancestor"
meta.deploymentQueryPaths.append(DeploymentPathMeta("FvTenantToHcloudIgw", "Tenant to IGW", "cobra.model.hcloud.SecurityGroup"))
meta.deploymentQueryPaths.append(DeploymentPathMeta("FvTenantToHcloudSecurityGroup", "Tenant to Security Group", "cobra.model.hcloud.SecurityGroup"))
meta.deploymentQueryPaths.append(DeploymentPathMeta("FvTenantToVzCPIf", "Tenant to vzCPIf", "cobra.model.vz.CPIf"))
meta.deploymentQueryPaths.append(DeploymentPathMeta("FvCtxToHcloudIgw", "From fv:Ctx to hcloud:Igw", "cobra.model.hcloud.Igw"))
meta.deploymentQueryPaths.append(DeploymentPathMeta("FvCtxToHcloudVgw", "From fv:Ctx to hcloud:Vgw", "cobra.model.hcloud.Vgw"))
meta.deploymentQueryPaths.append(DeploymentPathMeta("FvTenantToVzFilter", "From fvTenant to vzFilter", "cobra.model.vz.Filter"))
meta.deploymentQueryPaths.append(DeploymentPathMeta("FvTenantToVnsAbsGraph", "From fvTenant to vnsAbsGraph", "cobra.model.vns.AbsGraph"))
meta.deploymentQueryPaths.append(DeploymentPathMeta("FvTenantToCloudLB", "From fvTenant to cloudLB", "cobra.model.cloud.LB"))
meta.deploymentQueryPaths.append(DeploymentPathMeta("FvTenantToCloudZone", "From fvTenant to cloudZone", "cobra.model.cloud.Zone"))
meta.deploymentQueryPaths.append(DeploymentPathMeta("FvCtxToCloudExtEPg", "From fvCtx (VRF) to cloudExtEPg", "cobra.model.cloud.ExtEPg"))
meta.deploymentQueryPaths.append(DeploymentPathMeta("TenantToCloudCtxProfile", "Tenant to cloudCtxProfile", "cobra.model.cloud.CtxProfile"))
meta.deploymentQueryPaths.append(DeploymentPathMeta("TenantToVzBrCP", "Tenant to vzBrCP", "cobra.model.vz.BrCP"))
meta.deploymentQueryPaths.append(DeploymentPathMeta("TenantToHcloudCsr", "Tenant to hcloudCsr", "cobra.model.hcloud.Csr"))
meta.deploymentQueryPaths.append(DeploymentPathMeta("TenantToCloudExtEPg", "fv:Tenant to cloud:ExtEPg", "cobra.model.cloud.ExtEPg"))
meta.deploymentQueryPaths.append(DeploymentPathMeta("TenantToCloudRegion", "From fvTenant to cloudRegion", "cobra.model.cloud.Region"))
meta.deploymentQueryPaths.append(DeploymentPathMeta("FvCtxToCloudRegion", "From fvCtx (VRF) to CloudRegion", "cobra.model.cloud.Region"))
meta.deploymentQueryPaths.append(DeploymentPathMeta("FvCtxToHcloudCsr", "From fvCtx (VRF) to hcloudCsr (CSR)", "cobra.model.hcloud.Csr"))
meta.deploymentQueryPaths.append(DeploymentPathMeta("TenantToHcloudRegion", "Tenant to hcloudRegion", "cobra.model.hcloud.Region"))
meta.deploymentQueryPaths.append(DeploymentPathMeta("FvTenantToFvCtx", "fvTenant to fvCtx", "cobra.model.fv.Ctx"))
meta.deploymentQueryPaths.append(DeploymentPathMeta("TenantToHcloudCtx", "Tenant to Hcloud context", "cobra.model.hcloud.Ctx"))
meta.deploymentQueryPaths.append(DeploymentPathMeta("FvCtxToHCloudEndPoint", "From fvCtx (VRF) to hcloud:EndPoint", "cobra.model.hcloud.EndPoint"))
meta.deploymentQueryPaths.append(DeploymentPathMeta("FvCtxToHCloudCtx", "From fvCtx (VRF) to hcloudCtx (VPC)", "cobra.model.hcloud.Ctx"))
meta.deploymentQueryPaths.append(DeploymentPathMeta("FvCtxToCloudCtxProfile", "From fvCtx (VRF) to cloudCtxProfile", "cobra.model.cloud.CtxProfile"))
meta.deploymentQueryPaths.append(DeploymentPathMeta("FvCtxToCloudEPg", "From fvCtx (VRF) to cloud EPg", "cobra.model.cloud.EPg"))
meta.deploymentQueryPaths.append(DeploymentPathMeta("TenantToHCloudEndPoint", "Tenant to hcloudEndPoint", "cobra.model.hcloud.EndPoint"))
meta.deploymentQueryPaths.append(DeploymentPathMeta("TenantToCloudApp", "Tenant to Application profile", "cobra.model.cloud.App"))
meta.deploymentQueryPaths.append(DeploymentPathMeta("TenantToCloudEPg", "Tenant to cloud EPg", "cobra.model.cloud.EPg"))
meta.deploymentQueryPaths.append(DeploymentPathMeta("CtxToRegion", "Vrf to cloud Region", "cobra.model.cloud.Region"))
meta.deploymentQueryPaths.append(DeploymentPathMeta("CtxToNwIf", "Private Network to Interface", "cobra.model.nw.If"))
def __init__(self, parentMoOrDn, markDirty=True, **creationProps):
    """Construct the MO under *parentMoOrDn*; this class has no naming properties, so no naming values are passed to the base constructor."""
    Mo.__init__(self, parentMoOrDn, markDirty, **creationProps)
# End of package file
# ##################################################
| [
"[email protected]"
] | |
a585dce6aaf7faf5cec4990054ca90abc24548d0 | 4561a7c614f64113def3627c47adc4e8aa352f49 | /main.py | b3baed5e7b483ee930a15c63a725cf4e1cce03ff | [] | no_license | warmth88/Kiva.org-data-collection | a1aabca0109fcfe47dbd4e16d1f7c7cd8b1fc6cc | 18a0f4f3b9b22a46502287841de47e7b036858f2 | refs/heads/master | 2021-01-13T08:05:18.263010 | 2016-10-24T15:03:36 | 2016-10-24T15:03:36 | 71,745,651 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,735 | py | #!/usr/bin/python
#
# This script get loans general data borrowers and lender's details from Kiva.com and sort it. The output format is CSV.
#
# Code written by Huanxin Wu for Prof. Yao's project.
#
# Version: 1.0
# Update history: 2012-3-15, 19:34PM-22:12PM
# 2012-3-16, 11:54AM-16:22PM
# 2012-3-16, 20:12PM-22:07PM
# 2012-3-17, 13:25PM-16:12PM
# 2012-3-17, 19:20PM-21:50PM
# 2012-3-18, 16:04PM-21:34PM
# 2012-3-19, 10:30AM-12:10PM
# 2012-3-19, 13:30PM-16:23PM
# 2012-3-19, 19:03PM-20:32PM
# 2012-3-20, 13:12PM-17:23PM
# 2012-3-21, 19:53PM-22:01PM
import urllib
import json
import time
import csv
from borrower import *
from lender import *
import os
#******************** This part is for generating the borrower_list.********************
# 1. Number of loans
# 2. The popularity ranking of loans
# 3. For each loan
# a. Loan ID
# b. The percentage has been raised
# c. Total amount of loan requests
# d. If it is a group, the number of group members
# get total number of pages, page size and total loan number
# First call bootstraps paging info from the Kiva API (Python 2: urllib.urlopen).
fetch=urllib.urlopen("http://api.kivaws.org/v1/loans/search.json?sort_by=popularity&status=fundraising")
fetch=json.loads(fetch.read())
total_pages=fetch['paging']['pages']
page_size=fetch['paging']['page_size']
total_loan=fetch['paging']['total']
# create CSV files and set default parameters
rank=1
loan_ID=0
# NOTE(review): the chained "name=name=" assignment is redundant but harmless.
# Output file is stamped with yy/mm/dd/HH of the run.
name=name='borrower_list_'+time.strftime('%y')+time.strftime('%m')+time.strftime('%d')+time.strftime('%H')+'.csv'
# NOTE(review): hard-coded, user-specific output directory.
prefix='/Users/huanxin/data_fetching/data/'
borrower_list=csv.writer(open(prefix+name,'w'),delimiter=',')
initial=['Ranking','LoanID','PercentageFunded','Amount','NumberOfMembers']
borrower_list.writerow(initial)
# scanning the whole list of loans
for i in xrange(total_pages):
    fetch=urllib.urlopen("http://api.kivaws.org/v1/loans/search.json?sort_by=popularity"+"&status=fundraising"+"&page="+str(i+1))
    fetch=json.loads(fetch.read())
    for j in fetch['loans']:
        loan_ID=j['id']
        # One CSV row per loan: popularity rank, id, funded %, amount, group size.
        borrower_list.writerow([rank,loan_ID,'{:.2%}'.format(float(j['funded_amount'])/j['loan_amount']),j['loan_amount'],j['borrower_count']])
        rank+=1
    print i,'/',total_pages
print 'done successfully!'
#****************************************************************************************
# NOTE(review): the triple-quoted block below is disabled scaffolding for the
# borrower_page / lender_page generators; it never executes.
"""
#******************** This part is for generating the borrower_page.*********************
loan_ID=0
borrower_page=csv.writer(open('borrower_page.csv','w'),delimiter=',')
# scanning the whole list of loans
for i in xrange(total_pages):
    fetch=urllib.urlopen("http://api.kivaws.org/v1/loans/search.json?sort_by=popularity"+"&status=fundraising"+"&page="+str(i+1))
    fetch=json.loads(fetch.read())
    for j in xrange(page_size):
        k=fetch['loans'][j]
        loan_ID=k['id']
        borrower_list.writerow(borrower(ID))
    print i,'/',total_pages
#****************************************************************************************
#******************** This part is for generating the lender_page.***********************
lender_ID=''
lender_page=csv.writer(open('lender_page.csv','w'),delimiter=',')
# scanning all the whole list of lenders.
fetch=urllib.urlopen("http://api.kivaws.org/v1/lenders/search.json?")
fetch=json.loads(fetch.read())
total_pages=fetch['paging']['pages']
page_size=fetch['paging']['page_size']
for i in xrange(total_pages):
    fetch=urllib.urlopen("http://api.kivaws.org/v1/lenders/search.json?page="+str(i+1))
    fetch=json.loads(fetch.read())
    for j in xrange(page_size):
        k=fetch['lenders'][j]
        lender_ID=k['lender_id']
        lender_page.writerow(lenders(lender_ID))
    print i,'/',total_pages
#****************************************************************************************
"""
| [
"[email protected]"
] | |
9ac99583682707ce42b410dce70f47eb3166e25d | 528f15ea76b605ae343adb068cac2704ba0ced1d | /db_services.py | 683723062505ae10b9ee0e53fa660333beaabfd2 | [] | no_license | allwinwilliams/PythonWebCrawler | c6e39236a688327ae0e5e5a9da3e5516cc8f010a | 3d89f7ab2f6c676af7a78dd38aeb1b7e0de28ef3 | refs/heads/master | 2021-09-06T21:15:50.336215 | 2018-02-11T14:08:07 | 2018-02-11T14:08:07 | 114,160,373 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,082 | py | from neo4j.v1 import GraphDatabase
import json
"""
.. module:: module interfacing with database
.. note:: used to store and retrieve based on constraints
.. moduleauthor:: Allwin Williams <[email protected]>
"""
def close(self):
    # NOTE(review): module-level function written like a class method — it
    # expects *self* to be an object holding a ``_driver`` (presumably a
    # neo4j GraphDatabase driver wrapper); confirm the intended owner class.
    self._driver.close()
def classify_and_store(x):
    # TODO: unimplemented stub — classify article ``x`` and persist the result.
    pass
def get_categories(tx):
    """Print the name of every Category node in the graph.

    ``tx`` is a transaction/session exposing ``run(query)``.  The print
    calls use the parenthesized single-argument form, which behaves
    identically under Python 2 and keeps the module importable under
    Python 3 (the bare ``print`` statements were Python-2-only).
    """
    print("::::categories::::")
    for result in tx.run("MATCH (c:Category) RETURN c.name"):
        print(result["c.name"])
def get_articles(tx, topics=None, q=""):
    """Print every Article node in the graph.

    ``topics`` and ``q`` are accepted for future filtering but are currently
    unused.  Bug fix: ``topics`` previously defaulted to a shared mutable
    list (``topics=[]``); ``None`` is now the sentinel so callers can never
    observe state leaking between calls.
    """
    if topics is None:
        topics = []
    print(":::::articles:::::")
    for result in tx.run("MATCH (a:Article) RETURN a"):
        print(result["a"])
def insert_article(tx, article):
    """Create an Article node from ``article`` (needs .title/.url/.content).

    Bug fix: the original concatenated the values straight into the Cypher
    text, producing unquoted (syntactically invalid) Cypher and a query-
    injection hazard.  Query parameters fix both.
    """
    tx.run(
        "CREATE (a:Article {title: $title, url: $url, content: $content}) RETURN a",
        title=article.title,
        url=article.url,
        content=article.content,
    )
def insert_category(tx, category):
    """Create a Category node named ``category``.

    Bug fixes: ``%S`` is not a valid %-format code (the original raised
    ValueError at call time), and the query RETURNed the undefined variable
    ``a`` instead of ``c``.  A query parameter replaces the interpolation.
    """
    tx.run("CREATE (c:Category {name: $name}) RETURN c", name=category)
# driver = GraphDatabase.driver("bolt://localhost:7687", auth=("neo4j", "password"))
# with driver.session() as session:
# session.read_transaction(get_articles)
# session.read_transaction(get_categories)
| [
"[email protected]"
] | |
32b60736d8f63b4069a29b253ad0a0c167eccd34 | cf025ea3bf079748472557304a290593c753b884 | /Algorithm/SWEA/6297_자료구조_리스트,튜플_20.py | 2fc050e1f50c8852119f65a7407f301f011d9d79 | [] | no_license | Silentsoul04/my_software_study | 7dbb035ceea74f42c7ce2051b2320f6cae75ed88 | c27d33c57f59fe5244a1041c11bbd826dd481546 | refs/heads/master | 2023-03-29T02:43:40.861045 | 2019-07-10T08:09:55 | 2019-07-10T08:09:55 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 373 | py | # 콤마 (,)로 구분해 숫자를 입력하고, 입력된 숫자 중 홀수를 콤마(,)로 구분해 출력하는
# Exercise: read comma-separated numbers and print only the odd ones,
# again separated by ", ", using a list comprehension (prompt was Korean).
lis = input('').split(', ')
lis = list(map(int, lis))
lis = [a for a in lis if a % 2 != 0]
# Bug fix: the original loop decided the separator by comparing each
# element's *value* to the last element (``i != lis[-1]``), so any earlier
# element equal to the final one was wrongly printed with a newline instead
# of ", ".  str.join is position-based and immune to duplicates.  As before,
# nothing is printed when no odd numbers remain.
if lis:
    print(', '.join(str(i) for i in lis))
| [
"[email protected]"
] | |
a5a88d0d34300af6910c517ead1770df36c249c7 | d70d12d0a794ce7419cd08f4a3d09c142a56a72d | /Website/hockey_bet.py | 9d294608dd9f108423dbff3e27c271efc9a920e0 | [] | no_license | JTMachen/Sponsio-Computatum | 367d5c6d89ae0daf0ab5797e326a3fa2d3590187 | 12149fe153408e1ecfe1f414ea02bacc054127a4 | refs/heads/master | 2020-12-18T14:05:10.342478 | 2020-01-25T19:30:28 | 2020-01-25T19:30:28 | 235,410,744 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,027 | py | # Import required libraries
from datetime import datetime
from path import Path
import pandas as pd
from bs4 import BeautifulSoup as soup
from urllib.request import Request, urlopen as Ureq
import json
import warnings
warnings.filterwarnings('ignore')
def hockey_bet():
    """Scrape hockey-reference.com for the 2019-20 NHL schedule and return
    today's games as a DataFrame with 'Visitor' and 'Home' columns (last
    word of each team name only).

    NOTE(review): season URL is hard-coded; relies on module-level
    ``warnings.filterwarnings('ignore')`` to silence pandas
    chained-assignment warnings from the ``df['Visitor'][row_count] = ...``
    writes below — confirm those writes actually land on ``df``.
    """
    # Pull in url for schedule
    # TODO: Check date, and if it is not during the season, exit function
    url = 'https://www.hockey-reference.com/leagues/NHL_2020_games.html'
    # Run through BeautifulSoup steps
    uClient = Ureq(url)
    raw_content = uClient.read()
    page_soup = soup(raw_content, "html.parser")
    html = list(page_soup.children)[3]
    # Every schedule cell of interest carries class="left".
    game = html.findAll(class_ = 'left')
    game = [team.get_text() for team in game]
    drop_list = ['Date','Visitor','Home','Notes','']
    # Clean data
    game = [game for game in game if game not in drop_list]
    # Re-chunk the flat cell list into (date, visitor, home) triples.
    bin_len = 3
    start = 0
    end = start + bin_len
    week_list = []
    while end < (len(game) + 1):
        week = game[start:end]
        start = end
        end = start + bin_len
        week_list.append(week)
    df = pd.DataFrame(week_list)
    df.columns = ['Date','Visitor','Home']
    # Clean team names into readable format
    # (keep only the last word: column 2 of the split if present, else 1)
    row_count = 0
    visitor = df['Visitor'].str.split(" ", expand = True)
    home = df['Home'].str.split(" ", expand = True)
    while row_count < len(df):
        if visitor[2][row_count] == None:
            df['Visitor'][row_count] = visitor[1][row_count]
        elif visitor[2][row_count] != None:
            df['Visitor'][row_count] = visitor[2][row_count]
        if home[2][row_count] == None:
            df['Home'][row_count] = home[1][row_count]
        elif home[2][row_count] != None:
            df['Home'][row_count] = home[2][row_count]
        row_count += 1
    # Only select todays games
    todays_date = datetime.now().strftime('%Y-%m-%d')
    todays_games = df[df['Date'] == todays_date]
    todays_games = todays_games.reset_index()
    todays_games = todays_games[['Visitor','Home']]
return todays_games | [
"[email protected]"
] | |
61bb2ac703e693d0563de45b7bf8c0198ad19aff | 1ab15365c9a262c2d7d27d1df62f4bbb956c86c5 | /05. Capstone- Retrieving- Processing- and Visualizing Data with Python/PageRank/sprank.py | b9ab5a258ef639906d9c6364c0fb05bb8a341662 | [] | no_license | machinelearningxl/Coursera-Python-for-Everybody | c6162f6e1bbbd5b9d2249ad4a0b45f3d38a86bf3 | fa01a9967838c11332585d2eaae68fd44865037e | refs/heads/master | 2021-04-06T10:21:54.483178 | 2018-12-17T00:45:03 | 2018-12-17T00:45:03 | 125,256,652 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,173 | py | # @Author: Antero Maripuu Github:<machinelearningxl>
# @Date : 2018-12-17 00:10
# @Email: [email protected]
# @Project: Coursera
# @Filename : sprank.py.py
import sqlite3
# Load the link graph from the crawler database and keep only the strongly
# connected part (ids that both send and receive links).
conn = sqlite3.connect('spider.sqlite')
cur = conn.cursor()
# Find the ids that send out page rank - we only are interested
# in pages in the SCC that have in and out links
cur.execute('''SELECT DISTINCT from_id FROM Links''')
from_ids = list()
for row in cur:
    from_ids.append(row[0])
# Find the ids that receive page rank
to_ids = list()
links = list()
cur.execute('''SELECT DISTINCT from_id, to_id FROM Links''')
for row in cur:
    from_id = row[0]
    to_id = row[1]
    # Drop self-links and any edge whose endpoints are not both senders.
    if from_id == to_id: continue
    if from_id not in from_ids: continue
    if to_id not in from_ids: continue
    links.append(row)
    if to_id not in to_ids: to_ids.append(to_id)
# Get latest page ranks for strongly connected component
prev_ranks = dict()
for node in from_ids:
    cur.execute('''SELECT new_rank FROM Pages WHERE id = ?''', (node,))
    row = cur.fetchone()
    prev_ranks[node] = row[0]
# Empty input defaults to a single iteration.
sval = input('How many iterations:')
many = 1
if len(sval) > 0:
    many = int(sval)
# Sanity check
if len(prev_ranks) < 1:
    print("Nothing to page rank. Check data.")
    quit()
# Lets do Page Rank in memory so it is really fast
for i in range(many):
    # print prev_ranks.items()[:5]
    next_ranks = dict();
    total = 0.0
    # Total rank in the system; every node starts the round at zero.
    for (node, old_rank) in prev_ranks.items():
        total = total + old_rank
        next_ranks[node] = 0.0
    # print total
    # Find the number of outbound links and sent the page rank down each
    for (node, old_rank) in prev_ranks.items():
        # print node, old_rank
        give_ids = list()
        for (from_id, to_id) in links:
            if from_id != node: continue
            # print ' ',from_id,to_id
            if to_id not in to_ids: continue
            give_ids.append(to_id)
        if len(give_ids) < 1:
            continue
        # Split this node's rank evenly among its outbound targets.
        amount = old_rank / len(give_ids)
        # print node, old_rank,amount, give_ids
        for id in give_ids:
            next_ranks[id] = next_ranks[id] + amount
    # Rank "evaporated" by dangling nodes is redistributed evenly.
    newtot = 0
    for (node, next_rank) in next_ranks.items():
        newtot = newtot + next_rank
    evap = (total - newtot) / len(next_ranks)
    # print newtot, evap
    for node in next_ranks:
        next_ranks[node] = next_ranks[node] + evap
    newtot = 0
    for (node, next_rank) in next_ranks.items():
        newtot = newtot + next_rank
    # Compute the per-page average change from old rank to new rank
    # As indication of convergence of the algorithm
    totdiff = 0
    for (node, old_rank) in prev_ranks.items():
        new_rank = next_ranks[node]
        diff = abs(old_rank - new_rank)
        totdiff = totdiff + diff
    avediff = totdiff / len(prev_ranks)
    print(i + 1, avediff)
    # rotate
    prev_ranks = next_ranks
# Put the final ranks back into the database
# Bug fix: dict.items() is a view in Python 3 and cannot be sliced; the
# original print(next_ranks.items()[:5]) raised TypeError.  Wrap in list().
print(list(next_ranks.items())[:5])
cur.execute('''UPDATE Pages SET old_rank=new_rank''')
for (id, new_rank) in next_ranks.items():
    cur.execute('''UPDATE Pages SET new_rank=? WHERE id=?''', (new_rank, id))
conn.commit()
cur.close()
| [
"[email protected]"
] | |
561ff564fb0977c66e58acd7e4b919a5614a91d2 | f424c4aca9564ecbc345ef569bb54366a25f569b | /soundex.py | 6ebaaa4c910683add38232d427a22549b4ca96a0 | [] | no_license | dploy-team/ofac-search | a1460f9b92183cf647745a8fb2756923456b0d13 | 42eaecc37efb57e148b9f0215db10c514faa26db | refs/heads/master | 2023-05-13T21:58:13.563064 | 2019-09-20T02:21:13 | 2019-09-20T02:21:13 | 208,820,031 | 2 | 1 | null | 2023-05-01T21:15:25 | 2019-09-16T14:26:14 | Python | UTF-8 | Python | false | false | 2,286 | py | def Soundex(data):
    result = ""
    if (data):
        previousCode = ""
        currentCode = ""
        # Keep the first letter verbatim (upper-cased); only later letters
        # are encoded to digits.
        result = result + data[0].upper()
        for i in range(1, len(data)):
            currentCode = EncodeChar(data[i])
            # Skip a digit that merely repeats the previous code.
            if (currentCode != previousCode):
                result = result + currentCode
            if (len(result) == 4):
                break
            if (currentCode != ""):
                previousCode = currentCode
        # Right-pad with zeros to the canonical 4-character Soundex code.
        if (len(result) < 4):
            result = result + ('0' * (4 - len(result)))
    return result
def EncodeChar(c):
    """Return the Soundex digit for letter ``c`` ("" for letters with no code)."""
    letter = c.lower()
    code_groups = (
        ("1", ('b', 'f', 'p', 'v')),
        ("2", ('c', 'g', 'j', 'k', 'q', 's', 'x', 'z')),
        ("3", ('d', 't')),
        ("4", ('l',)),
        ("5", ('m', 'n')),
        ("6", ('r',)),
    )
    for digit, letters in code_groups:
        if letter in letters:
            return digit
    return ""
def Difference(data1, data2):
    """Return a 0-4 similarity score between the Soundex codes of the two
    strings, mirroring T-SQL DIFFERENCE (4 = identical codes).

    Bug fix: this routine was ported from C#, where ``Substring(start,
    length)`` takes a *length*; the port reused those numbers as Python
    slice *stops*.  ``soundex1[2:2]`` was therefore always the empty string,
    which every string "contains" at index 0, so the +2 branch fired
    unconditionally and the remaining comparisons were unreachable.  The
    slices now cover the intended digit substrings.
    """
    if (data1 == "" or data2 == ""):
        return 0
    result = 0
    soundex1 = Soundex(data1)
    soundex2 = Soundex(data2)
    if (soundex1 == soundex2):
        result = 4
    else:
        if (soundex1[0] == soundex2[0]):
            result = 1
        # C# Substring(1, 3) -> all three digits.
        sub1 = soundex1[1:4]
        if (Index(soundex2, sub1) > -1):
            result += 3
            return result
        # C# Substring(2, 2) -> last two digits (was the empty [2:2]).
        sub2 = soundex1[2:4]
        if (Index(soundex2, sub2) > -1):
            result += 2
            return result
        # C# Substring(1, 2) -> first two digits (was the single-char [1:2]).
        sub3 = soundex1[1:3]
        if (Index(soundex2, sub3) > -1):
            result += 2
            return result
        # Individual digits each contribute one point, cumulatively.
        sub4 = soundex1[1]
        if (Index(soundex2, sub4) > -1):
            result += 1
        sub5 = soundex1[2]
        if (Index(soundex2, sub5) > -1):
            result += 1
        sub6 = soundex1[3]
        if (Index(soundex2, sub6) > -1):
            result += 1
    return result
def ParsedDifference(data1, data2):
    """Normalize the 0-4 Difference() score onto [0, 1].

    Bug fix: a score of 0 previously matched no branch and the function
    silently returned None; it is now mapped to 0, same as a score of 1.
    """
    result = Difference(data1, data2)
    if (result <= 1):
        return 0
    if (result == 2):
        return 0.333333
    if (result == 3):
        return 0.666667
    if (result == 4):
        return 1
def Index(string, sub):
    """Return the index of ``sub`` in ``string``, or -1 when absent.

    str.find implements exactly this contract natively; the original
    wrapped str.index in a try/except ValueError to get the same result.
    """
    return string.find(sub)
if __name__ == "__main__":
    # Quick manual check: normalized Soundex similarity of two sample surnames.
    print(ParsedDifference('OREJUELA', 'SERCUBA'))
| [
"[email protected]"
] | |
ae40e4e4a638cc46946cb2d4bf4cfbebed4a2069 | 685872d988dc063c68a869b3b3dedfdc2d22775f | /Keg 2 p11.py | 78c85cd09d49b3983a1289ebddb775aaccedb9f3 | [] | no_license | L200180167/Praktikum_Algopro | 2b1fb811ae32710659a6424c55700dd5648f6e9c | bd0b325075689687388545e3e55e1986b817c4d5 | refs/heads/master | 2020-04-06T19:05:29.917344 | 2018-12-23T01:28:09 | 2018-12-23T01:28:09 | 157,725,271 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,679 | py | from Tkinter import *
# Main window and the two input rows.  UI strings are Indonesian:
# "Kalkulator Sederhana" = "Simple Calculator", "Angka" = "Number".
calc = Tk(className="Kalkulator Sederhana")
L1 = Label(calc, text='Angka 1', font="Times 14")
L1.place(x=10, y=10)
L2 = Label(calc, text='Angka 2', font="Times 14")
L2.place(x=10, y=60)
# Right-justified entries for the two operands.
E1 = Entry(calc, justify=RIGHT, font="Times 14")
E1.place(x=90, y=10, height=35, width=180)
E2 = Entry(calc, justify=RIGHT, font="Times 14")
E2.place(x=90, y=60, height=35, width=180)
def tambah():
    """Add the two entry values and show the sum in the result label."""
    total = float(E1.get()) + float(E2.get())
    L4.config(text=total)
def kurang():
    """Subtract the second entry from the first and show the difference.

    Fixes the copy-pasted docstring, which wrongly said the function adds
    ("menjumlahkan dua bilangan").
    """
    bil1 = float(E1.get())
    bil2 = float(E2.get())
    hasil = bil1 - bil2
    L4.config(text=hasil)
def kali():
    """Multiply the two entry values and show the product.

    Fixes the copy-pasted docstring, which wrongly said the function adds
    ("menjumlahkan dua bilangan").
    """
    bil1 = float(E1.get())
    bil2 = float(E2.get())
    hasil = bil1 * bil2
    L4.config(text=hasil)
def bagi():
    """Divide the first entry by the second and show the quotient.

    Fixes the copy-pasted docstring, which wrongly said the function adds
    ("menjumlahkan dua bilangan").  NOTE: a second operand of 0 still
    raises ZeroDivisionError, as in the original.
    """
    bil1 = float(E1.get())
    bil2 = float(E2.get())
    hasil = bil1 / bil2
    L4.config(text=hasil)
# Operator buttons wired to the handlers above, plus the result row
# ('Hasil' = 'Result' in Indonesian).
B1 = Button(calc, text="+", command=tambah, font="Times 14")
B1.place(x=40, y=110, height=30, width=30)
B2 = Button(calc, text="-", command=kurang, font="Times 14")
B2.place(x=95, y=110, height=30, width=30)
B3 = Button(calc, text="x", command=kali, font="Times 14")
B3.place(x=150, y=110, height=30, width=30)
B4 = Button(calc, text=":", command=bagi, font="Times 14")
B4.place(x=205, y=110, height=30, width=30)
L3 = Label(calc, text='Hasil', font="Times 14")
L3.place(x=20, y=170)
# Read-only result display updated by the four handlers.
L4 = Label(calc, font="Times 14", justify=CENTER, relief=RIDGE)
L4.place(x=90, y=165, height=35, width=180)
calc.mainloop()
| [
"[email protected]"
] | |
3307638c5f7d810e5eb2f680210a4550baeccb1e | 15f321878face2af9317363c5f6de1e5ddd9b749 | /solutions_python/Problem_130/109.py | 299fb5c1eb010bc2a3f47d88949f82537fdcd7a1 | [] | no_license | dr-dos-ok/Code_Jam_Webscraper | c06fd59870842664cd79c41eb460a09553e1c80a | 26a35bf114a3aa30fc4c677ef069d95f41665cc0 | refs/heads/master | 2020-04-06T08:17:40.938460 | 2018-10-14T10:12:47 | 2018-10-14T10:12:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 680 | py | #!/usr/bin/python2
#-*- coding: utf-8 -*-
import math
def readint(): return int(raw_input())
def readarray(f): return map(f, raw_input().split())
def can_win(N, n):
    """Floor of log2 of how many of the 2**N slots remain after removing n."""
    remaining = 2 ** N - n
    return int(math.log(remaining, 2))
def can_lose(N, n):
    """Floor of log2 of (n + 1); N is unused but kept for signature symmetry."""
    return int(math.log(n + 1, 2))
# For each case print a guaranteed count and a best-possible count
# (Python 2 print syntax; the trailing comma keeps the case label and the
# numbers on one line).
for test in range(readint()):
    print 'Case #%i:'%(test+1),
    N, P = readarray(int)
    if P == 2**N:
        # Full bracket: everyone can be beaten except the champion slot.
        print 2**N-1, 2**N-1
        continue
    # NOTE(review): log2-based bit-count bounds; 2**N - P >= 1 is guaranteed
    # here by the P == 2**N early-out above, so math.log is safe.
    max_ones_low = int(math.log(P, 2))
    min_ones_high = N - int(math.log(2**N - P, 2))
    goal_lose = min_ones_high
    goal_win = N - max_ones_low
    guaranteed = 2**goal_lose - 2 if goal_lose > 0 else 0
    could = 2**N - (2**goal_win if goal_win > 0 else 0)
    print guaranteed, could
| [
"[email protected]"
] | |
d12d74276acfd467a1c127c96f33fb575a541490 | 751cf52d62dba7d88387fc5734d6ee3954054fc2 | /romeo/robot/conv.py | 7580d5f5f3b7a223ca160fc7aea2f9066c60765d | [
"MIT"
] | permissive | nooralight/lab-computer-vision | 70a4d84a47a14dc8f5e9796ff6ccb59d4451ff27 | 0c3d89b35d0468d4d3cc5ce2653b3f0ac82652a9 | refs/heads/master | 2023-03-17T12:45:22.700237 | 2017-07-11T22:17:09 | 2017-07-11T22:17:09 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,469 | py | ###############################################################################
# Author: Daniil Budanov
# Contact: [email protected]
# Summer Internship - 2016
###############################################################################
# Title: conv.py
# Project: Romeo Robot
# Description:
# useful conversion macros
# convert distance <-> ticks <-> rotation angle
# Last Modified: 7.13.2016
###############################################################################
"""Module of spatial, angular, and tick conversion macros."""
# Number of ticks is int
# Distance in Meters
# Angle in Radians
import math
DIAM = .042  # wheel diameter in meters (42 mm wheels)
TPR = 70  # encoder ticks (edges) per wheel revolution
AXWID = .09  # axle width in meters; distance between wheel centers

### Spatial <-> Ticks
def dist2tick(dist):
    """Convert a linear distance (meters) into encoder ticks."""
    return dist * TPR / (math.pi * DIAM)

def tick2dist(tick):
    """Convert encoder ticks into a linear distance (meters)."""
    return DIAM * math.pi * tick / TPR

### Rotation <-> Ticks
## single wheel rotation (pivot about the stationary wheel)
def ang2tick_s(ang):
    """Convert a rotation angle (radians) into ticks for a single-wheel turn."""
    return dist2tick(ang * AXWID)

def tick2ang_s(ticks):
    """Convert ticks into a rotation angle (radians) for a single-wheel turn.

    Bug fix: the original called the non-existent ``ticks2dist`` (the helper
    is named ``tick2dist``), raising NameError on every call.
    """
    return tick2dist(ticks) / AXWID

## point rotation (both wheels, spin about the axle midpoint)
def ang2tick_p(ang):
    """Convert a rotation angle (radians) into ticks for an in-place point turn."""
    return dist2tick(ang * AXWID/2)

def tick2ang_p(ticks):
    """Convert ticks into a rotation angle (radians) for an in-place point turn.

    Bug fix: same ``ticks2dist`` NameError as tick2ang_s.
    """
    return tick2dist(ticks) / (AXWID/2)
| [
"[email protected]"
] | |
554b941fc452621f4a78ebcd0eedaf68ba6c57ba | 80caf973742bed3283b3516cf1e186dbee30259c | /tests/test_main.py | 4d02245a111da036d5bf41531087d1cc1149b2af | [] | no_license | michalkurdziel/sudoku | 6f8a3384c11d4d0b573f5067d42ecf7ecb044d63 | 2a22af4a74dc0730f8ff1c0ecddbb8cf5215b44d | refs/heads/master | 2020-05-19T00:06:42.411586 | 2020-02-24T09:34:42 | 2020-02-24T09:34:42 | 184,728,143 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,191 | py | from unittest import TestCase
from unittest import mock
from src.consts import puzzle
from src.consts import puzzle_results
from src.main import Cell
from src.main import CellList
from src.main import get_border
# All nine sudoku digits — the universe of candidate values for a cell.
FULL_SET = [x for x in range(1, 10)]
# Partially filled board fixture; 0 marks an empty cell.
TEST_MATRIX = [
    [0, 3, 6, 0, 4, 7, 5, 2, 0],
    [0, 4, 0, 6, 2, 5, 0, 0, 8],
    [0, 0, 0, 3, 1, 0, 0, 7, 0],
    [0, 1, 0, 5, 0, 6, 7, 0, 0],
    [3, 0, 0, 0, 0, 0, 0, 0, 5],
    [0, 0, 5, 7, 0, 4, 0, 8, 0],
    [0, 2, 0, 0, 6, 8, 0, 0, 0],
    [4, 0, 0, 2, 5, 3, 0, 9, 0],
    [0, 5, 9, 4, 7, 0, 6, 3, 0]
]
# Fully solved version of TEST_MATRIX, used as the expected result.
CORRECT_MATRIX = [
    [1, 3, 6, 8, 4, 7, 5, 2, 9],
    [9, 4, 7, 6, 2, 5, 3, 1, 8],
    [5, 8, 2, 3, 1, 9, 4, 7, 6],
    [2, 1, 8, 5, 9, 6, 7, 4, 3],
    [3, 7, 4, 1, 8, 2, 9, 6, 5],
    [6, 9, 5, 7, 3, 4, 2, 8, 1],
    [7, 2, 3, 9, 6, 8, 1, 5, 4],
    [4, 6, 1, 2, 5, 3, 8, 9, 7],
    [8, 5, 9, 4, 7, 1, 6, 3, 2]
]
class TestGrubAllvalues(TestCase):
    """Tests for CellList value lookups: row, column, 3x3 square, borders.

    NOTE(review): class name looks like a typo for "TestGrabAllValues";
    kept as-is in this documentation-only change.
    """
    def setUp(self):
        # Fresh CellList around the shared fixture before every test.
        self.cells = CellList(TEST_MATRIX)
    def tearDown(self):
        self.cells = None
    def test_get_values_from_row(self):
        # Non-zero values of TEST_MATRIX row 1.
        expected_results = [4, 6, 2, 5, 8]
        results = self.cells.get_values_from_row(1)
        assert expected_results == results, 'Expected {} but results is {}'.format(expected_results, results)
    def test_get_values_from_column(self):
        # Non-zero values of TEST_MATRIX column 2.
        expected_results = [6, 5, 9]
        results = self.cells.get_values_from_column(2)
        assert expected_results == results, 'Expected {} but results is {}'.format(expected_results, results)
    def test_get_first_small_square_indexes(self):
        # Non-zero values of the top-left 3x3 square.
        expected_results = [3, 6, 4]
        self.cells1 = CellList(TEST_MATRIX)
        values_in_square = self.cells1.get_values_in_small_square(0, 0)
        self.assertListEqual(values_in_square, expected_results)
    def test_get_fourth_small_square_indexes(self):
        # Non-zero values of the center 3x3 square.
        expected_results = [5, 6, 7, 4]
        results = self.cells.get_values_in_small_square(4, 4)
        assert expected_results == results, 'Expected {} but results is {}'.format(expected_results, results)
    def test_get_border(self):
        # get_border maps an index to the index triple of its 3x3 band.
        self.assertListEqual([0, 1, 2], get_border(1))
        self.assertListEqual([0, 1, 2], get_border(2))
        self.assertListEqual([3, 4, 5], get_border(3))
        self.assertListEqual([3, 4, 5], get_border(4))
        self.assertListEqual([3, 4, 5], get_border(5))
        self.assertListEqual([6, 7, 8], get_border(6))
        self.assertListEqual([6, 7, 8], get_border(7))
        self.assertListEqual([6, 7, 8], get_border(8))
class TestBaseConfiguration(TestCase):
    """Guards the FULL_SET constant against accidental edits."""

    full_set = list(range(1, 10))

    def test_full_set(self):
        assert self.full_set == FULL_SET
class TestCellValidator(TestCase):
    """Tests that a cell's candidate values are pruned by row, column and square."""
    def setUp(self):
        self.cells = CellList(TEST_MATRIX)
    def test_validate_by_row(self):
        # Candidates = FULL_SET minus the given row values.
        cell = Cell(0, 0, 0)
        self.cells._set_possible_values_by_row(cell, [3, 6, 4])
        self.assertListEqual(cell.value, [1, 2, 5, 7, 8, 9])
    def test_validate_by_column(self):
        cell = Cell(0, 0, 0)
        self.cells._set_possible_values_by_column(cell, [3, 4])
        self.assertListEqual(cell.value, [1, 2, 5, 6, 7, 8, 9])
    def test_validate_by_square(self):
        cell = Cell(0, 0, 0)
        self.cells._set_possible_values_by_square(cell, [3, 4, 6])
        self.assertListEqual(cell.value, [1, 2, 5, 7, 8, 9])
    def test_validate_by_three_dimenssions(self):
        # run() applies all three constraints from the fixture board at once.
        cell = Cell(0, 0, 0)
        self.cells.run(cell)
        self.assertListEqual(cell.value, [1, 8, 9])
class TestCellList(TestCase):
    """Tests CellList construction and its finished-state check."""
    @mock.patch('src.main.Cell', autospec=True)
    def test_list_stores_proper_values(self, mock_cell):
        # A single 3-element row should yield a CellList of length 3.
        cells = CellList([[mock_cell, mock_cell, mock_cell]])
        self.assertEqual(len(cells), 3, len(cells))
    def test_status_check(self):
        # Partially-filled board is not finished; solved board is.
        cells = CellList(TEST_MATRIX)
        cells.print()
        res = cells.isFinished()
        self.assertFalse(res, "res: " + str(res))
        cells = CellList(CORRECT_MATRIX)
        res = cells.isFinished()
        self.assertTrue(res)
class TestCell(TestCase):
    """Smoke test: Cell accepts (row, column, value) positional arguments."""

    def test_create_intsance(self):
        # NOTE(review): method name typo ("intsance") kept to avoid renames.
        instance = Cell(1, 1, 1)
        self.assertIsInstance(instance, Cell)
class TestCheckInputMatrixes(TestCase):
    """End-to-end checks: fixtures agree, and the solver reproduces the known solutions."""
    def test_compare_two_matrix(self):
        # Every pre-filled (non-zero) cell must match the solved board.
        for i in range(9):
            for j in range(9):
                if TEST_MATRIX[i][j] != 0:
                    self.assertEqual(TEST_MATRIX[i][j], CORRECT_MATRIX[i][j], "i: {}, j {}".format(i, j))
    def test_general_validation_1(self):
        # NOTE(review): loops until isFinished() — will hang if the solver stalls.
        cells = CellList(TEST_MATRIX)
        while not cells.isFinished():
            cells.validate()
        matrix = cells.generate_matrix()
        for i in range(9):
            for j in range(9):
                self.assertEqual(matrix[i][j], CORRECT_MATRIX[i][j], "i= {}, j= {}".format(i, j))
    def test_general_validation_2(self):
        # Same run against the imported puzzle/puzzle_results fixtures.
        cells = CellList(puzzle)
        while not cells.isFinished():
            cells.validate()
        matrix = cells.generate_matrix()
        for i in range(9):
            for j in range(9):
                self.assertEqual(matrix[i][j], puzzle_results[i][j], "i= {}, j= {}".format(i, j))
| [
"[email protected]"
] | |
7470e26156b17d458bc7416e9cbdf3ef2d2d50e0 | 34aef6a91384490d5c5bb2542835b0a302982a36 | /mpcInf_R.py | 7645f0cfab27616eec6f456cacfa5d05abaed2b2 | [] | no_license | naru-naru/sparseControl_byNN | 117d3a302f0ad506396069973c7d52043cda83cc | 7279d25869d11ad6b378359394b910ad8b9d5d33 | refs/heads/main | 2023-03-27T21:25:32.471704 | 2021-04-05T16:12:58 | 2021-04-05T16:12:58 | 354,888,147 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,455 | py | # import cvxpy
import numpy as np
import matplotlib
import matplotlib.pyplot as plt
import japanize_matplotlib
# from scipy.integrate import odeint
# import cvxopt
# from cvxopt import matrix
import scipy.linalg
import scipy.signal
from l1sample import mpc_modeling_tool_v5
import random
import csv
import pandas as pd
import torch
import torch.nn as nn
import torch.nn.functional as F
import torch.optim as optim
from torch.utils.data import Dataset
from sklearn.model_selection import train_test_split
from torchvision import datasets, transforms
from torch.utils.data import DataLoader, Dataset
import time
from operator import mul
from network import netR as Net
# Model definition: load the trained regression network for inference.
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
model = Net().to(device)
# print(len(dataset)) #36300 =11*11*300
# optimizer = optim.Adam(net.parameters())
# fn_loss=nn.BCEWithLogitsLoss()
# losses=[]
# running_loss=0.0
# cntfile=[]
# cnt=0
# epoches=10
# batch_size=100
model.load_state_dict(torch.load("./params/model_R.pth", map_location=device))
print("----------------------------")
# Training is done; inference starts below.
print("---------------------")
# Continuous-time plant (A, B, C) and MPC weights (Q, R).
A = np.array([[0, -1],
              [2, 0]])
B = np.array([[2], [0]])
C = np.array([[1], [0]])
(nx, nu) = B.shape
Q = np.eye(nx)
R = np.eye(nu)
# Sample time Ts, horizon Th, and horizon length N in steps.
Ts = 0.05
Th = 3
N=int(Th/Ts)
# a=1 #L1差分ノルム
# b=0.5 #L1ノルム
# c=1 #L2ノルム
# Discretize the plant at the sample time.
Ad, Bd, _, _, dt = scipy.signal.cont2discrete((A,B,C,0),Ts)
# # print(Ad)
# # print(Bd)
x0 = np.array([[1.28], [-1.89]]) # init state
simutime=1/Th
# simutime=5
itr=int(simutime*Th)
# Histories for states, inputs and (optional) noise; running norms.
rx0 =[]
rx1 =[]
ru=[]
noiseLi=[]
xcurr = x0
ucurr = 0
t1 = time.time()
l2norm = 0
l1norm = 0
# noisedf = pd.read_csv('./dataset/noise.csv')
# noiseLi = noisedf.values.tolist()
for count in range(itr):
    # The network maps the current state to a full control sequence.
    data = [xcurr[0,0], xcurr[1,0]]
    data = torch.tensor(data, dtype=torch.float32)
    output = model(data)
    # output = torch.sigmoid(output)
    output = output.detach().tolist()
    # Signs = [] #分類のみ用
    # for fm in range(0, 60, 3):
    #     cnt = [output[fm], output[fm+1], output[fm+2]]
    #     sign = cnt.index(max(cnt)) - 1
    #     Signs.append(sign) #分類のみ用
    # Regression output is used directly as the control sequence.
    ucurr = output
    # Open-loop rollout of the predicted sequence over the horizon.
    for horizon in range(N):
        ru.append(ucurr[horizon])
        rx0.append(xcurr[0])
        rx1.append(xcurr[1])
        l1norm += abs(ucurr[horizon])
        l2norm += (xcurr[0])**2
        # Optional external disturbance (disabled: noise = 0).
        noise = 0
        # noise = noiseLi[count*N+horizon][0]
        xnext = Ad@xcurr + Bd*(ucurr[horizon]+noise) # x(i) → x(i+1)
        xcurr = xnext
t2 = time.time()
fig, ax = plt.subplots(1)
# time= np.arange(0, simutime, Ts)
# 目盛を内側に
plt.tick_params(direction='in')
plt.ylim(-1.1, 1.1)
plt.rcParams["font.family"] = "Times New Roman"
plt.rcParams["mathtext.fontset"] = "stix"
plt.rcParams["font.size"] = 10
plt.rcParams['axes.linewidth'] = 1.0# 軸の線幅edge linewidth。囲みの太さ
plt.plot(ru, ls="-", color="b", linewidth="2")
plt.grid(True)
\
# ラベル
plt.xlabel("$k$", fontsize=24)
plt.ylabel("$u$", fontsize=24)
# fig, ax = plt.subplots(2, 1)
# time= np.arange(0, simutime, Ts)
# # 目盛を内側に
# ax[0].tick_params(direction='in')
# ax[1].tick_params(direction='in')
# plt.rcParams["font.family"] = "Times New Roman"
# plt.rcParams["mathtext.fontset"] = "stix"
# plt.rcParams["font.size"] = 10
# plt.rcParams['axes.linewidth'] = 1.0# 軸の線幅edge linewidth。囲みの太さ
# ax[0].plot(rx0, ls="-", color="b")
# ax[1].plot(ru, ls="-", color="g")
# # ax[1].plot(time, rx1, ls="-", label="Classification")
# # ax[2].plot(time, ru, ls="-", label="Classification")
# # plt.plot(time, noiseLi, label="noise")
# # plt.ylim(-5.0, 5.0)
# # 凡例・グリッド
# # plt.legend(loc='upper right', borderaxespad=0)
# ax[0].grid(True)
# ax[1].grid(True)
# # ax[0].ylim(-2.0, 2.0)
# # ax[1].ylim(-2.0, 2.0)
# # ax[1].legend(loc='upper right', borderaxespad=0)
# # ax[1].grid(True)
# # ax[2].legend(loc='lower right', borderaxespad=0)
# # ax[2].grid(True)
# # ラベル
# ax[0].set_xlabel("$t$", fontsize=18)
# ax[0].set_ylabel("$x$", fontsize=18)
# ax[1].set_xlabel("$t$", fontsize=18)
# ax[1].set_ylabel("$u$", fontsize=18)
fig.tight_layout()
plt.show()
stop = [i for i in range(len(ru)) if abs(ru[i])<=0.01]
stopRate = 100 * (len(stop)/len(ru))
print(stopRate)
print(l2norm)
print(t2 - t1)
| [
"[email protected]"
] | |
e3e34122924799d34bc42307f300a83bf916269b | 4b653e08fe60e7758bb50c3cb6438cc68b12b9af | /bestshaper2/apps/migrations/0002_auto_20170530_1847.py | 6e4445a5731f040b39bd163d2142aa55bfcb2a17 | [] | no_license | atsuo4610/bestshaper | b83359faea0e3973e7ed716676e4a2e9dd7f53ee | 335309d1795f83a5de4187eae6c2bc5fc37c455f | refs/heads/master | 2021-01-22T21:53:03.443092 | 2017-06-02T08:12:10 | 2017-06-02T08:12:10 | 92,740,899 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,085 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.9.2 on 2017-05-30 09:47
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated schema migration (Django 1.9): introduces the Brassiere
    model, alters User.name, and links each Brassiere to its owning User."""
    dependencies = [
        ('apps', '0001_initial'),
    ]
    operations = [
        # New per-bra tracking table; wash_num presumably counts washes — confirm in app code
        migrations.CreateModel(
            name='Brassiere',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('bra_name', models.CharField(max_length=32)),
                ('bra_started_at', models.DateTimeField()),
                ('wash_num', models.IntegerField()),
            ],
        ),
        # User.name becomes a CharField capped at 32 characters
        migrations.AlterField(
            model_name='user',
            name='name',
            field=models.CharField(max_length=32),
        ),
        # Reverse accessor user.bras; deleting a User cascades to their bras
        migrations.AddField(
            model_name='brassiere',
            name='user',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='bras', to='apps.User'),
        ),
    ]
| [
"[email protected]"
] | |
1f41add4ccb7a992fbf010e0494a8d6bfeadedb5 | 0f13f1a7039c697fee4664ce78cb556196e52c71 | /new/analysis/migrations/0003_auto_20200813_0914.py | 9c665392e5e389f9757b649057a96d1a789afabd | [] | no_license | daisy-ctrl/ParkingAnalysis | 8937e40832ac8345c14f6112e3c1559d626db295 | 45e3944327ae3d1872fac007adf3d92fe5e33d69 | refs/heads/master | 2022-12-04T08:41:43.478914 | 2020-08-25T10:58:49 | 2020-08-25T10:58:49 | 290,184,958 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,373 | py | # Generated by Django 3.1 on 2020-08-13 09:14
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated migration (Django 3.1): replaces the old Mass model with
    a denormalised 'All' table of parking-sale records."""
    dependencies = [
        ('analysis', '0002_auto_20200813_0743'),
    ]
    operations = [
        # Wide flat table; note the manually-assigned integer primary key
        migrations.CreateModel(
            name='All',
            fields=[
                ('id', models.IntegerField(primary_key=True, serialize=False)),
                ('sale_id', models.CharField(max_length=10)),
                ('amount', models.IntegerField()),
                ('record_date', models.DateTimeField()),
                ('raw_record_date_formatted', models.DateTimeField()),
                ('Hour', models.IntegerField()),
                ('query_date', models.DateField()),
                ('server_date', models.DateTimeField()),
                ('account_reference', models.CharField(max_length=10)),
                ('created_at', models.DateTimeField()),
                ('updated_at', models.DateTimeField()),
                ('Weekday_Name', models.CharField(max_length=10)),
                ('subcounty_name', models.CharField(max_length=10)),
                ('part_id', models.IntegerField()),
                ('subpart_id', models.IntegerField()),
                ('transaction_mode_id', models.IntegerField()),
            ],
        ),
        # Drops the superseded model
        migrations.DeleteModel(
            name='Mass',
        ),
    ]
| [
"[email protected]"
] | |
609ba118086b4090d614ac15757d8608e9f25ad1 | 75a642ab9d63580bf9c762b6626ecdff1e787d4a | /Data.py | ee318e6af4c85d3695cfd438a4a1aabe26990bfd | [] | no_license | haitrr/character-recognition | 74faeaa659360457e5225f03f3beb5d4bd0b9a38 | 9548b7cbdfd6a94f056bc2563ea911d7c3da3ba5 | refs/heads/master | 2022-02-10T07:19:33.592800 | 2017-10-13T19:38:42 | 2017-10-13T19:38:42 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,174 | py | from PIL import Image
import numpy as np
from os import listdir
# 52 English letters (upper case then lower case)
en_char = [
    'A', 'B', 'C', 'D', 'E', 'F', 'G', 'H', 'I', 'J', 'K', 'L', 'M', 'N', 'O',
    'P', 'Q', 'R', 'S', 'T', 'U', 'V', 'W', 'X', 'Y', 'Z', 'a', 'b', 'c', 'd',
    'e', 'f', 'g', 'h', 'i', 'j', 'k', 'l', 'm', 'n', 'o', 'p', 'q', 'r', 's',
    't', 'u', 'v', 'w', 'x', 'y', 'z'
]
# 14 Vietnamese letters with diacritics (upper case then lower case)
vn_char = [
    'Ă', 'Â', 'Đ', 'Ê', 'Ô', 'Ơ', 'Ư', 'ă', 'â', 'đ', 'ê', 'ô', 'ơ', 'ư'
]
# full label alphabet; its length (52 + 14 = 66) must stay equal to output_size
char = en_char + vn_char
# each sample image is 16x16 pixels (flattened to 256 inputs by get_pixels)
input_size = (16, 16)
# number of output classes = len(char)
output_size = 66
# folder-name prefixes; sample folders are e.g. "Samples/Sample001" ... "Sample066"
samples = "Samples/Sample"
test = "Tests/Sample"
# generate the output layer
def load_output(sample_size):
    """Return one-hot output targets for *sample_size* samples per class.

    Fix: the computed layer was previously assigned to a local and discarded
    (the function had no return statement), making every call a no-op.
    """
    return get_output_layer(sample_size)
# get input from samples
def get_input_from_samples(data):
    """Collect (pixel-vector, one-hot target) pairs from every class folder
    under the *data* path prefix (folders "001" .. str(output_size))."""
    pairs = []
    for label in range(1, output_size + 1):
        print("Loading sample " + str(label))
        folder = data + str(label).zfill(3)
        for filename in listdir(folder):
            # fresh target array per file so pairs never alias each other
            one_hot = np.zeros((output_size, 1))
            one_hot[label - 1] = 1
            pairs.append((get_pixels(folder + "/" + filename), one_hot))
    return pairs
def scale(val, src, dst):
    """Linearly map *val* from the interval src = (lo, hi) onto dst = (lo, hi)."""
    src_lo, src_hi = src
    dst_lo, dst_hi = dst
    fraction = (val - src_lo) / (src_hi - src_lo)
    return dst_lo + fraction * (dst_hi - dst_lo)
# get pixel from image
def get_pixels(image):
    """Open *image* and return a (input_size[0]*input_size[1], 1) column of
    inverted, normalised intensities (dark pixels -> values near 1).

    Assumes 3-channel pixels, since channel sums are scaled from (0, 765)
    = (0, 3*255) — TODO confirm source images are RGB.
    """
    im = Image.open(image)
    values = [1 - scale(sum(channels), (0, 765), (0, 1)) for channels in im.getdata()]
    return np.reshape(values, (input_size[0] * input_size[1], 1))
# generate input layer
def get_output_layer(sample_size):
    """Return one-hot column targets: *sample_size* copies for each of the
    output_size classes, grouped by class in ascending class order."""
    targets = []
    for cls in range(output_size):
        for _ in range(sample_size):
            # new array each time so callers can mutate targets independently
            one_hot = np.zeros((output_size, 1))
            one_hot[cls] = 1
            targets.append(one_hot)
    return targets
# get training data
def get_training_data():
    """Load all (input, one-hot target) pairs from the training sample folders."""
    return get_input_from_samples(samples)
# get test data
def get_testing_data():
    """Load all (input, one-hot target) pairs from the test sample folders."""
    return get_input_from_samples(test)
| [
"[email protected]"
] | |
e606292d85ba5c8ed69dd635940b950b332a9b70 | 2c91dd88913bcffe6f4c1f6c13e55a97562211b0 | /tests/test_by_state.py | 61c1799bde66b2e506adad77086cc512352098c7 | [] | no_license | Joylizzie/covid19 | 9b228cb19df3575162adb239de22f3b1fbd6d915 | 2ed057701df6facfb52f5eba88c28a223098ba47 | refs/heads/master | 2022-07-23T05:24:56.519485 | 2020-05-16T19:05:53 | 2020-05-16T19:05:53 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,399 | py | import sys
import os
import datetime
import shutil
import test_data
sys.path.append('..')
import unittest
from unittest import mock
from henry_covid19.mock_query_job import MockQeryJob
import by_state
from test_data import state_deaths
from test_data import state_cases
from test_data import us_states
import copy
def mocked_client1(*args, **kwargs):
    """Factory used as a mock side_effect: builds a fake BigQuery client whose
    query() returns a fixture job chosen by a marker comment in the SQL."""
    class MockClient:
        def __init__(self):
            pass
        def dataset(self, datset_name):
            # NOTE(review): 'Table' is not defined in the visible imports, so
            # calling dataset() would raise NameError — confirm it is unused.
            return Table()
        def query(self, sql, job_config = None):
            # Dispatch on the marker comment embedded in each query string.
            if '/* STATE DEATHS */' in sql:
                d = state_deaths.d
            elif '/* STATE CASES */' in sql:
                d = state_cases.d
            elif '/* STATE BY DAY */' in sql:
                d = test_data.us_states.d
            else:
                raise ValueError('no sql match')
            return MockQeryJob(d)
    return MockClient()
class TestMakeCountries(unittest.TestCase):
    """Smoke test: runs by_state.make_state_graphs against fixture data via a
    mocked google.cloud.bigquery.Client."""
    def setUp(self):
        # Early return deliberately disables the html_temp cleanup below
        # (kept as dead code so it can be re-enabled easily).
        return
        if os.path.isdir('html_temp'):
            shutil.rmtree('html_temp')
    def tearDown(self):
        # Same deliberate no-op as setUp — cleanup currently switched off.
        return
        if os.path.isdir('html_temp'):
            shutil.rmtree('html_temp')
    @mock.patch('google.cloud.bigquery.Client', side_effect=mocked_client1)
    def test_main(self, bq):
        # bq is the patched Client; all query results come from fixtures.
        by_state.make_state_graphs(verbose = False)
if __name__ == '__main__':
unittest.main()
| [
"[email protected]"
] | |
00485ccbe30be1378517fcb81e50a0de1a25e315 | 210b968876a8aea36eae94e4720e23f2daa8552b | /lib/polysimplify.py | 816a9eb2f94da396da73313ec65d9ea5c07428b4 | [] | no_license | beefoo/coloring-book | 3ce5c0d5497b199cd470f640dd0b3778d545577f | cee77b7f863ddee4323c8c16111d353fe27e5b36 | refs/heads/master | 2021-01-12T09:37:36.490913 | 2017-06-26T19:07:43 | 2017-06-26T19:07:43 | 76,204,083 | 9 | 1 | null | null | null | null | UTF-8 | Python | false | false | 16,049 | py | '''
Source: https://github.com/Permafacture/Py-Visvalingam-Whyatt/
=========================================
Visvalingam-Whyatt method of poly-line vertex reduction
Visvalingam, M and Whyatt J D (1993)
"Line Generalisation by Repeated Elimination of Points", Cartographic J., 30 (1), 46 - 51
Described here:
http://web.archive.org/web/20100428020453/http://www2.dcs.hull.ac.uk/CISRG/publications/DPs/DP10/DP10.html
=========================================
The MIT License (MIT)
Copyright (c) 2014 Elliot Hallmark
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
================================
'''
from numpy import array, argmin
import numpy as np
def triangle_area(p1,p2,p3):
    """Area of the triangle with 2-D vertices p1, p2, p3 (shoelace formula)."""
    x1, y1 = p1[0], p1[1]
    x2, y2 = p2[0], p2[1]
    x3, y3 = p3[0], p3[1]
    twice_signed_area = x1 * (y2 - y3) + x2 * (y3 - y1) + x3 * (y1 - y2)
    return abs(twice_signed_area) / 2.
def triangle_areas_from_array(arr):
    '''Vectorised triangle_area over an (N,2) point array.

    Returns a length-N array where entry i is the area of the triangle formed
    by points i-1, i, i+1; the two endpoints get np.inf (they can never be
    removed by the simplifier).
    '''
    result = np.empty((len(arr),), arr.dtype)
    result[0] = np.inf
    result[-1] = np.inf
    p1 = arr[:-2]
    p2 = arr[1:-1]
    p3 = arr[2:]
    # 'interior' is a view into result, so all writes land directly in it
    interior = result[1:-1]
    np.multiply(p1[:, 0], p2[:, 1] - p3[:, 1], out=interior)
    interior += p2[:, 0] * (p3[:, 1] - p1[:, 1])
    interior += p3[:, 0] * (p1[:, 1] - p2[:, 1])
    np.abs(interior, out=interior)
    interior /= 2.
    return result
#the final value in thresholds is np.inf, which will never be
# the min value. So, I am safe in "deleting" an index by
# just shifting the array over on top of it
def remove(s,i):
    '''
    Quick trick to remove an item from a numpy array without
    creating a new object. Rather than the array shape changing,
    the final value just gets repeated to fill the space.
    ~3.5x faster than numpy.delete
    '''
    # Shift everything after index i one slot left, in place; s[-1] becomes a
    # stale duplicate, so callers must treat the array as one element shorter.
    s[i:-1]=s[i+1:]
class VWSimplifier(object):
    '''Visvalingam-Whyatt polyline simplifier: precomputes a significance
    (effective triangle area) per vertex once, after which filtering to any
    threshold / vertex count / ratio is a cheap boolean mask.'''
    def __init__(self,pts):
        '''Initialize with points. takes some time to build
        the thresholds but then all threshold filtering later
        is ultra fast'''
        self.pts = np.array(pts)
        self.thresholds = self.build_thresholds()
        self.ordered_thresholds = sorted(self.thresholds,reverse=True)
    def build_thresholds(self):
        '''compute the area value of each vertex, which one would
        use to mask an array of points for any threshold value.
        returns a numpy.array (length of pts)  of the areas.
        '''
        pts = self.pts
        nmax = len(pts)
        real_areas = triangle_areas_from_array(pts)
        real_indices = range(nmax)
        #destructable copies
        #ARG! areas=real_areas[:] doesn't make a copy!
        areas = np.copy(real_areas)
        # NOTE(review): range(...)[:] and the i.pop(...) calls below require
        # Python 2, where range returns a list; under Python 3 this raises
        # AttributeError (py2 print statements later in this file confirm era).
        i = real_indices[:]
        #pick first point and set up for loop
        min_vert = argmin(areas)
        this_area = areas[min_vert]
        #  areas and i are modified for each point finished
        remove(areas,min_vert) #faster
        #areas = np.delete(areas,min_vert) #slower
        real_idx = i.pop(min_vert)
        #cntr = 3
        while this_area<np.inf:
            '''min_vert was removed from areas and i. Now,
            adjust the adjacent areas and remove the new
            min_vert.
            Now that min_vert was filtered out, min_vert points
            to the point after the deleted point.'''
            skip = False #modified area may be the next minvert
            try:
                right_area = triangle_area(pts[i[min_vert-1]],
                                pts[i[min_vert]],pts[i[min_vert+1]])
            except IndexError:
                #trying to update area of endpoint. Don't do it
                pass
            else:
                right_idx = i[min_vert]
                if right_area <= this_area:
                    #even if the point now has a smaller area,
                    # it ultimately is not more significant than
                    # the last point, which needs to be removed
                    # first to justify removing this point.
                    # Though this point is the next most significant
                    right_area = this_area
                    #min_vert refers to the point to the right of
                    # the previous min_vert, so we can leave it
                    # unchanged if it is still the min_vert
                    skip = min_vert
                #update both collections of areas
                real_areas[right_idx] = right_area
                areas[min_vert] = right_area
            if min_vert > 1:
                #cant try/except because 0-1=-1 is a valid index
                left_area = triangle_area(pts[i[min_vert-2]],
                             pts[i[min_vert-1]],pts[i[min_vert]])
                if left_area <= this_area:
                    #same justification as above
                    left_area = this_area
                    skip = min_vert-1
                real_areas[i[min_vert-1]] = left_area
                areas[min_vert-1] = left_area
            #only argmin if we have too.
            # 'skip' is either False or a positive index: areas[0] stays inf so
            # argmin never returns 0, and the min_vert>1 guard keeps skip >= 1,
            # meaning the falsy-zero pitfall of 'or' cannot trigger here.
            min_vert = skip or argmin(areas)
            real_idx = i.pop(min_vert)
            this_area = areas[min_vert]
            #areas = np.delete(areas,min_vert) #slower
            remove(areas,min_vert)  #faster
            '''if sum(np.where(areas==np.inf)[0]) != sum(list(reversed(range(len(areas))))[:cntr]):
                print "broke:",np.where(areas==np.inf)[0],cntr
                break
            cntr+=1
            #if real_areas[0]<np.inf or real_areas[-1]<np.inf:
            #  print "NO!", real_areas[0], real_areas[-1]
            '''
        return real_areas
    # NOTE(review): from_threshold keeps vertices with >= threshold while
    # from_number filters with strict > — confirm the asymmetry is intended.
    def from_threshold(self,threshold):
        return self.pts[self.thresholds >= threshold]
    def from_number(self,n):
        # keep roughly the n most significant vertices
        thresholds = self.ordered_thresholds
        try:
            threshold = thresholds[int(n)]
        except IndexError:
            return self.pts
        return self.pts[self.thresholds > threshold]
    def from_ratio(self,r):
        # r is the fraction of vertices to keep, 0 < r <= 1
        if r<=0 or r>1:
            raise ValueError("Ratio must be 0<r<=1")
        else:
            return self.from_number(r*len(self.thresholds))
class WKTSimplifier(VWSimplifier):
    '''VWSimplifier that returns strings suitable for WKT
    creation'''
    def __init__(self,*args,**kwargs):
        # pull the optional 'precision' kwarg off before VWSimplifier sees it
        if 'precision' in kwargs:
            p=kwargs.pop('precision')
        else:
            p=None
        VWSimplifier.__init__(self,*args,**kwargs)
        self.set_precision(p)
    def set_precision(self,precision):
        # cache the coordinates as strings once so wkt_* filtering is a join
        # NOTE(review): astype('S%s') yields bytes on Python 3, which would
        # render as b'...' in the join below — this is Python-2-era code.
        if precision:
            self.pts_as_strs = self.pts.astype('S%s'%precision)
        else:
            self.pts_as_strs = self.pts.astype(str)
    '''slow
    def from_threshold(self,threshold,precision=None):
        arr = np.array2string(self.pts[self.thresholds > threshold],precision=precision)
        return arr.replace('[[ ','(').replace(']]',')').replace(']\n [ ',',')
    '''
    def wkt_from_threshold(self,threshold, precision=None):
        # keep vertices whose significance (area) is at least the threshold
        if precision:
            self.set_precision(precision)
        pts = self.pts_as_strs[self.thresholds >= threshold]
        return '(%s)'%','.join(['%s %s'%(x,y) for x,y in pts])
    def wkt_from_number(self,n,precision=None):
        # keep roughly the n most significant vertices; floor of 3 for polygons
        thresholds = self.ordered_thresholds
        if n<3: n=3 #For polygons. TODO something better
        try:
            threshold = thresholds[int(n)]
        except IndexError:
            threshold = 0
        return self.wkt_from_threshold(threshold,precision=precision)
    def wkt_from_ratio(self,r,precision=None):
        # r is the fraction of vertices to keep, 0 < r <= 1
        if r<=0 or r>1:
            raise ValueError("Ratio must be 0<r<=1")
        else:
            return self.wkt_from_number(r*len(self.thresholds))
try:
from django.contrib.gis.gdal import OGRGeometry,OGRException
from django.contrib.gis.geos import GEOSGeometry, fromstr
except ImportError:
class GDALSimplifier(object):
'''Dummy object that would be replaced by a real one if
correct module exists'''
def __init__(*args,**kwargs):
print """
django.contrib.gis.gdal not found.
GDALSimplifier not available.
"""
else:
from json import loads
import re
p = re.compile( '([ 0123456789.]+) ([0123456789.]+)')
class GDALSimplifier(object):
'''Warning, there is a slight loss of precision just in the
conversion from geometry object to numpy.array even if no
threshold is applied. ie:
originalpolygeom.area -> 413962.65495176613
gdalsimplifierpoly.area -> 413962.65495339036
'''
def __init__(self,geom,precision=None,return_GDAL = True):
'''accepts a gdal.OGRGeometry or geos.GEOSGeometry
object and wraps multiple
VWSimplifiers. set return_GDAL to False for faster
filtering with arrays of floats returned instead of
geometry objects.'''
global p
self.return_GDAL = return_GDAL
if isinstance(geom,OGRGeometry):
name = geom.geom_name
self.Geometry = lambda w: OGRGeometry(w,srs=geom.srs)
self.pts = np.array(geom.tuple)
elif isinstance(geom,GEOSGeometry):
name = geom.geom_type.upper()
self.Geometry = lambda w: fromstr(w)
self.pts = np.array(geom.tuple)
elif isinstance(geom, unicode) or isinstance(geom,str):
#assume wkt
#for WKT
def str2tuple(q):
return '(%s,%s)' % (q.group(1),q.group(2))
self.return_GDAL = False #don't even try
self.Geometry = lambda w: w #this will never be used
name, pts = geom.split(' ',1)
self.pts = loads(p.sub(str2tuple,pts).\
replace('(','[').replace(')',']'))
self.precision = precision
if name == 'LINESTRING':
self.maskfunc = self.linemask
self.buildfunc = self.linebuild
self.fromnumfunc = self.notimplemented
elif name == "POLYGON":
self.maskfunc = self.polymask
self.buildfunc = self.polybuild
self.fromnumfunc = self.notimplemented
elif name == "MULTIPOLYGON":
self.maskfunc = self.multimask
self.buildfunc = self.multibuild
self.fromnumfunc = self.notimplemented
else:
raise OGRGeometryError("""
Only types LINESTRING, POLYGON and MULTIPOLYGON
supported, but got %s"""%name)
#sets self.simplifiers to a list of VWSimplifiers
self.buildfunc()
#rather than concise, I'd rather be explicit and clear.
def pt2str(self,pt):
'''make length 2 numpy.array.__str__() fit for wkt'''
return ' '.join(pt)
def linebuild(self):
self.simplifiers = [WKTSimplifier(self.pts)]
def line2wkt(self,pts):
return u'LINESTRING %s'%pts
def linemask(self,threshold):
get_pts = self.get_pts
pts = get_pts(self.simplifiers[0],threshold)
if self.return_GDAL:
return self.Geometry(self.line2wkt(pts))
else:
return pts
def polybuild(self):
list_of_pts = self.pts
result = []
for pts in list_of_pts:
result.append(WKTSimplifier(pts))
self.simplifiers = result
def poly2wkt(self,list_of_pts):
return u'POLYGON (%s)'%','.join(list_of_pts)
def polymask(self,threshold):
get_pts = self.get_pts
sims = self.simplifiers
list_of_pts = [get_pts(sim,threshold) for sim in sims]
if self.return_GDAL:
return self.Geometry(self.poly2wkt(list_of_pts))
else:
return array(list_of_pts)
def multibuild(self):
list_of_list_of_pts = self.pts
result = []
for list_of_pts in list_of_list_of_pts:
subresult = []
for pts in list_of_pts:
subresult.append(WKTSimplifier(pts))
result.append(subresult)
self.simplifiers = result
def multi2wkt(self,list_of_list_of_pts):
outerlist = []
for list_of_pts in list_of_list_of_pts:
outerlist.append('(%s)'%','.join(list_of_pts))
return u'MULTIPOLYGON (%s)'%','.join(outerlist)
def multimask(self,threshold):
loflofsims = self.simplifiers
result = []
get_pts = self.get_pts
if self.return_GDAL:
ret_func = lambda r: self.Geometry(self.multi2wkt(r))
else:
ret_func = lambda r: r
for list_of_simplifiers in loflofsims:
subresult = []
for simplifier in list_of_simplifiers:
subresult.append(get_pts(simplifier,threshold))
result.append(subresult)
return ret_func(result)
def notimplemented(self,n):
print "This function is not yet implemented"
def from_threshold(self,threshold):
precision = self.precision
if self.return_GDAL:
self.get_pts = lambda obj,t: obj.wkt_from_threshold(t,precision)
else:
self.get_pts = lambda obj,t: obj.from_threshold(t)
return self.maskfunc(threshold)
def from_number(self,n):
precision = self.precision
if self.return_GDAL:
self.get_pts = lambda obj,t: obj.wkt_from_number(t,precision)
else:
self.get_pts = lambda obj,t: obj.from_number(t)
return self.maskfunc(n)
def from_ratio(self,r):
precision = self.precision
if self.return_GDAL:
self.get_pts = lambda obj,t: obj.wkt_from_ratio(t,precision)
else:
self.get_pts = lambda obj,t: obj.from_ratio(t)
return self.maskfunc(r)
def fancy_parametric(k):
    '''Return (x(t), y(t)) callables for a decorative parametric curve;
    good k's: .33,.5,.65,.7,1.3,1.4,1.9,3,4,5'''
    m = k - 1
    def xt(t):
        return m * np.cos(t) + np.cos(t * m)
    def yt(t):
        return m * np.sin(t) - np.sin(t * m)
    return xt, yt
if __name__ == "__main__":
from time import time
n = 5000
thetas = np.linspace(0,16*np.pi,n)
xt,yt = fancy_parametric(1.4)
pts = np.array([[xt(t),yt(t)] for t in thetas])
start = time()
simplifier = VWSimplifier(pts)
pts = simplifier.from_number(1000)
end = time()
print "%s vertices removed in %02f seconds"%(n-len(pts), end-start)
import matplotlib
matplotlib.use('AGG')
import matplotlib.pyplot as plot
plot.plot(pts[:,0],pts[:,1],color='r')
plot.savefig('visvalingam.png')
print "saved visvalingam.png"
#plot.show()
| [
"[email protected]"
] | |
163b990bf7551bd22257719bfa9e5ff8df2d238a | 9add45eda7fbfc8fb165fc9f0a518e8b87f8f42c | /llatb/common/util.py | f493725d42e28a4d445e2618dae01a9b7f10598c | [] | no_license | gohankuma92/LLSIF-AutoTeamBuilder | 1850fdd6733d27adfa2668b8fbec840fda3848f0 | 398105d8262214d48855c4c7b8818086a886458a | refs/heads/master | 2021-03-27T20:13:14.809883 | 2017-07-20T17:52:13 | 2017-07-20T17:52:13 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,827 | py | import numpy as np
import pandas as pd
import urllib.request, sqlite3, json
from bs4 import BeautifulSoup
from pathlib import Path
from llatb.common.config import *
from llatb.framework.card import Card
from llatb.framework.team import Team
def update_card_data():
def card_summary(unit_id):
unit_info = df_unit.loc[unit_id]
attr_dict = {1:'Smile', 2:'Pure', 3:'Cool'}
# Generate stats list
level_up_info = df_level_up.loc[unit_info['unit_level_up_pattern_id']]
stats_list = np.array([
unit_info['smile_max'] - level_up_info['smile_diff'],
unit_info['pure_max'] - level_up_info['pure_diff'],
unit_info['cool_max'] - level_up_info['cool_diff'],
level_up_info['sale_price'],
level_up_info['merge_exp'],
unit_info['hp_max'] - level_up_info['hp_diff']
]).T.tolist()
# Generate skill info
if np.isnan(unit_info['default_unit_skill_id']):
skill = None
else:
skill_info = df_skill.loc[unit_info['default_unit_skill_id']]
skill_level_info = df_skill_level.loc[unit_info['default_unit_skill_id']]
trigger_type_dict = {1:'Time', 3:'Note', 4:'Combo', 5:'Score', 6:'Perfect', 12:'Star'}
effect_type_dict = {4:'Weak Judge', 5:'Strong Judge', 9:'Stamina Restore', 11:'Score Up'}
skill = {
'name': skill_info['name'],
'trigger_type': trigger_type_dict[skill_info['trigger_type']],
'trigger_count': int(skill_level_info['trigger_value'].values[0]),
'effect_type': effect_type_dict[skill_info['skill_effect_type']],
'odds_list': skill_level_info['activation_rate'].values.tolist(),
}
if skill['effect_type'] in ['Weak Judge', 'Strong Judge']:
skill['rewards_list'] = skill_level_info['discharge_time'].values.tolist()
else:
skill['rewards_list'] = skill_level_info['effect_value'].values.tolist()
# Generate center skill info
if np.isnan(unit_info['default_leader_skill_id']):
cskill = None
else:
cskill1_info = df_cskill1.loc[unit_info['default_leader_skill_id']]
temp = cskill1_info['leader_skill_effect_type']
if len(str(temp)) == 1:
main_attr, base_attr = attr_dict[temp], attr_dict[temp]
else:
main_attr, base_attr = attr_dict[temp%10], attr_dict[int((temp-100)/10)]
if unit_info['default_leader_skill_id'] not in df_cskill2.index:
bonus_range, bonus_ratio = None, None
else:
cskill2_info = df_cskill2.loc[unit_info['default_leader_skill_id']]
tag_dict = {1:'1st-year', 2:'2nd-year', 3:'3rd-year', 4:"μ's", 5:'Aqours',
6:'Printemps', 7:'lily white', 8:'BiBi',
9:'CYaRon!', 10:'AZALEA', 11:'Guilty Kiss'}
bonus_range, bonus_ratio = tag_dict[cskill2_info['member_tag_id']], int(cskill2_info['effect_value'])
cskill = {
'name': cskill1_info['name'],
'main_attr': main_attr,
'base_attr': base_attr,
'main_ratio': int(cskill1_info['effect_value']),
'bonus_range': bonus_range,
'bonus_ratio': bonus_ratio
}
# Generate whole summary
rarity_dict = {1:'N', 2:'R', 3:'SR', 4:'UR', 5:'SSR'}
if id_crown_dict.get(unit_info['unit_number']) is None:
card_name = ' ' if unit_info['eponym'] is None else unit_info['eponym']
else:
card_name = id_crown_dict.get(unit_info['unit_number'])
card_info = {
'promo': bool(unit_info['is_promo']),
'card_name': card_name,
'card_id': int(unit_info['unit_number']),
'main_attr': attr_dict[unit_info['attribute_id']],
'member_name': unit_info['name'],
'stats_list': stats_list,
'cskill': cskill,
'skill': skill,
'rarity': rarity_dict[unit_info['rarity']]
}
return unit_info['unit_number'], card_info
print('Downloading minaraishi\'s member.json')
minaraishi = json.loads(urllib.request.urlopen(minaraishi_json_url).read().decode('utf-8'))
id_crown_dict = dict()
for member, d1 in minaraishi.items():
for attribute, d2 in d1.items():
for rarity, d3 in d2.items():
for crown, d4 in d3.items():
id_crown_dict[d4['id']] = crown
print('Downloading latest unit.db_')
opener = urllib.request.URLopener()
opener.addheader('User-Agent', 'whatever')
opener.retrieve(unit_db_download_url, unit_db_dir)
print('Generating basic card stats')
conn = sqlite3.connect(unit_db_dir)
df_level_up = pd.read_sql('SELECT * FROM unit_level_up_pattern_m', con=conn, index_col='unit_level_up_pattern_id')
df_skill = pd.read_sql('SELECT * FROM unit_skill_m', con=conn, index_col='unit_skill_id')
df_skill_level = pd.read_sql('SELECT * FROM unit_skill_level_m', con=conn, index_col='unit_skill_id')
df_cskill1 = pd.read_sql('SELECT * FROM unit_leader_skill_m', con=conn, index_col='unit_leader_skill_id')
df_cskill2 = pd.read_sql('SELECT * FROM unit_leader_skill_extra_m', con=conn, index_col='unit_leader_skill_id')
df_unit = pd.read_sql('SELECT * FROM unit_m', con=conn, index_col='unit_id')
df_unit = df_unit[df_unit['unit_number']>0]
df_unit['is_support'] = df_unit['smile_max'] == 1
df_unit['is_promo'] = df_unit.apply(lambda x: x['smile_max'] > 1 and
x['normal_icon_asset'] == x['rank_max_icon_asset'], axis=1)
# Generate card basic stat and save it to JSON
card_basic_stat = dict()
for unit_id, row in df_unit.iterrows():
if not row['is_support']:
card_id, card_info = card_summary(unit_id)
card_basic_stat[str(card_id)] = card_info
with open(card_archive_dir, 'w') as fp:
json.dump(card_basic_stat, fp)
print('Basic card data has been saved in', card_archive_dir)
def update_live_data(download=False):
    """Download the latest live.db_ and dump per-song live-stage summaries
    (all difficulties except id 5) to the JSON archive.

    NOTE(review): the 'download' parameter is never used — the DB is always
    re-fetched; confirm whether it was meant to gate the download step.
    """
    def live_summary(live_setting_id):
        # map raw integer codes from the DB to display strings
        group_dict = {1:"μ's", 2:'Aqours'}
        attr_dict = {1:'Smile', 2:'Pure', 3:'Cool'}
        diff_dict = {1:'Easy', 2:'Normal', 3:'Hard', 4:'Expert', 6:'Master'}
        # closes over the DataFrames defined below in the outer function body
        setting = df_live_setting.loc[live_setting_id]
        track_info = df_live_track.loc[setting['live_track_id']]
        live_info = {
            'cover': cover_path(setting['live_icon_asset']),
            'name': track_info['name'],
            'group': group_dict[track_info['member_category']],
            'attr': attr_dict[setting['attribute_icon_id']],
            'note_number': int(setting['s_rank_combo']),
            'diff_level': diff_dict[setting['difficulty']],
            'diff_star': int(setting['stage_level']),
            'file_dir': live_path(setting['notes_setting_asset'])
        }
        return live_info
    print('Downloading latest live.db_')
    # NOTE(review): urllib.request.URLopener is deprecated; urlretrieve with a
    # Request or opener would be the modern equivalent.
    opener = urllib.request.URLopener()
    opener.addheader('User-Agent', 'whatever')
    opener.retrieve(live_db_download_url, live_db_dir)
    print('Generating basic live stats')
    conn = sqlite3.connect(live_db_dir)
    df_live_track = pd.read_sql('SELECT * FROM live_track_m', con=conn, index_col='live_track_id')
    df_live_setting = pd.read_sql('SELECT * FROM live_setting_m', con=conn, index_col='live_setting_id')
    # difficulty id 5 is excluded from the archive
    live_data = [live_summary(live_setting_id) for live_setting_id, row in df_live_setting.iterrows() if row['difficulty']!=5]
    with open(live_archive_dir, 'w') as fp:
        json.dump(live_data, fp)
print('Basic live data has been saved in', live_archive_dir) | [
"[email protected]"
] | |
95bd1f16c83e9df914341a1351bfc120644f6b06 | 147bc95b8b8a36014ec001bb1f79100095fa90a4 | /day13/类的三大特征_继承4.py | e05f51930dbbd35a1c8fd15ebd171e253fa7571a | [] | no_license | zhangwei725/PythonBase | fd20293b7f7ebee9f11a5df8f4761cad7f1ff4c7 | 6c9165caed48418eb55cf7622359105c9124e580 | refs/heads/master | 2020-04-30T00:32:40.360722 | 2019-04-03T01:41:51 | 2019-04-03T01:41:51 | 176,505,618 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 481 | py | """
super() 函数
"""
class Month:
    # Demo parent class: cooperative __init__ (calls super()) that sets an
    # instance attribute read later by code using the Child subclass.
    def __init__(self):
        super().__init__()
        self.name = '这个是母类的name'
class Father:
    # Demo parent class: its cooperative super().__init__() lets the MRO
    # continue into a sibling base (Month) when used in multiple inheritance.
    def __init__(self):
        super().__init__()
        print('这个是父类的__init__')
    def give_money(self):
        # prints only; returns None
        print('父类赚钱的方法')
class Child(Father,Month):
    # MRO is Child -> Father -> Month -> object, so Child() runs Father.__init__,
    # whose super() call chains into Month.__init__ (which sets self.name).
    pass
# Make a complex problem simple: cooperative super() lets Child reuse both parents' __init__ chains
if __name__ == '__main__':
    # Constructing Child exercises the cooperative-super chain: Father.__init__
    # prints, then Month.__init__ sets child.name.
    child = Child()
    print(child.name)
    # give_money() prints and returns None, so this line also prints 'None'.
    print(child.give_money())
| [
"[email protected]"
] | |
24daa541611085bb6699409a4ca2f5234c34ff57 | 4b9a0f99b8934de19f908edf5e2dd9ae40d1183e | /hw1fmodel.py | 74b479b8f3dd32353f7e322c040576598b1920ea | [] | no_license | harvey1673/pyqtlib | 316109c03ea48f64e0adde87789838e0d0c44c85 | 00ece212896d32a8974823ecb9bac052a754641b | refs/heads/master | 2023-08-29T20:46:32.092598 | 2023-08-25T01:14:00 | 2023-08-25T01:14:00 | 77,933,043 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 14,791 | py | #-*- coding:utf-8 -*-
from math import log, exp
from repoze.lru import lru_cache
import numpy as np
from scipy.stats import norm
from scipy.optimize import brentq as solver, minimize_scalar
import QuantLib
from curve import DiscountCurve
from misc import *
from finite_diff import FDM1D
from rate_options import BlackLognormalModel
class Volatility:
    def __init__(self, startdate, sigma=0.0, kappa=0.0):
        """Piecewise-constant volatility term structure for a one-factor
        Gaussian short-rate model.

        Args:
            startdate: anchor date object; startdate.t is assumed to be its
                time (year-fraction) coordinate — TODO confirm against the
                project's date class.
            sigma: initial flat volatility level.
            kappa: mean-reversion speed; magnitudes below 5e-5 are snapped to
                zero so the cheaper zero-kappa formulas are used throughout.
        """
        self.kappa = kappa if abs(kappa) > 5e-5 else 0.0 # this is about the optimal cutoff value
        self.startdate = startdate # startdate >= 0
        # t[0]=0 t[1] t[2] t[n-1] t[n]=999
        # |----------|----------|--- ... --|----------|
        # |---v[1]---| . . .
        # v[0] ---v[2]---| . .
        # --- ... --| .
        # ---v[n]---|
        # Two sentinel nodes: the start tenor and a far-future (+999y) cap;
        # bootstrapping inserts real tenors between them via __setitem__.
        self.tenors = startdate.t + np.array([0, 999.]) # tenors[0] can be greater than 0
        self.vars = np.array([sigma ** 2] * 2) # variances
        self.__update()
    def __update(self,):
        # Invalidate the memoized term-structure integrals; must be called
        # whenever self.tenors / self.vars change (see __init__ / __setitem__).
        self.__nodes.cache_clear()
        self.phi.cache_clear()
        self.chi.cache_clear()
def __setitem__(self, t, sigma):
"""update piecewise constant volatility of the longest maturity"""
if t == self.tenors[-2]: # updating, note that tenors[-1] = startdate + 999
self.vars[-2] = sigma ** 2.
elif t > self.tenors[-2]: # bootstrapping, insert one more entry
self.tenors = np.insert(self.tenors, -1, t)
self.vars = np.insert(self.vars, -1, sigma ** 2.)
else:
raise BaseException('tenor must be >= current tenor structure ...')
self.vars[0] = self.vars[1]
self.vars[-1] = self.vars[-2]
self.__update()
    def sig_sq(self, t):
        # Piecewise-constant variance lookup: searchsorted picks the tenor
        # bucket containing t (works elementwise if t is an array).
        return self.vars[np.searchsorted(self.tenors, t)]
    def bond_exp(self, s, t, T, x):
        """
        bond exponential under risk neutral measure: x ~ N(0, xi2(s,t,t,T))
        P(t,T) = P(s,t,T) * exp(-xi2(s,t,t,T)/2 - B(t,T)*chi(s,t,t,t) - B(t,T)*x)
        s0 < s < t < T, s & t must be scalar
        """
        # self._b is presumably the B(t,T) bond factor of the model; it is
        # defined outside this chunk — TODO confirm.
        return np.exp(-self.xi2(s,t,t,T) / 2 - self._b(t,T) * (self.chi(s,t,t,t) + x))
def xi2(self, s, t, T, V):
"""
variance of forward bond P(t,T,V):
xi2(s,t,T,V) = integral{(B(u,V)-B(u,T))^2*sigma(u)^2 * du} / 2 from s to t
= B(T,V)^2 * phi(s,t,T,T)
s0 < s < t < T < V, s & t must be scalar
"""
return self._b(T,V) ** 2 * self.phi(s,t,T,T)
@lru_cache(maxsize=100)
def __nodes(self, t): # t must be scalar
i = np.searchsorted(self.tenors, t) + 1
tnodes = self.tenors[:i] * 1 # make a copy, faster than numpy.copy()
tnodes[-1] = t
vnodes = self.vars[1:i]
return tnodes, vnodes
def __gamma(self, t, n):
if n == 0.0:
x = t
elif self.kappa == 0.0:
x = t ** (n + 1) / (n + 1)
else:
x = np.exp(n * self.kappa * t) / (n * self.kappa)
return np.diff(x)
@lru_cache(maxsize=100)
def phi(self, s, t, T, V):
"""
phi(s,t,T,V) = integral{E(u,T)*E(u,V)*sigma(u)^2*du} from s to t
s0 < s < t < T < V, s & t must be scalar
"""
def fn(u):
a, b = self.__nodes(u)
if self.kappa == 0.0:
return b.dot(self.__gamma(a,0))
else:
return b.dot(self.__gamma(a,2)) * exp(-self.kappa * (T + V))
return fn(t) if s is None else fn(t) - fn(s) # s is None means s = tenor[0]
@lru_cache(maxsize=100)
def chi(self, s, t, T, V):
"""
chi(s,t,T,V) = integral{E(u,T)*B(u,V)*sigma(u)^2*du} from s to t
s0 < s < t < T < V, s & t must be scalar
"""
k = self.kappa
g = self.__gamma
def fn(u):
a, b = self.__nodes(u)
if k == 0.0:
return b.dot(V * g(a,0) - g(a,1))
else:
return b.dot(exp(-k * T) * g(a,1) - exp(-k * (T + V)) * g(a,2)) / k
return fn(t) if s is None else fn(t) - fn(s) # s is None means s = tenor[0]
def psi(self, s, t, T, V):
"""
psi(s,t,T,V) = integral{B(u,T)*B(u,V)*sigma(u)^2*du} from s to t
s0 < s < t < T < V, s & t must be scalar
"""
k = self.kappa
g = self.__gamma
def fn(u):
a, b = self.__nodes(u)
if k == 0.0:
return b.dot(T * V * g(a,0) - (T + V) * g(a,1) + g(a,2))
else:
return b.dot(g(a,0) - (exp(-k * T) + exp(-k * V)) * g(a,1) + exp(-k * (T + V)) * g(a,2)) / k ** 2
return fn(t) if s is None else fn(t) - fn(s) # s is None means s = tenor[0]
def xvol(self, s=None, t=None): # total volatility of x
if t is None: t = self.tenors[-2] # = maturity
return self.phi(s,t,t,t) ** 0.5
def _b(self, t, T):
""" B(t,T) = (1 - E(t,T)) / kappa """
return T - t if self.kappa == 0.0 else (1 - np.exp(-self.kappa * (T - t))) / self.kappa
def _e(self, t, T):
""" E(t,T) = exp(-kappa * (T-t)) """
return 1.0 if self.kappa == 0.0 else exp(-self.kappa * (T - t))
def __repr__(self):
info = [str(self.startdate.origin + round(t * 365)) + ":\n " + str(v ** 0.5) for t, v in zip(self.tenors[:-1], self.vars[1:])]
return '\n'.join(info)
def test_volatility(self,s=.1,t=.3,T=.5,V=.7):
a = self.psi(s,t,V,V) - self.psi(s,t,T,T)
b = self.xi2(s,t,T,V) + 2 * self._b(T,V) * self.chi(s,t,T,T)
print(a, b, log(a / b))
class HullWhite1F:
    """One-factor Hull-White short-rate model.

    Combines a projection curve, a discount curve and a :class:`Volatility`
    term structure.  Supports calibration to cap/floor or swaption vol
    surfaces, supplies PDE coefficients for a 1D finite-difference solver,
    and generates Monte-Carlo paths under either the risk-neutral measure
    (``z_fwd_date is None``) or a Z-forward measure.
    """

    def __init__(self, proj, disc, spotdate, kappa=0, z_fwd_date=None):
        # proj: projection (forwarding) curve; disc: discounting curve.
        self.disc = disc
        self.proj = proj
        self.spotdate = spotdate
        self.vol = Volatility(startdate=spotdate, kappa=kappa)
        self.z_fwd_date = z_fwd_date  # None selects the risk-neutral measure

    # minimization function
    def __minimize(self, fn, bounds):
        # Bounded scalar minimization; used to solve for sigma in calibration.
        return minimize_scalar(fn, bounds=bounds, method='bounded').x

    def curves(self, t, x):
        """Return (proj, disc) curves conditional on the state x at time t.

        NOTE(review): when ``z_fwd_date`` is set and ``x`` is a NumPy array,
        ``x -= ...`` mutates the caller's array in place -- confirm callers
        do not reuse ``x`` afterwards.
        """
        # x adjustment from Z-forward to risk neutral: x_zf = x_rn + chi(s,t,t,Z)
        if self.z_fwd_date is not None:
            x -= self.vol.chi(None,t,t,self.z_fwd_date.t)
        def bump(curve):
            # Rebase the curve at t and apply the stochastic bond exponential.
            curve_t = curve(t)
            def newcurve(T):  # T can be float or Numpy.Array
                return curve(T) / curve_t * self.vol.bond_exp(None,t,T,x)
            return Curve.from_fn(t, newcurve)
        return bump(self.proj), bump(self.disc)

    #@np.vectorize
    def fn_abc(self, t, x):
        """PDE: dU/dt = a * d2U/dx2 + b * dU/dx + c * U"""
        if self.z_fwd_date is None:
            # risk-neutral measure coefficients
            a = self.vol.sig_sq(t) / -2.
            b = self.vol.kappa * x
            c = self.disc.forward(t) + self.vol.chi(None,t,t,t) + x
        else:
            # Z-forward measure: drift picks up an extra -B(t,Z)*sigma^2 term
            s2 = self.vol.sig_sq(t)
            B_tZ = self.vol._b(t, self.z_fwd_date.t)
            a = s2 / -2.
            b = -B_tZ * s2 + self.vol.kappa * x
            c = self.disc.forward(t) - B_tZ * self.vol.phi(None,t,t,t) + x
        return a, b, c

    def __caplet_value(self, libor, strike):
        """Closed-form caplet value (bond-option representation)."""
        ts = libor.effdate.t  # offset 2 days, so start from startdate, this makes tf = ts
        te = libor.matdate.t
        xi = self.vol.xi2(None,ts,ts,te) ** 0.5  # == xi2(s0,ts,ts,te), and xi2(s0,ts,ts,ts) = 0
        p = self.proj(ts) / self.proj(te)
        q = (1 + strike * libor.coverage)
        zstar = log(q / p) / xi - xi / 2.
        return self.disc(te) * (p * norm.cdf(-zstar) - q * norm.cdf(-zstar - xi))

    def __payer_swpn_value(self, swap, strike):
        """Payer swaption value via Jamshidian-style decomposition:
        solve for the critical normal draw zstar at which the swap value
        is zero, then price each cashflow as a bond option."""
        def eta(x):  # constant multiplicative spread between proj and disc curve
            return self.disc(x.matdate.t) / self.disc(x.effdate.t) * self.proj(x.effdate.t) / self.proj(x.matdate.t)
        # floating leg cashflows
        dateflows = [(swap.rleg.effdate, 1.), (swap.rleg.matdate, -1.)]  # P(T,te ) - P(T,tm )
        dateflows += [(p.index.effdate, eta(p.index) - 1.) for p in swap.rleg.cp]
        # fixed leg cashflows
        dateflows += [(p.paydate, -strike * p.accr_cov) for p in swap.pleg.cp]
        t = swap.effdate.t  # option expiry (should have been swap.fixdate, but has been offset to swap.effdate)
        def disc_tf(T, z):  # bond price under t-forward measure, a martingale
            # bond price: P(t,T)/P(s,t,T) = exp(-xi2(s,t,t,T)/2 - xi(s,t,t,T)*z), z ~ N(0,1)
            xi2 = self.vol.xi2(None,t,t,T)
            return self.disc(T) / self.disc(t) * np.exp(-xi2 / 2 - xi2 ** 0.5 * z)
        def find_zstar(z):
            return sum(cf * disc_tf(d.t,z) for d, cf in dateflows)  # sum of cashflows = 0
        # widen the root bracket until brentq succeeds
        bound = 5.0
        while True:
            try:
                zstar = solver(find_zstar, -bound, bound)  # z can be regarded as a standard normal
                break
            except ValueError:
                print("bound exception: z=%s" % bound)
                bound *= 2
        return sum(cf * self.disc(d.t) * norm.cdf(-zstar - self.vol.xi2(None,t,t,d.t) ** 0.5) for d, cf in dateflows)

    @classmethod
    def from_calibration(cls, proj, disc, volsurf, kappa=0.0, z_fwd_date=None, tradedate=None, tenor=None, lastdate=None):
        """Build a model and bootstrap its volatility from a vol surface.

        ``PRINT`` is a module-level debug flag -- presumably imported via
        ``from misc import *``; confirm its origin.
        """
        vanillas = volsurf.get_calibration_instruments(tradedate=tradedate, tenor=tenor, lastdate=lastdate)
        if PRINT:
            print('effdate\t matdate\t rate\t atm_value\t blackvol')
            for prod in vanillas:
                print('%s\t %s\t %s\t %s\t %s' % (prod.underlier.effdate, prod.underlier.matdate,
                                                  prod.forward, prod.value(), prod.stdev / prod.sqr_term))
        hw = cls(proj, disc, spotdate=volsurf.spotdate, kappa=kappa, z_fwd_date=z_fwd_date)
        hw.calibrate_to_vanillas(volsurf.mode, vanillas)
        if PRINT: print(hw.vol)
        return hw

    def calibrate_to_vanillas(self, mode, vanillas, strike=None):
        """Bootstrap sigma term structure so the model reprices each vanilla.

        mode selects the pricing routine ('capfloor' or 'swaption'); each
        instrument contributes one new node to ``self.vol``.
        """
        if mode == 'capfloor':
            hw_model_value = self.__caplet_value
        elif mode == 'swaption':
            hw_model_value = self.__payer_swpn_value
        else:
            raise BaseException('invalid calibration instruments ...')
        for opt in vanillas:
            if isinstance(opt, BlackLognormalModel) and opt.forward <= 0:
                continue  # ignore negative rates for lognormal process
            t = opt.underlier.effdate.t  # shifted by spotlag, rather than using fixdate
            k = opt.forward if strike is None else strike
            black_model_value = opt.value(strike=k)  #* 1.003
            def find_sigma(sigma):
                # squared pricing error as a function of the trial sigma
                self.vol[t] = sigma
                return (hw_model_value(opt.underlier, strike=k) - black_model_value) ** 2
            self.vol[t] = self.__minimize(find_sigma, bounds=(1e-6, 0.5))  # set to solved sigma

    def finite_difference(self, nt, nx, ns):
        """Build a 1D finite-difference solver driven by this model's PDE."""
        return FDM1D(self.vol.xvol(), nt=nt, nx=nx, ns=ns, fn_abc=self.fn_abc)

    def monte_carlo(self, timeline, npaths, rng='sobol', seed=None):
        """Generate paths over ``timeline`` and return a ``fetch_state``
        closure mapping a date to (state factor x, numeraire) per path."""
        np.random.seed(seed)
        def sample_generator(dim):  # generate normal samples
            if rng == 'sobol':
                sobol = QuantLib.SobolRsg(dim)  # not repeatable if dim > 16 or so (no idea why)
                zmat = np.array([sobol.nextSequence().value() for i in range(npaths)])
                zmat = norm.ppf(np.mod(zmat + np.random.uniform(size=dim), 1)).T  # shifted by uniform(0,1)
                for row in zmat:  # yield row by row, each row is a time slice
                    yield row
            elif rng == 'basic':
                for i in range(dim):
                    yield np.random.standard_normal(npaths)
            else:
                raise BaseException('invalid random number generator ...')
        tline = [d.t for d in timeline]  # convert from Date's to float's
        steps = len(tline) - 1  # excluding spotdate
        vol = self.vol
        if self.z_fwd_date is None:  # risk neutral measure
            # two correlated drivers per step: x (state) and y (integrated rate)
            sample = sample_generator(2 * steps)
            x = np.empty((steps, npaths))  # each row is a time slice, each column is a path
            y = np.empty_like(x)
            xv = yv = 0
            for i in range(steps):  # generate paths
                v, t = tline[i:i + 2]
                phi = vol.phi(v,t,t,t) ** 0.5
                psi = vol.psi(v,t,t,t) ** 0.5
                r0 = vol.chi(v,t,t,t) / phi / psi  # correlation
                r1 = (1 - r0 ** 2) ** 0.5
                z0 = next(sample)
                z1 = next(sample)
                yv += vol._b(v,t) * xv + psi * (r0 * z0 + r1 * z1)  # update yv using old xv
                xv = vol._e(v,t) * xv + phi * z0  # update xv
                x[i,:] = xv
                y[i,:] = yv
            def fetch_state(date):  # fetch the state vector @ date
                if date == timeline[0]:  # date is spotdate
                    return 0.0, 1.0
                else:
                    s, t = timeline[0].t, date.t
                    i = np.searchsorted(timeline, date) - 1
                    spot = self.disc(s) / self.disc(t)
                    return x[i,:], spot * np.exp(vol.psi(s,t,t,t) / 2 + y[i,:])  # stochastic factor and numeraire
        else:  # z-forward measure
            # single driver per step; numeraire is the Z-maturity bond
            sample = sample_generator(steps)
            x = np.empty((steps, npaths))  # each row is a time slice, each column is a path
            xv = 0
            for i in range(steps):  # generate paths
                v, t = tline[i:i + 2]
                xv = vol._e(v,t) * xv + next(sample) * vol.phi(v,t,t,t) ** 0.5
                x[i,:] = xv
            def fetch_state(date):  # fetch the state vector @ date
                s, t, z = timeline[0].t, date.t, self.z_fwd_date.t
                spot = self.disc(z) / self.disc(t)
                if date == timeline[0]:  # date is spotdate
                    return 0.0, spot
                else:
                    i = np.searchsorted(timeline, date) - 1
                    return x[i,:], spot * np.exp(vol.xi2(s,t,t,z) / 2 - vol._b(t,z) * x[i,:])  # stochastic factor and numeraire
        return fetch_state
if __name__ == '__main__':
    # Smoke test: build a Volatility with a six-step piecewise-constant
    # sigma term structure (new node every 50 days) and verify the
    # integral identity via test_volatility (printed log-ratio ~ 0).
    today = Date.set_origin(16, 7, 2014)
    d = today + 5
    dd = d
    vol = Volatility(startdate=d, kappa=0.0)
    for sigma in (0.01, 0.02, 0.03, 0.04, 0.05, 0.06):
        d += 50
        vol[d] = sigma
    vol.test_volatility()
| [
"[email protected]"
] | |
aa2772f0473880dd7c1525b8bb328ba123580ff4 | 1903aa0028dd91a128f1630c6eb9a1f3467ed951 | /src/icemac/addressbook/browser/conftest.py | 129602467fc097430f592c46ab552416cb159f7d | [
"LicenseRef-scancode-unknown-license-reference",
"MIT",
"BSD-2-Clause"
] | permissive | icemac/icemac.addressbook | a5ae04feb2b2fb1f0ecc7bf3e60b1666f5bfedd8 | 6197e6e01da922feb100dd0943576523050cd703 | refs/heads/master | 2021-01-03T12:44:08.365040 | 2020-08-06T06:50:51 | 2020-08-06T06:51:05 | 242,137,124 | 2 | 0 | BSD-2-Clause | 2020-04-12T07:41:02 | 2020-02-21T12:43:45 | Python | UTF-8 | Python | false | false | 1,562 | py | # -*- coding: utf-8 -*-
from icemac.addressbook.interfaces import IPerson
import icemac.addressbook.testing
import pytest
import zope.component.hooks
# Fixtures to set-up infrastructure which are usable in tests,
# see also in ./fixtures.py (which are imported via src/../conftest.py):
@pytest.yield_fixture(scope='function')
def person_with_field_data(personWithFieldDataS):
    """Provide predefined person data, see `personWithFieldDataS`.

    Wraps the session-scoped data in a per-test DemoStorage stack so each
    test can mutate the address book without affecting other tests.
    NOTE(review): ``pytest.yield_fixture`` is deprecated in modern pytest;
    plain ``pytest.fixture`` supports yield fixtures.
    """
    for connection in icemac.addressbook.testing.pyTestStackDemoStorage(
            personWithFieldDataS.zodb, 'PersonWithFieldFunction'):
        yield connection
# Infrastructure fixtures
@pytest.yield_fixture(scope='session')
def personWithFieldDataS(
        addressBookS, FullPersonFactory, PostalAddressFactory, KeywordFactory,
        PhoneNumberFactory, EMailAddressFactory, HomepageAddressFactory,
        FieldFactory):
    """Create base data used in person tests.

    Session-scoped: creates one user-defined field ('foobar') on IPerson
    and one fully populated person, stored on a DemoStorage layer that
    function-scoped fixtures stack on top of.
    """
    for connection in icemac.addressbook.testing.pyTestStackDemoStorage(
            addressBookS, 'SearchSession'):
        address_book = connection.rootFolder['ab']
        # operations below need the address book set as the active site
        with zope.component.hooks.site(address_book):
            field_name = FieldFactory(
                address_book, IPerson, 'TextLine', u'foobar').__name__
            icemac.addressbook.conftest._create_person(
                address_book, FullPersonFactory, PostalAddressFactory,
                KeywordFactory, PhoneNumberFactory, EMailAddressFactory,
                HomepageAddressFactory, **{field_name: u'my value'})
        yield connection
| [
"[email protected]"
] | |
0637e7a3438aebc0e79656f9593180645405e8d1 | fb316d849ff0616a959eb38b007d8f6ee06cd5e8 | /lib/sqlalchemy/future/engine.py | d3b13b51077df1abe20a17d1a5a1f7c0636abdef | [
"MIT"
] | permissive | miguelgrinberg/sqlalchemy | e37c04ac7cb7749ca23fdc80f9ee6e93d8e8cfad | 3d55263c92ee29a0257d823124c353a35246cf31 | refs/heads/master | 2023-09-01T13:28:51.022925 | 2020-05-19T22:55:16 | 2020-05-19T22:56:11 | 265,706,107 | 6 | 0 | MIT | 2020-05-20T23:16:34 | 2020-05-20T23:16:33 | null | UTF-8 | Python | false | false | 16,535 | py | from .. import util
from ..engine import Connection as _LegacyConnection
from ..engine import create_engine as _create_engine
from ..engine import Engine as _LegacyEngine
from ..engine.base import OptionEngineMixin
NO_OPTIONS = util.immutabledict()


def create_engine(*arg, **kw):
    """Create a new :class:`_future.Engine` instance.

    Takes the same arguments as the 1.x :func:`_sa.create_engine`
    function; the only difference is that the object returned is the
    2.0-style :class:`._future.Engine`.
    """
    options = dict(kw, _future_engine_class=Engine)
    return _create_engine(*arg, **options)
class Connection(_LegacyConnection):
    """SQLAlchemy 2.0-style wrapper for a DB-API connection.

    Largely the same API as the 1.x :class:`_engine.Connection`, with
    these behavioral differences:

    * statement execution returns the 2.0 :class:`_engine.Result` object
    * "autobegin": the first call to :meth:`.execute` unconditionally
      starts a transaction, controlled via :meth:`.commit` and
      :meth:`.rollback`
    * no autocommit; no COMMIT is emitted until explicitly requested,
      either via :meth:`.commit` or a committing context manager such as
      the one returned by :meth:`_future.Engine.begin`
    * :meth:`.begin_nested` returns a SAVEPOINT-based
      :class:`_engine.NestedTransaction` that is independent of the
      connection's own transaction; committing or rolling back the
      connection acts on the overall transaction, not the SAVEPOINT
    * "nested" connections and transactions no longer exist
    """

    _is_future = True

    def _branch(self):
        # Connection "branching" is a legacy-only feature.
        raise NotImplementedError(
            "sqlalchemy.future.Connection does not support "
            "'branching' of new connections."
        )

    def begin(self):
        """Begin a transaction explicitly, ahead of autobegin.

        The transaction would be begun in any case when the connection
        first executes a statement; calling this method lets the
        :meth:`_events.ConnectionEvents.begin` event fire at a chosen
        time, or structures code as context-managed blocks::

            with engine.connect() as conn:
                with conn.begin():
                    conn.execute(...)
                    conn.execute(...)

        This is not fundamentally different from executing statements and
        calling :meth:`_future.Connection.commit` afterwards; in both
        forms an exception prevents the commit, and the connection's
        context manager rolls the transaction back on :meth:`.close`.

        From the database's point of view no SQL is emitted and the DBAPI
        connection is unchanged -- the Python DBAPI has no explicit BEGIN.

        :return: a :class:`_engine.Transaction` object supporting
         context-manager use (commit on success, rollback on error).
         Its "scope" ends when :meth:`_engine.Transaction.commit` or
         :meth:`_engine.Transaction.rollback` is called.  Nested calls to
         :meth:`_future.Connection.begin` return new
         :class:`_engine.Transaction` objects emulating a transaction
         within the enclosing one: only the outermost object's
         ``commit()`` actually commits, while ``rollback()`` on any of
         them rolls the transaction back.

        .. seealso::

            :meth:`_future.Connection.begin_nested` - use a SAVEPOINT

            :meth:`_future.Connection.begin_twophase` -
            use a two phase /XID transaction

            :meth:`_future.Engine.begin` - context manager available from
            :class:`_future.Engine`

        """
        return super(Connection, self).begin()

    def begin_nested(self):
        """Begin a nested (SAVEPOINT) transaction and return its handle,
        an instance of :class:`_engine.NestedTransaction`.

        Requires SAVEPOINT support in the underlying database.  Any
        transaction in the hierarchy may ``commit`` and ``rollback``,
        but the outermost transaction still controls the overall
        ``commit`` or ``rollback`` of the transaction as a whole.

        In SQLAlchemy 2.0 the :class:`_engine.NestedTransaction` is
        independent of the :class:`_future.Connection` itself: calling
        :meth:`_future.Connection.commit` or
        :meth:`_future.Connection.rollback` always affects the actual
        containing database transaction, never the SAVEPOINT.  When the
        database transaction commits, established SAVEPOINTs are cleared
        and their data changes are committed as well.

        .. seealso::

            :meth:`_future.Connection.begin`

        """
        return super(Connection, self).begin_nested()

    def commit(self):
        """Commit the transaction currently in progress.

        Has no effect if no transaction was started (assuming the
        connection is in a non-invalidated state).  A transaction begins
        automatically when a statement is first executed, or via
        :meth:`_future.Connection.begin`.

        .. note:: acts only on the primary database transaction linked to
           this :class:`_future.Connection`; a SAVEPOINT created by
           :meth:`_future.Connection.begin_nested` is committed via
           :meth:`_engine.NestedTransaction.commit` on its own handle.

        """
        trans = self._transaction
        if trans:
            trans.commit()

    def rollback(self):
        """Roll back the transaction currently in progress.

        Has no effect if no transaction was started; if one was started
        and the connection is invalidated, the transaction is cleared.
        A transaction begins automatically when a statement is first
        executed, or via :meth:`_future.Connection.begin`.

        .. note:: acts only on the primary database transaction linked to
           this :class:`_future.Connection`; a SAVEPOINT created by
           :meth:`_future.Connection.begin_nested` is rolled back via
           :meth:`_engine.NestedTransaction.rollback` on its own handle.

        """
        trans = self._transaction
        if trans:
            trans.rollback()

    def close(self):
        """Close this :class:`_future.Connection`.

        Also invokes :meth:`_future.Connection.rollback` for any
        transaction still in place.

        """
        super(Connection, self).close()

    def execute(self, statement, parameters=None, execution_options=None):
        r"""Execute a SQL statement construct and return a
        :class:`_engine.Result`.

        :param statement: the statement to execute; always an object in
         both the :class:`_expression.ClauseElement` and
         :class:`_expression.Executable` hierarchies, including
         :class:`_expression.Select`, :class:`_expression.Insert` /
         :class:`_expression.Update` / :class:`_expression.Delete`,
         :class:`_expression.TextClause` /
         :class:`_expression.TextualSelect`, and :class:`_schema.DDL` /
         :class:`_schema.DDLElement` objects.

        :param parameters: bind parameters; either a dict of names to
         values (DBAPI ``cursor.execute()``) or a mutable sequence of
         dicts (DBAPI ``cursor.executemany()``).

        :param execution_options: optional dict of execution options
         associated with this execution; a subset of the options accepted
         by :meth:`_future.Connection.execution_options`.

        :return: a :class:`_engine.Result` object.

        """
        opts = execution_options if execution_options else NO_OPTIONS
        return self._execute_20(statement, parameters, opts)

    def scalar(self, statement, parameters=None, execution_options=None):
        r"""Execute a SQL statement construct and return a scalar object.

        Shorthand for :meth:`_future.Connection.execute` followed by
        :meth:`_engine.Result.scalar`; parameters are equivalent.

        :return: a scalar Python value representing the first column of
         the first row returned.

        """
        result = self.execute(statement, parameters, execution_options)
        return result.scalar()
class Engine(_LegacyEngine):
    """SQLAlchemy 2.0-style :class:`~.engine.Engine`.

    Connects a :class:`_pool.Pool` and a :class:`_engine.Dialect`
    together as a source of database connectivity and behavior.
    Instantiated publicly via the
    :func:`~sqlalchemy.future.create_engine` function.

    .. seealso::

        :doc:`/core/engines`

        :ref:`connections_toplevel`

    """

    _connection_cls = Connection
    _is_future = True

    def _not_implemented(self, *arg, **kw):
        raise NotImplementedError(
            "This method is not implemented for SQLAlchemy 2.0."
        )

    # Engine-level execution / inspection APIs from 1.x are removed.
    transaction = _not_implemented
    run_callable = _not_implemented
    execute = _not_implemented
    scalar = _not_implemented
    _execute_clauseelement = _not_implemented
    _execute_compiled = _not_implemented
    table_names = _not_implemented
    has_table = _not_implemented

    def _run_ddl_visitor(self, visitorcallable, element, **kwargs):
        # TODO: this is for create_all support etc. not clear if we
        # want to provide this in 2.0, that is, a way to execute SQL where
        # they aren't calling "engine.begin()" explicitly; DDL may be a
        # special case.  A big win here is that the full DDL sequence runs
        # inside one transaction rather than COMMIT per statement.
        with self.begin() as conn:
            conn._run_ddl_visitor(visitorcallable, element, **kwargs)

    @classmethod
    def _future_facade(cls, legacy_engine):
        # Build a 2.0-style Engine sharing the legacy engine's state.
        return Engine(
            legacy_engine.pool,
            legacy_engine.dialect,
            legacy_engine.url,
            logging_name=legacy_engine.logging_name,
            echo=legacy_engine.echo,
            hide_parameters=legacy_engine.hide_parameters,
            execution_options=legacy_engine._execution_options,
        )

    def begin(self):
        """Return a :class:`_future.Connection` with a transaction begun.

        Used like :meth:`_future.Engine.connect`, typically as a context
        manager, which maintains the transaction state automatically:
        :meth:`_future.Connection.commit` when the block ends normally, or
        :meth:`_future.Connection.rollback` when an exception is raised,
        before propagating it outwards::

            with engine.begin() as connection:
                connection.execute(text("insert into table values ('foo')"))

        .. seealso::

            :meth:`_future.Engine.connect`

            :meth:`_future.Connection.begin`

        """
        return super(Engine, self).begin()

    def connect(self):
        """Return a new :class:`_future.Connection` object.

        The :class:`_future.Connection` acts as a Python context manager::

            with engine.connect() as connection:
                connection.execute(text("insert into table values ('foo')"))
                connection.commit()

        When the block completes, the connection is "closed" and its
        DBAPI resources return to the pool; any transaction begun
        explicitly or via autobegin is rolled back, emitting the
        :meth:`_events.ConnectionEvents.rollback` event if one had been
        started and was still in progress.

        .. seealso::

            :meth:`_future.Engine.begin`

        """
        return super(Engine, self).connect()
class OptionEngine(OptionEngineMixin, Engine):
    # Engine variant produced when execution options are applied to an
    # existing Engine; the mixin copies state from the parent engine.
    pass


# Hook the future Engine into the option-engine machinery.
Engine._option_cls = OptionEngine
| [
"[email protected]"
] | |
b23d5173c1f776683ce83e53f7ea9ec44fa942ab | ecd6824977a83bc6d54963d1a1052bcc49dba9da | /tradingsystem/settings.py | ae08f05341ed42a42dfc0b080cbcdca167170621 | [] | no_license | jinoespinola/tradingsystem | 5d00a7a2bf94343122abd98579bb979309111632 | 918d40e8acf60a0e9edcfd924e2bccc3b7f7bc25 | refs/heads/master | 2023-08-23T09:44:03.277119 | 2021-10-26T12:23:11 | 2021-10-26T12:23:11 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,833 | py | """
Django settings for tradingsystem project.
Generated by 'django-admin startproject' using Django 3.2.8.
For more information on this file, see
https://docs.djangoproject.com/en/3.2/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/3.2/ref/settings/
"""
from pathlib import Path
# Build paths inside the project like this: BASE_DIR / 'subdir'.
# Build paths inside the project like this: BASE_DIR / 'subdir'.
BASE_DIR = Path(__file__).resolve().parent.parent


# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.2/howto/deployment/checklist/

# SECURITY WARNING: keep the secret key used in production secret!
# NOTE(review): key is hard-coded in source control -- load it from an
# environment variable before any production deployment.
SECRET_KEY = 'django-insecure-u@f8g@()fyyr%_v%xft0b34!oc^(mq2yn7h_62+8(a(wko-cwe'

# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True

ALLOWED_HOSTS = []


# Application definition

INSTALLED_APPS = [
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    'rest_framework',
    'rest_framework.authtoken',
    'tradingsystem',
]

MIDDLEWARE = [
    'django.middleware.security.SecurityMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    # NOTE(review): CSRF protection is disabled -- presumably for API
    # testing convenience; confirm before exposing unsafe methods.
    #'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
]

ROOT_URLCONF = 'tradingsystem.urls'

TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
            ],
        },
    },
]

WSGI_APPLICATION = 'tradingsystem.wsgi.application'


# Database
# https://docs.djangoproject.com/en/3.2/ref/settings/#databases

DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': BASE_DIR / 'db.sqlite3',
    }
}


# Password validation
# https://docs.djangoproject.com/en/3.2/ref/settings/#auth-password-validators

AUTH_PASSWORD_VALIDATORS = [
    {
        'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
    },
]


# Internationalization
# https://docs.djangoproject.com/en/3.2/topics/i18n/

LANGUAGE_CODE = 'en-us'

TIME_ZONE = 'UTC'

USE_I18N = True

USE_L10N = True

USE_TZ = True


# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.2/howto/static-files/

STATIC_URL = '/static/'

# Default primary key field type
# https://docs.djangoproject.com/en/3.2/ref/settings/#default-auto-field

DEFAULT_AUTO_FIELD = 'django.db.models.BigAutoField'

REST_FRAMEWORK = {
    # Use Django's standard `django.contrib.auth` permissions,
    # or allow read-only access for unauthenticated users.
    'DEFAULT_PERMISSION_CLASSES': [
        'rest_framework.permissions.DjangoModelPermissionsOrAnonReadOnly',
        #'rest_framework.permissions.DjangoModelPermissions',
        #'rest_framework.authentication.TokenAuthentication',
    ],
    'DEFAULT_AUTHENTICATION_CLASSES': (
        'rest_framework.authentication.TokenAuthentication',
    ),
}
"[email protected] config --global user.name Jino"
] | [email protected] config --global user.name Jino |
3836078788f27000967c875cd7431a23997e0982 | a92d2add8aea04da46f94c6ab1f0d1af0ee843aa | /qytPytorch/core/vocabulary.py | e782a4307915dcb2e6adf825a09703a3f0f96784 | [
"Apache-2.0"
] | permissive | q759729997/qytPytorch | 34280445fb527b76b99b7b03c9f74eac34182d96 | b9b4b6aeff67596c493871c0842dc72c5b66c548 | refs/heads/master | 2021-03-22T19:05:46.089603 | 2020-04-19T15:39:37 | 2020-04-19T15:39:37 | 247,394,065 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,261 | py | """
module(vocabulary) - 词典模块.
Main members:
# get_tokenized_vocab - 构造词典.
# save_vocab_words - 保存词典中的单词与单词id.
"""
import codecs
import collections
import torchtext.vocab as Vocab
def get_tokenized_vocab(tokenized_data, min_freq=5):
    """Build a torchtext vocabulary from tokenized data.

    @params:
        tokenized_data - tokenized corpus: a list of token lists,
                         e.g. [[words], [words]].
        min_freq - minimum token frequency for inclusion in the Vocab.
    @return:
        On success - a torchtext Vocab object.
        On failure - error information.
    """
    # Flatten all sentences and count token frequencies.
    tokenized_counter = collections.Counter(tk for st in tokenized_data for tk in st)
    # Bug fix: the min_freq parameter was previously ignored (a literal 5
    # was hard-coded in the Vocab call); honor the caller's value.
    return Vocab.Vocab(tokenized_counter, min_freq=min_freq)
def save_vocab_words(vocab_obj, file_name):
    """Save the vocabulary's words and their ids to a file.

    Each line is "<word>\\t<word_id>" in UTF-8.

    @params:
        vocab_obj - Vocab object exposing ``itos`` (list of words) and
                    ``stoi`` (word -> id mapping).
        file_name - output file path.
    @return:
        On success - None (the file is written as a side effect).
        On failure - error information.
    """
    # Built-in open() with an explicit encoding replaces the legacy
    # codecs.open(); the context manager guarantees the file is closed.
    with open(file_name, mode='w', encoding='utf8') as fw:
        for word in vocab_obj.itos:
            word_id = vocab_obj.stoi[word]
            fw.write('{}\t{}\n'.format(word, word_id))
fw.write('{}\t{}\n'.format(word, word_id))
| [
"[email protected]"
] | |
9e6dafe244592ad4dbf36d2cc49cc8d03283b10c | d0fc62123774a7b0ce592f5b137b4e08702fc9ae | /Huffman_coding/asgi.py | ab8993a5da42fadbca325633c019f2abff099555 | [] | no_license | dheerajk7/Text-Compression | 4fb324db9ae444cfc72d3ebfc12522d19dcb60d3 | 667f40ea1c9b981f74c7709caeb53bd4c552d718 | refs/heads/master | 2022-09-01T01:14:10.611806 | 2020-05-27T04:48:08 | 2020-05-27T04:48:08 | 255,061,674 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 405 | py | """
ASGI config for Huffman_coding project.
It exposes the ASGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/3.0/howto/deployment/asgi/
"""
import os
from django.core.asgi import get_asgi_application
# Point Django at the project's settings module before the app is built.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'Huffman_coding.settings')
# Module-level ASGI callable that ASGI servers (daphne/uvicorn) import.
application = get_asgi_application()
| [
"[email protected]"
] | |
cca60f5d6dbbaffd9037eaa7163785fe3df4a6d8 | 81f2560004d0c654ed228bc3848f04afdc64b3a5 | /gui/graphicItems/commandWidgets/smallGenericCommand.py | 873c51b32344dc376be2c7256b2f4a852162560f | [] | no_license | timoButtenbruck/microRay | d9ba6988315bbd474d355cae4a2e1d5ed7e0d83e | 811afb79c712038c86795a8b0d3b6d9c5c2f43f2 | refs/heads/master | 2023-06-26T06:15:53.196082 | 2021-07-22T11:43:48 | 2021-07-22T11:43:48 | 388,088,064 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 24,951 | py | # -*- encoding: utf-8 -*-
import time
from PyQt4 import QtGui, QtCore
from core.command import Command
from core.engineeringNotation.engineeringNotation import EngNumber
from baseCommand import BaseCommand
from gui.graphicItems.floatValidator import FloatValidator
from gui.graphicItems.lineEditDoubleClickSpecial import LineEditDoubleClickSpecial
from gui.graphicItems.listWidgetSpecial import ListWidgetSpecial
from gui.graphicItems.button import SymbolButton
from gui.graphicItems.commandWidgets.smallGenericCommandSettingsWindow import SmallGenericCommandSettingsWindow
from gui.constants import *
class SmallGenericCommand(BaseCommand):
    def __init__(self, command):
        """Build the compact command widget for *command*.

        Creates and wires the four alternative input widgets (line edit,
        toggle button, switch button, slider), the pending-mode button,
        the value-history popup and the settings dialog.  NOTE: the order
        in which the QGraphicsProxyWidgets are created determines the tab
        order, so the construction sequence below matters.
        """
        super(SmallGenericCommand, self).__init__(command)
        self.command.inputMethodChanged.connect(self.actualizeInputMethod)
        # Static geometry: the widget is split into a header (label) band
        # on top and an edit band below it.
        self.width = 370
        self.height = 70
        self.hValueSpace = 100
        self.vValueSpace = 25
        self.labelAreaHeight = 30
        self.editAreaHeight = self.height - self.labelAreaHeight
        self.editAreaHCenter = self.labelAreaHeight + 0.5 * self.editAreaHeight
        # Thin horizontal line separating header and edit area.
        self.separatingLinePath = QtGui.QPainterPath()
        self.separatingLinePath.moveTo(1, self.labelAreaHeight)
        self.separatingLinePath.lineTo(self.width - 2, self.labelAreaHeight)
        self.commandNameFont = QtGui.QFont("sans-serif", 12, QtGui.QFont.Bold)
        self.otherFont = QtGui.QFont("sans-serif", 12)
        self.redFont = QtGui.QFont("sans-serif", 12)
        self.blackPen = QtGui.QPen(QtCore.Qt.black)
        self.valueLineEdit = self._layoutLineEdit(LineEditDoubleClickSpecial())
        # Popup list showing previously sent values.
        self.historyListWidget = ListWidgetSpecial()
        listWidgetProxy = QtGui.QGraphicsProxyWidget()
        listWidgetProxy.setWidget(self.historyListWidget)
        self.historyListWidget.closeMe.connect(self.closeHistory)
        self.historyListWidget.valueSelected.connect(self.setFromHistory)
        self.historyListWidget.hide()
        # 'P' button toggles pending-send mode; red while pending mode is on.
        self.pendingButton = SymbolButton(SymbolButton.TEXT, parent=self)
        self.pendingButton.setPos(55, self.editAreaHCenter - 0.5 * self.pendingButton.boundingRect().height())
        self.pendingButton.clicked.connect(self.togglePendingMode)
        if self.command.getPendingSendMode() is True:
            self.pendingButton.symbol.setColor(QtCore.Qt.red)
        else:
            self.pendingButton.symbol.setColor(QtCore.Qt.darkGray)
        self.pendingButton.drawBorder = False
        self.pendingButton.symbol.setText(u"P")
        # 'T' button: pulse to the upper limit, then fall back (see littleTimer).
        self.toggleButton = SymbolButton(SymbolButton.TEXT, parent=self)
        self.toggleButton.setPos(85, self.editAreaHCenter - 0.5 * self.pendingButton.boundingRect().height())
        self.toggleButton.clicked.connect(self.switchToMaxAndThenToMin)
        self.toggleButton.symbol.setText(u"T")
        self.toggleButton.hide()
        # self.toggleButton.drawBorder = False
        borderPen = QtGui.QPen()
        borderPen.setColor(QtGui.QColor(0, 0, 0))
        borderPen.setCosmetic(True)
        borderPen.setWidth(1)
        self.toggleButton.borderPen = borderPen
        # Switch button: alternates between upper and lower limit on each click.
        self.switchButton = SymbolButton(SymbolButton.TEXT, parent=self)
        self.switchButton.setPos(85, self.editAreaHCenter - 0.5 * self.switchButton.boundingRect().height())
        self.switchButton.clicked.connect(self.toggleMaxAndMin)
        self.switchButton.hide()
        # self.switchButton.drawBorder = False
        self.switchButton.borderPen = borderPen
        self.switchBoxState = False
        self.switchButton.symbol.setText(u"1")
        if self.command.valueOfLastResponse > 0.5:
            self.switchBoxState = True
            self.switchButton.symbol.setText(u"0")
        # Horizontal slider input widget (wrapped in a proxy for the scene).
        self.horizontalSlider = QtGui.QSlider()
        self.horizontalSliderProxy = QtGui.QGraphicsProxyWidget(self)
        self.horizontalSliderProxy.setWidget(self.horizontalSlider)
        self.horizontalSlider.setGeometry(QtCore.QRect(85, self.editAreaHCenter - 0.5 * self.switchButton.boundingRect().height(), 110, 28))
        self.horizontalSlider.setOrientation(QtCore.Qt.Horizontal)
        self.horizontalSlider.setMinimum(0)
        self.horizontalSlider.setMaximum(100)
        self.horizontalSlider.sliderReleased.connect(self.updateSlider)
        self.sliderLowerLimit = 0
        self.sliderUpperLimit = 100
        self.horizontalSlider.hide()
        # the order of initializing the proxies affects the tab order
        self.valueLineEditProxy = QtGui.QGraphicsProxyWidget(self)
        self.valueLineEditProxy.setWidget(self.valueLineEdit)
        # Layout rectangles for the painted labels and the return value.
        self.commandNameLabelRect = QtCore.QRectF(10, 0, self.width - 50, self.labelAreaHeight)
        self.inputLabelRect = QtCore.QRectF(10, self.labelAreaHeight, 50, self.editAreaHeight)
        self.pendingLabelRect = QtCore.QRectF(55, self.labelAreaHeight, 10, self.editAreaHeight)
        self.returnLabelRect = QtCore.QRectF(205, self.labelAreaHeight, 50, self.editAreaHeight)
        self.returnValueRect = QtCore.QRectF(self.width - 10 - self.hValueSpace,
                                             self.editAreaHCenter - 0.5 * self.vValueSpace,
                                             self.hValueSpace,
                                             self.vValueSpace)
        self.valueLineEdit.move(85, self.editAreaHCenter - 0.5 * self.valueLineEdit.height())
        self.valueLineEditValidator = FloatValidator()
        self.valueLineEdit.setValidator(self.valueLineEditValidator)
        # self.valueLineEdit.setText(str(self.command.getValue()))
        self.valueLineEdit.editingFinished.connect(self.valueEditingFinished)
        self.valueLineEdit.returnPressed.connect(self.valueEditingReturnPressed)
        self.valueLineEdit.downArrowPressed.connect(self.showHistory)
        self.valueLineEdit.setAttribute(QtCore.Qt.WA_TranslucentBackground)
        self.valueLineEdit.doubleClick.connect(self.showHistory)
        # Style sheets for the three visual states of the value line edit.
        self.userInputWarningStyleSheet = """.LineEditDoubleClickSpecial {
                                                background-color: orange;
                                                border: 1px solid black; }"""
        self.normalStyleSheet = """.LineEditDoubleClickSpecial {
                                        background-color: white;
                                        border: 1px solid black; }"""
        self.valuePendingStyleSheet = """.LineEditDoubleClickSpecial {
                                        border: 3px solid red; }"""
        self.valueLineEdit.setStyleSheet(self.normalStyleSheet)
        self.valueLineEdit.hide()
        # Gear button in the header opens the settings dialog.
        self.settingsButton = SymbolButton(SymbolButton.SETTINGS, self)
        self.settingsButton.setPos(self.width - self.settingsButton.boundingRect().width() - 5, 2)
        self.settingsButton.drawBorder = False
        self.settingsButton.clicked.connect(self.showSettingsWindow)
        self.settingsWindow = SmallGenericCommandSettingsWindow()
        self.settingsWindow.lineEditMin.setValidator(FloatValidator())
        self.settingsWindow.lineEditMax.setValidator(FloatValidator())
        self.onePixelGrayPen = QtGui.QPen()
        self.onePixelGrayPen.setWidth(1)
        # self.onePixelGrayPen.setCosmetic(True)
        self.onePixelGrayPen.setColor(QtGui.QColor(50, 50, 50))
        self.onePixelLightGrayPen = QtGui.QPen()
        self.onePixelLightGrayPen.setWidth(1)
        self.onePixelLightGrayPen.setCosmetic(True)
        self.onePixelLightGrayPen.setColor(QtGui.QColor(200, 200, 200))
        # self.onePixelLightGrayPen.setColor(QtCore.Qt.black)
        self.pendingValuePen = QtGui.QPen()
        self.pendingValuePen.setColor(PENDING_VALUE_COLOR)
        self.pendingValuePenGray = QtGui.QPen()
        self.pendingValuePenGray.setColor(QtCore.Qt.darkGray)
        # Pre-built painter paths used by paint().
        self.boundingRectPath = QtGui.QPainterPath()
        self.boundingRectPath.addRect(0, 0, 200, self.height)
        self.headerAreaPath = QtGui.QPainterPath()
        self.headerAreaPath.addRect(0, 0, self.width, self.labelAreaHeight)
        self.headerAreaBrush = QtGui.QBrush(QtGui.QColor(0, 153, 153, 50))
        self.editAreaPath = QtGui.QPainterPath()
        self.editAreaPath.addRect(0, self.labelAreaHeight, self.width, self.editAreaHeight)
        self.editAreaBrush = QtGui.QBrush(QtGui.QColor(0, 153, 250, 30))
        self.outerPath = QtGui.QPainterPath()
        self.outerPath.moveTo(0, 0)
        self.outerPath.lineTo(self.width, 0)
        self.outerPath.lineTo(self.width, self.height)
        self.outerPath.lineTo(0, self.height)
        self.outerPath.closeSubpath()
        self.returnValueRectPath = QtGui.QPainterPath()
        self.returnValueRectPath.addRect(self.returnValueRect)
        # One-shot timer used by the 'T' pulse buttons; falls back to the
        # command's lower limit via switchToMin.
        self.littleTimer = QtCore.QTimer()
        self.littleTimer.setSingleShot(True)
        self.littleTimer.timeout.connect(self.switchToMin)
        self.actualizeInputMethod()
def showHistory(self):
# print self.command.history
self.historyListWidget.clear()
for aValueString in self.command.history:
self.historyListWidget.addItem(str(aValueString))
self.historyListWidget.show()
self.historyListWidget.move(self.valueLineEdit.pos().x() + 0, self.valueLineEdit.pos().y() + self.valueLineEdit.height())
self.historyListWidget.setFocus(QtCore.Qt.PopupFocusReason)
self.historyListWidget.setCurrentRow(0)
    def closeHistory(self):
        """Hide the history popup (slot for the list widget's closeMe signal)."""
        self.historyListWidget.hide()
    def setFromHistory(self, value):
        """Apply *value* picked in the history popup and dismiss the popup."""
        self.command.setValue(value)
        self.historyListWidget.hide()
def _layoutLineEdit(self, lineEdit):
lineEdit.setFixedSize(self.hValueSpace, self.vValueSpace)
p = QtGui.QPalette()
p.setBrush(QtGui.QPalette.Window, QtGui.QBrush(QtGui.QColor(0,0,0,0)))
# label.setStyleSheet("""border: none; background-color: rgba(0, 0, 0, 0);""") #; margin-top: 8px """)
lineEdit.setPalette(p)
# lineEdit.move(self.width - 10 - self.valueLineEdit.width(), self.labelAreaHeight + 0.5 * self.editAreaHeight - 0.5 * self.valueLineEdit.height())
# lineEdit.setText("0.0")
# lineEdit.setMaxLength(6)
lineEdit.setFont(QtGui.QFont("sans-serif", 12))
return lineEdit
    def switchToOneAndThenToZero(self):
        """Send 1.0 and arm the one-shot timer to fall back shortly after.

        NOTE(review): littleTimer.timeout is connected to switchToMin (not
        switchToZero) in __init__, so the fall-back value is the command's
        lower limit, not necessarily 0.0 - confirm this is intended.
        """
        self.command.setValue(1.0)
        self.toggleButton.symbol.setText(u"0")
        self.littleTimer.start(100)
    def switchToZero(self):
        """Send 0.0 and restore the toggle button's 'T' label."""
        self.command.setValue(0.0)
        self.toggleButton.symbol.setText(u"T")
def toggleOneAndZero(self):
if self.switchBoxState is False:
self.command.setValue(1.0)
self.switchButton.symbol.setText(u"0")
else:
self.command.setValue(0.0)
self.switchButton.symbol.setText(u"1")
self.switchBoxState = not self.switchBoxState
    def switchToMaxAndThenToMin(self):
        """Pulse: send the upper limit, then fall back to the lower limit.

        The one-shot littleTimer (wired to switchToMin in __init__) performs
        the fall-back 100 ms later.
        """
        self.command.setValue(self.command.getUpperLimit())
        self.toggleButton.symbol.setText(u"0")
        self.littleTimer.start(100)
    def switchToMin(self):
        """Send the lower limit and restore the toggle button's 'T' label."""
        self.command.setValue(self.command.getLowerLimit())
        self.toggleButton.symbol.setText(u"T")
def toggleMaxAndMin(self):
if self.switchBoxState is False:
self.command.setValue(self.command.getUpperLimit())
self.switchButton.symbol.setText(u"0")
else:
self.command.setValue(self.command.getLowerLimit())
self.switchButton.symbol.setText(u"1")
self.switchBoxState = not self.switchBoxState
    def showSettingsWindow(self):
        """Populate the settings dialog from the command and apply on accept."""
        self.settingsWindow.lineEditDisplayName.setText(self.command.displayName)
        self.settingsWindow.lineEditMin.setText(str(self.command.getLowerLimit()))
        self.settingsWindow.lineEditMax.setText(str(self.command.getUpperLimit()))
        # Pre-select the radio button matching the current input method.
        if self.command.getInputMethod() == Command.VALUE_INPUT:
            self.settingsWindow.radioButtonValueMode.setChecked(True)
        elif self.command.getInputMethod() == Command.TOGGLE_INPUT:
            self.settingsWindow.radioButtonToggleMode.setChecked(True)
        elif self.command.getInputMethod() == Command.SWITCH_INPUT:
            self.settingsWindow.radioButtonSwitchMode.setChecked(True)
        elif self.command.getInputMethod() == Command.SLIDER_INPUT:
            self.settingsWindow.radioButtonSliderMode.setChecked(True)
        self.settingsWindow.checkBoxPendingMode.setChecked(self.command.getPendingSendMode())
        # Modal dialog; only apply the settings when the user accepted.
        if self.settingsWindow.exec_() == QtGui.QDialog.Accepted:
            self.setSettingsFromSettingsWindow()
        else:
            pass
    def setSettingsFromSettingsWindow(self):
        """Apply the accepted settings-dialog values to the command and widgets."""
        # Hide every input widget first; the chosen one is re-shown below.
        self.valueLineEdit.hide()
        self.toggleButton.hide()
        self.switchButton.hide()
        self.horizontalSlider.hide()
        self.command.displayName = unicode(self.settingsWindow.lineEditDisplayName.text())
        self.command.setPendingSendMode(self.settingsWindow.checkBoxPendingMode.isChecked())
        # Limits must be applied before the slider range is recalculated.
        minText = self.settingsWindow.lineEditMin.text()
        self.setMinimum(minText)
        maxText = self.settingsWindow.lineEditMax.text()
        self.setMaximum(maxText)
        if self.settingsWindow.radioButtonValueMode.isChecked() is True:
            self.valueLineEdit.show()
            self.command.setInputMethod(Command.VALUE_INPUT)
        if self.settingsWindow.radioButtonSwitchMode.isChecked() is True:
            self.switchButton.show()
            self.command.setInputMethod(Command.SWITCH_INPUT)
        if self.settingsWindow.radioButtonToggleMode.isChecked() is True:
            self.toggleButton.show()
            self.command.setInputMethod(Command.TOGGLE_INPUT)
        if self.settingsWindow.radioButtonSliderMode.isChecked() is True:
            self.horizontalSlider.show()
            self.command.setInputMethod(Command.SLIDER_INPUT)
            self.updateSliderRange()
        self.update()
def updateSliderRange(self):
low = self.command.getLowerLimit()
high = self.command.getUpperLimit()
self.horizontalSlider.setMinimum(low)
self.horizontalSlider.setMaximum(high)
self.horizontalSlider.setValue(low)
self.command.setValue(self.horizontalSlider.value())
# if low < 0 and high > 0:
# self.sliderLowerLimit = -50
# self.sliderUpperLimit = 50
# elif low == 0 and high > 0:
# self.sliderLowerLimit = 0
# self.sliderUpperLimit = 100
# elif low < 0 and high == 0:
# self.sliderLowerLimit = -100
# self.sliderUpperLimit = 0
# elif low < 0 and high < 0:
# self.sliderLowerLimit = -101
# self.sliderUpperLimit = -1
# elif low > 0 and high > 0:
# self.sliderLowerLimit = 1
# self.sliderUpperLimit = 101
# self.horizontalSlider.setMinimum(self.sliderLowerLimit)
# self.horizontalSlider.setMaximum(self.sliderUpperLimit)
self.update()
def updateSlider(self):
# lowerLimit = self.command.getLowerLimit()
# upperLimit = self.command.getUpperLimit()
# limitDif = upperLimit - lowerLimit
#
# sliderDif = self.sliderUpperLimit - self.sliderLowerLimit
#
# self.horizontalSlider.setTickInterval(100)
# self.command.setValue(self.horizontalSlider.value() * limitDif / sliderDif)
self.command.setValue(self.horizontalSlider.value())
def actualizeInputMethod(self):
self.valueLineEdit.hide()
self.switchButton.hide()
self.toggleButton.hide()
self.horizontalSlider.hide()
if self.command.getInputMethod() == Command.VALUE_INPUT:
self.valueLineEdit.show()
if self.command.getInputMethod() == Command.SWITCH_INPUT:
self.switchButton.show()
if self.command.getInputMethod() == Command.TOGGLE_INPUT:
self.toggleButton.show()
if self.command.getInputMethod() == Command.SLIDER_INPUT:
self.horizontalSlider.show()
# TODO move to settings window
def setMinimum(self, text):
if text == "":
min = 0
else:
min = float(text)
if min > self.command.getUpperLimit():
self.command.setLowerLimit(self.command.getUpperLimit(), self)
else:
self.command.setLowerLimit(min, self)
def setMaximum(self, text):
if text == "":
max = 0
else:
max = float(text)
if max < self.command.getLowerLimit():
self.command.setUpperLimit(self.command.getLowerLimit(), self)
else:
self.command.setUpperLimit(max, self)
    def valueEditingFinished(self):
        """Clear the edit when focus is lost and no pending value exists."""
        if self.command.getPendingValue() is None and self.valueLineEdit.hasFocus() is False:
            self.valueLineEdit.clear()
            self.valueLineEdit.setStyleSheet(self.normalStyleSheet)
def valueEditingReturnPressed(self):
text = self.valueLineEdit.text()
# print "command given", text
self.command.clearPendingValue()
# if nothing is in the textBox, the lower limit of the command will be set to the text box but not send
if len(text) is 0:
# self.valueLineEdit.setText(str(self.command.getLowerLimit()))
# self.valueLineEdit.setCursorPosition(len(self.valueLineEdit.text()))
# self.valueLineEdit.selectAll()
self.activateUserInputWarning()
else:
# allowed for the decimal point are a comma and a dot
text = text.replace(",", ".")
number = float(text)
if number < self.command.getLowerLimit():
self.valueLineEdit.setText(str(self.command.getLowerLimit()))
self.valueLineEdit.setCursorPosition(len(self.valueLineEdit.text()))
# self.valueLineEdit.selectAll()
self.activateUserInputWarning()
elif number > self.command.getUpperLimit():
self.valueLineEdit.setText(str(self.command.getUpperLimit()))
self.valueLineEdit.setCursorPosition(len(self.valueLineEdit.text()))
# self.valueLineEdit.selectAll()
self.activateUserInputWarning()
else:
self.command.setValue(number, self)
self.clearUserInputWarning()
self.valueLineEdit.clear()
if self.command.getPendingSendMode() is True:
self.valueLineEdit.setStyleSheet(self.valuePendingStyleSheet)
self.valueLineEdit.setText(str(self.command.getPendingValue()))
# TODO how to set color back to black
# if self.command.getPendingSendMode() is True:
# self.valueLineEdit.setStyleSheet(""".LineEditDoubleClickSpecial { color: red; }""")
# self.valueLineEdit.setText(str(self.command.getValue()))
# self.valueLineEdit.setCursorPosition(0)
# self.valueLineEdit.selectAll()
    def valueChangedPerWidget(self, widgetInstance):
        """BaseCommand hook; this widget needs no extra reaction."""
        pass
    def minChangedPerWidget(self, widgetInstance):
        """BaseCommand hook; this widget needs no extra reaction."""
        pass
    def maxChangedPerWidget(self, widgetInstance):
        """BaseCommand hook; this widget needs no extra reaction."""
        pass
def togglePendingMode(self):
if self.command.getPendingSendMode() is True:
self.command.setPendingSendMode(False)
else:
self.command.setPendingSendMode(True)
    @QtCore.pyqtSlot()
    def pendingModeChanged(self, command=None):
        """Sync the 'P' button colour and the line edit with the new mode."""
        super(SmallGenericCommand, self).pendingModeChanged(command)
        if self.command.getPendingSendMode() is False:
            # Leaving pending mode: reset the edit to its normal look.
            self.valueLineEdit.setStyleSheet(self.normalStyleSheet)
            self.valueLineEdit.clear()
            self.pendingButton.symbol.setColor(QtCore.Qt.darkGray)
        else:
            self.pendingButton.symbol.setColor(QtCore.Qt.red)
    @QtCore.pyqtSlot()
    def pendingValueCanceled(self, command=None):
        """Reset the line edit after a pending value was discarded."""
        super(SmallGenericCommand, self).pendingValueCanceled(command)
        self.valueLineEdit.clear()
        self.valueLineEdit.setStyleSheet(self.normalStyleSheet)
def sameValueReceived(self):
super(SmallGenericCommand, self).sameValueReceived()
# self.returnValueDisplay.setStyleSheet(
# """.LineEditDoubleClickSpecial { background-color: lightgray;
# border-style: solid;
# border-color: black; }""")
self.update()
# overwrites method of super class
def differentValueReceived(self):
# this call is needed to start the blink timer
super(SmallGenericCommand, self).differentValueReceived()
# self.returnValueDisplay.setStyleSheet(
# """.LineEditDoubleClickSpecial { background-color: red;
# border-style: solid;
# border-color: black; }""")
# self.valueLineEdit.setText(str(self.command.getValue()))
# self.valueLineEdit.setCursorPosition(0)
    def activateUserInputWarning(self):
        """Flag invalid user input: super starts the warning, edit turns orange."""
        super(SmallGenericCommand, self).activateUserInputWarning()
        self.valueLineEdit.setStyleSheet(self.userInputWarningStyleSheet)
    def clearUserInputWarning(self):
        """Remove the input warning and restore the edit's normal style."""
        super(SmallGenericCommand, self).clearUserInputWarning()
        self.valueLineEdit.setStyleSheet(self.normalStyleSheet)
    def paint(self, QPainter, QStyleOptionGraphicsItem, QWidget_widget=None):
        """Paint header band, edit band, labels, return value and borders.

        Painting order matters: fills first, then text, then the outline,
        and finally the separator line with antialiasing switched off so
        it stays a crisp one-pixel line.
        """
        QPainter.setRenderHint(QtGui.QPainter.Antialiasing, True)
        # draw background of the label
        QPainter.fillPath(self.headerAreaPath, self.headerAreaBrush)
        # draw background of edit area
        QPainter.fillPath(self.editAreaPath, self.editAreaBrush)
        # # draw a warning
        # if self.showUserInputWarning is True:
        #     QPainter.fillPath(self.editAreaPath, self.userInputWarningBrush)
        # # draw a warning
        # if self.showCommFailureWarning is True:
        #     QPainter.fillPath(self.editAreaPath, self.commFailureWarningBrush)
        # draw background of return value
        QPainter.fillPath(self.returnValueRectPath, QtGui.QBrush(QtGui.QColor(200, 200, 200)))
        # draw a warning
        if self.showDifferentValueReceivedWarning is True:
            QPainter.fillRect(self.returnValueRect, self.differentValueReceivedWarningBrush)
        # draw return value
        QPainter.setFont(self.otherFont)
        # engNumber = EngNumber(self.command.valueOfLastResponse)
        # numberToShow = "".format(engNumber.__repr__())
        # if isinstance(self.command.valueOfLastResponse, float):
        #     numberToShow = u"{:0.2E}".format(self.command.valueOfLastResponse)
        # if isinstance(self.command.valueOfLastResponse, int):
        #     numberToShow = u"{}".format(self.command.valueOfLastResponse)
        QPainter.drawText(self.returnValueRect,
                          QtCore.Qt.AlignLeft | QtCore.Qt.AlignVCenter,
                          QtCore.QString(str(self.command.getValue())))
        # QtCore.QString(str(self.command.valueOfLastResponse)))
        # QtCore.QString(u"{:0.2e}".format(self.command.valueOfLastResponse)))
        # QtCore.QString(numberToShow))
        QPainter.setPen(self.blackPen)
        # draw the command name (fall back to the raw name when no display name is set)
        QPainter.setFont(self.commandNameFont)
        if len(self.command.displayName) > 0:
            QPainter.drawText(self.commandNameLabelRect,
                              QtCore.Qt.AlignLeft | QtCore.Qt.AlignVCenter,
                              QtCore.QString(self.command.displayName))
        else:
            QPainter.drawText(self.commandNameLabelRect,
                              QtCore.Qt.AlignLeft | QtCore.Qt.AlignVCenter,
                              QtCore.QString(self.command.name))
        # draw input label
        QPainter.setFont(self.otherFont)
        QPainter.drawText(self.inputLabelRect,
                          QtCore.Qt.AlignLeft | QtCore.Qt.AlignVCenter,
                          QtCore.QString(u"Input"))
        # draw return value label
        QPainter.setFont(self.otherFont)
        QPainter.drawText(self.returnLabelRect,
                          QtCore.Qt.AlignLeft | QtCore.Qt.AlignVCenter,
                          QtCore.QString(u"aktuell"))
        # draw bounding paths
        QPainter.setPen(self.onePixelGrayPen)
        # QPainter.drawPath(self.headerAreaPath)
        # QPainter.drawPath(self.editAreaPath)
        QPainter.drawPath(self.outerPath)
        # draw a line to separate edit area and header area
        QPainter.setRenderHint(QtGui.QPainter.Antialiasing, False)
        QPainter.setPen(self.onePixelLightGrayPen)
        QPainter.drawPath(self.separatingLinePath)
    def boundingRect(self):
        """Outer bounds of the widget in item coordinates."""
        return QtCore.QRectF(0, 0, self.width, self.height)
| [
"[email protected]"
] | |
de3f99c8e32612bd00580afe644786500f42771b | 6ed034d0a5e239d7b0c528b287451409ffb4a494 | /configs/animal/2d_kpt_sview_rgb_img/topdown_heatmap/apt36k/ViTPose_large_apt36k_256x192.py | d4ae268d4c68f35ac2d757c15406706f90483d4e | [
"Apache-2.0"
] | permissive | ViTAE-Transformer/ViTPose | 8f9462bd5bc2fb3e66de31ca1d03e5a9135cb2bf | d5216452796c90c6bc29f5c5ec0bdba94366768a | refs/heads/main | 2023-05-23T16:32:22.359076 | 2023-03-01T06:42:22 | 2023-03-01T06:42:22 | 485,999,907 | 869 | 132 | Apache-2.0 | 2023-03-01T06:42:24 | 2022-04-27T01:09:19 | Python | UTF-8 | Python | false | false | 4,296 | py | _base_ = [
'../../../../_base_/default_runtime.py',
'../../../../_base_/datasets/ap10k.py'
]
# Evaluate mAP every 10 epochs and keep the best-AP checkpoint.
evaluation = dict(interval=10, metric='mAP', save_best='AP')
optimizer = dict(
    type='Adam',
    lr=5e-4,
)
optimizer_config = dict(grad_clip=None)
# learning policy: step schedule with linear warmup; LR drops at epochs 170 and 200.
lr_config = dict(
    policy='step',
    warmup='linear',
    warmup_iters=500,
    warmup_ratio=0.001,
    step=[170, 200])
total_epochs = 210
log_config = dict(
    interval=1,
    hooks=[
        dict(type='TextLoggerHook'),
        # dict(type='TensorboardLoggerHook')
    ])
# 17 keypoint channels; all of them are used for training and inference.
channel_cfg = dict(
    num_output_channels=17,
    dataset_joints=17,
    dataset_channel=[
        [0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16],
    ],
    inference_channel=[
        0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16
    ])
# model settings
# Top-down pose model: ViT-Large backbone (embed_dim=1024, 24 blocks,
# 16 heads) + simple deconvolution heatmap head.
model = dict(
    type='TopDown',
    pretrained=None,
    backbone=dict(
        type='ViT',
        img_size=(256, 192),
        patch_size=16,
        embed_dim=1024,
        depth=24,
        num_heads=16,
        ratio=1,
        use_checkpoint=False,
        mlp_ratio=4,
        qkv_bias=True,
        drop_path_rate=0.3,
    ),
    keypoint_head=dict(
        type='TopdownHeatmapSimpleHead',
        in_channels=1024,
        num_deconv_layers=2,
        num_deconv_filters=(256, 256),
        num_deconv_kernels=(4, 4),
        extra=dict(final_conv_kernel=1, ),
        out_channels=channel_cfg['num_output_channels'],
        loss_keypoint=dict(type='JointsMSELoss', use_target_weight=True)),
    train_cfg=dict(),
    # Test-time: flip augmentation with heatmap shift + modulated (UDP-style) decoding kernel.
    test_cfg=dict(
        flip_test=True,
        post_process='default',
        shift_heatmap=True,
        modulate_kernel=11))
# Shared data settings: 192x256 input crops with 48x64 heatmaps; ground-truth
# bounding boxes are used (no detector bbox file).
data_cfg = dict(
    image_size=[192, 256],
    heatmap_size=[48, 64],
    num_output_channels=channel_cfg['num_output_channels'],
    num_joints=channel_cfg['dataset_joints'],
    dataset_channel=channel_cfg['dataset_channel'],
    inference_channel=channel_cfg['inference_channel'],
    soft_nms=False,
    nms_thr=1.0,
    oks_thr=0.9,
    vis_thr=0.2,
    use_gt_bbox=True,
    det_bbox_thr=0.0,
    bbox_file='',
)
# Training pipeline: flip / half-body / scale-rotation augmentation,
# ImageNet normalisation, then Gaussian target heatmap generation.
train_pipeline = [
    dict(type='LoadImageFromFile'),
    dict(type='TopDownRandomFlip', flip_prob=0.5),
    dict(
        type='TopDownHalfBodyTransform',
        num_joints_half_body=8,
        prob_half_body=0.3),
    dict(
        type='TopDownGetRandomScaleRotation', rot_factor=40, scale_factor=0.5),
    dict(type='TopDownAffine'),
    dict(type='ToTensor'),
    dict(
        type='NormalizeTensor',
        mean=[0.485, 0.456, 0.406],
        std=[0.229, 0.224, 0.225]),
    dict(type='TopDownGenerateTarget', sigma=2),
    dict(
        type='Collect',
        keys=['img', 'target', 'target_weight'],
        meta_keys=[
            'image_file', 'joints_3d', 'joints_3d_visible', 'center', 'scale',
            'rotation', 'bbox_score', 'flip_pairs'
        ]),
]
# Validation pipeline: affine crop + normalisation only (no augmentation,
# no target generation).  The test pipeline is identical.
val_pipeline = [
    dict(type='LoadImageFromFile'),
    dict(type='TopDownAffine'),
    dict(type='ToTensor'),
    dict(
        type='NormalizeTensor',
        mean=[0.485, 0.456, 0.406],
        std=[0.229, 0.224, 0.225]),
    dict(
        type='Collect',
        keys=['img'],
        meta_keys=[
            'image_file', 'center', 'scale', 'rotation', 'bbox_score',
            'flip_pairs'
        ]),
]
test_pipeline = val_pipeline
data_root = 'data/apt36k'
# Dataset wiring: the APT-36K annotations are loaded through the
# AnimalAP10KDataset class (same 17-keypoint format); the val split is
# reused for testing.
data = dict(
    samples_per_gpu=32,
    workers_per_gpu=4,
    val_dataloader=dict(samples_per_gpu=32),
    test_dataloader=dict(samples_per_gpu=32),
    train=dict(
        type='AnimalAP10KDataset',
        ann_file=f'{data_root}/annotations/train_annotations_1.json',
        img_prefix=f'{data_root}/',
        data_cfg=data_cfg,
        pipeline=train_pipeline,
        dataset_info={{_base_.dataset_info}}),
    val=dict(
        type='AnimalAP10KDataset',
        ann_file=f'{data_root}/annotations/val_annotations_1.json',
        img_prefix=f'{data_root}/',
        data_cfg=data_cfg,
        pipeline=val_pipeline,
        dataset_info={{_base_.dataset_info}}),
    test=dict(
        type='AnimalAP10KDataset',
        ann_file=f'{data_root}/annotations/val_annotations_1.json',
        img_prefix=f'{data_root}/',
        data_cfg=data_cfg,
        pipeline=val_pipeline,
        dataset_info={{_base_.dataset_info}}),
)
| [
"[email protected]"
] | |
1318158f62eb23b882fccf35446671a7a62a59c1 | 9a2983c37b4b3982262250d2dce33b0938912c05 | /Control_Structures/Lists.py | cb012742f2575deae528446a3ebc0b3a4d2dc4ac | [] | no_license | 00Bren00/hello-world | 6d1de2f8f0b7bbdb13e4b26628429f7b447a906f | 8c1ff1c8d86dee8293715af7ea17260bd62d6806 | refs/heads/master | 2021-01-20T17:47:56.592813 | 2017-05-12T20:36:18 | 2017-05-12T20:36:18 | 90,889,917 | 0 | 0 | null | 2017-05-12T20:36:19 | 2017-05-10T17:18:54 | Python | UTF-8 | Python | false | false | 1,982 | py | str = "Hello world!"
# Tutorial: basic list operations (indexing, concatenation, membership,
# mutation, ranges).  A local 'text' variable is used so the block does
# not rely on the builtin-shadowing 'str' variable defined above.
text = "Hello world!"
print(text[6])  # w

words = ["Hello", "world", "!"]
print(words[0])
print(words[1])
print(words[2])

nums = [1, 2, 3]
print(nums + [4, 5, 6])  # concatenation
print(nums * 3)          # repetition - lists can be added and multiplied like strings

words = ["spam", "egg", "spam", "sausage"]
print("spam" in words)    # True
print("egg" in words)     # True
print("tomato" in words)  # False - 'in' checks list membership
# The 'in' operator is also used to test whether a string is a substring
# of another string.

nums = [1, 2, 3]
print(not 4 in nums)
print(4 not in nums)
print(not 3 in nums)
print(3 not in nums)
# 'not' can be combined with 'in'

nums = [1, 2, 3]
nums.append(4)
print(nums)  # append adds an item to the end of a list

nums = [1, 3, 5, 2, 4]
print(len(nums))  # 5 - len() returns the number of items in a list

words = ["Python", "fun"]
index = 1
words.insert(index, "is")
print(words)  # ['Python', 'is', 'fun'] - insert adds at a specific index

letters = ['p', 'q', 'r', 's', 'p', 'u']
print(letters.index('r'))  # 2 - index of the first occurrence
print(letters.index('p'))  # 0
# index() raises ValueError when the item is missing.  Bug fix: the
# original called letters.index('z') unguarded, which aborted the rest
# of the script - demonstrate the error without crashing instead.
try:
    print(letters.index('z'))
except ValueError as err:
    print("ValueError:", err)

# More useful functions and methods for lists:
# max(list): item with the maximum value
# min(list): item with the minimum value
# list.count(obj): how many times an item occurs
# list.remove(obj): remove an object from a list
# list.reverse(): reverse the objects in a list

numbers = list(range(10))
print(numbers)  # [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]
# range() creates a range object; convert with list() to use it as a list.

numbers = list(range(3, 8))
print(numbers)  # [3, 4, 5, 6, 7]
print(range(20) == range(0, 20))

numbers = list(range(5, 20, 2))
print(numbers)  # [5, 7, 9, 11, 13, 15, 17, 19]
"[email protected]"
] | |
6303abe03f9194cd8be87372d7b768e2ee779bec | d544961463f04cc41d76ad914bab8797ad8dfe7c | /canteenGraphql/query.py | 710ab8e64343df835bb90970f6b8934727fb42be | [
"MIT"
] | permissive | djunicode/canteen-automation-web | 0f979efdeb892d2b7c3fd6571fe4efabe9df1052 | 3202d92b50ce91f6808c90ed415e9b6568195a1e | refs/heads/master | 2023-08-10T00:43:09.393558 | 2023-07-29T07:14:43 | 2023-07-29T07:14:43 | 162,983,460 | 7 | 24 | MIT | 2023-01-04T09:15:31 | 2018-12-24T11:24:20 | JavaScript | UTF-8 | Python | false | false | 1,106 | py | from graphene import Field, List, ObjectType, Int, Boolean
from .types import *
class Query(ObjectType):
    """Root GraphQL query exposing orders, bills, categories and the menu."""
    # Optional boolean filter; when omitted, all orders are returned.
    orders = List(OrderType, is_fulfilled=Boolean(required=False))
    order_by_id = Field(OrderType, id=Int())
    bills = List(BillType)
    bill_by_id = Field(BillType, id=Int())
    categories = List(CategoryType)
    category_by_id = Field(CategoryType, id=Int())
    menu = List(MenuItemType)
    def resolve_orders(self, info, is_fulfilled=None):
        # Filter by fulfilment state only when the argument was supplied.
        if is_fulfilled is not None:
            return Order.objects.filter(is_fulfilled=is_fulfilled)
        else:
            return Order.objects.all()
    def resolve_order_by_id(self, info, id):
        # NOTE: Order.objects.get raises DoesNotExist for unknown ids.
        return Order.objects.get(id=id)
    def resolve_bills(self, info):
        return Bill.objects.all()
    def resolve_bill_by_id(self, info, id):
        return Bill.objects.get(id=id)
    def resolve_categories(self, info):
        return Category.objects.all()
    def resolve_category_by_id(self, info, id):
        return Category.objects.get(id=id)
    def resolve_menu(self, info):
        return MenuItem.objects.all()
| [
"[email protected]"
] | |
07eeaa1fdda36825d0f06a3dbd3817a06dd68bf9 | 819ecc18033a487512f58d39f9f3535381b95df2 | /fynd/search_indexes.py | 9df42291ce82298360aae2a383a9a0c9c5d69d1c | [] | no_license | saket-mishra/MovieEye---IMDB-like-API-integration-with-rest-framework | 1da934de802ed747a95d36aa029a0f53b25c9b24 | cb34fb98e4265a5eb51c3551c59e7745a3246b48 | refs/heads/master | 2021-01-10T03:56:22.547951 | 2015-12-28T12:32:47 | 2015-12-28T12:32:47 | 48,690,948 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 555 | py | import datetime
from haystack import indexes
from movieapi.models import Movie
class MovieIndex(indexes.SearchIndex, indexes.Indexable):
    """Haystack search index for Movie, searchable by name and director."""
    # Primary document field; content is rendered from the index template
    # (search/indexes/<app>/movie_text.txt by Haystack convention).
    text = indexes.CharField(document=True, use_template=True)
    movie_name = indexes.CharField(model_attr='movie_name')
    director = indexes.CharField(model_attr='director')

    def get_model(self):
        """Return the model class this index describes."""
        return Movie

    def index_queryset(self, using=None):
        """Used when the entire index for model is updated."""
        return self.get_model().objects.all()

# Bug fix: removed `site.register(Movie, MovieIndex)` - `site` was never
# imported (NameError on module import), and Haystack >= 2 discovers
# indexes automatically via the `indexes.Indexable` mixin.
| [
"[email protected]"
] | |
6834bd6a5d5334b0726f2d3e7537a65691494423 | 6246d7a09f2a8f115f4be472e7711559284f8129 | /venv/Scripts/pip3-script.py | 049c8d056ec15b4138e79a4466509c869099f740 | [] | no_license | TarenaWebOnlineStore/onlineStore | 6403ca1118a80c8be08a16a5ba76a366d905399d | 97d4cbc2f0944e30060aa2651baf4ff07977466b | refs/heads/master | 2020-03-31T07:46:59.217993 | 2018-10-08T07:03:53 | 2018-10-08T07:03:53 | 152,034,135 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 416 | py | #!C:\Users\GS63\PycharmProjects\onlineStore\venv\Scripts\python.exe
# EASY-INSTALL-ENTRY-SCRIPT: 'pip==10.0.1','console_scripts','pip3'
# Auto-generated console-script shim created for this virtualenv; it
# dispatches to pip's 'pip3' console entry point.  Do not edit by hand -
# it is regenerated whenever pip is (re)installed.
__requires__ = 'pip==10.0.1'
import re
import sys
from pkg_resources import load_entry_point
if __name__ == '__main__':
    # Strip the '-script.py'/'.exe' suffix so pip sees a clean argv[0].
    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
    sys.exit(
        load_entry_point('pip==10.0.1', 'console_scripts', 'pip3')()
    )
| [
"[email protected]"
] | |
3307f57428527be72fbc6e8c0681dec11411b8ca | 7eb17fd6d726b421cbb8e158dad0da97a23b4d07 | /zomato-review.py | e340c33576f9a5853864bf9b25a857332ef93e8d | [] | no_license | harrysaini/zomato-review-scrapper | 4589f3254c847ef386e7fc63e728d49c016c115a | 584bf17bc40bb908ce1fa0300fc853a1a537e762 | refs/heads/master | 2020-03-16T07:49:08.752639 | 2018-05-08T09:17:17 | 2018-05-08T09:17:17 | 132,584,115 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,664 | py | """
Python class to scrap data for a particular restaurant whose zomato link is given
"""
import re
import urllib
from urllib import parse
from bs4 import BeautifulSoup
from urllib.parse import urlparse
import urllib.request
from selenium import webdriver
from selenium.common.exceptions import *
from bs4 import NavigableString
import sys
import json
import time
# Module-wide Selenium driver shared by all ZomatoRestaurant instances.
browser = None
try:
    browser = webdriver.Firefox()
except Exception as error:
    # If Firefox/geckodriver is unavailable the module still imports,
    # but `browser` stays None and any scraping will fail later.
    print(error)
class ZomatoRestaurant:
def __init__(self, url):
self.url = url
print("opening")
self.html_text = None
try:
browser.get(self.url)
self.html_text = browser.page_source
# self.html_text = urllib.request.urlopen(url).read().decode('utf-8')
# self.html_text = requests.get(url).text
except Exception as err:
print(str(err))
return
else:
print('Access successful.')
self.soup = None
if self.html_text is not None:
self.soup = BeautifulSoup(self.html_text)
def load_all_reviews(self):
browser.implicitly_wait(7);
elem = browser.find_element_by_class_name('load-more')
while(elem.is_displayed()):
print(elem.text);
elem.click()
browser.implicitly_wait(4)
try:
elem = browser.find_element_by_class_name('load-more')
except NoSuchElementException:
print("No element found")
break;
def get_reviews(self):
try:
browser.get(self.url+'/reviews')
self.review_html_text = browser.page_source
except Exception as err:
print(str(err))
return
self.reviewSoup = BeautifulSoup(self.review_html_text)
link = browser.find_element_by_xpath("//a[@data-sort='reviews-dd']");
print("waaiting 15 s")
time.sleep(15)
# link.click();
# action = webdriver.common.action_chains.ActionChains(browser)
# action.move_to_element(link)
# action.perform()
print("clicking all review btn " + link.text);
link.click()
self.load_all_reviews();
new_source = browser.execute_script("return document.body.innerHTML")
print(new_source)
new_soup = BeautifulSoup(new_source)
reviews_body = new_soup.find_all('div' , attrs = {"class" : "res-review-body"})
reviews = [];
for review_body in reviews_body:
review = dict()
review_text = review_body.find('div' , attrs = {"class" : "rev-text"}).text.strip()
review_author = review_body.find('div' , attrs = {"class" : "header"}).text.strip()
review_date = review_body.find('time').text.strip()
review_text = " ".join(review_text.split()[1:])
review_author = " ".join(review_author.split())
review['text'] = review_text;
review['author'] = review_author;
review['date'] = review_date;
reviews.append(review);
print(review);
return reviews;
def scrap(self):
if self.soup is None:
return {}
soup = self.soup
rest_details = dict()
name_anchor = soup.find("a", attrs={"class": "ui large header left"})
if name_anchor:
rest_details['name'] = name_anchor.text.strip()
else:
rest_details['name'] = ''
rating_div = soup.find("div", attrs={"class": re.compile("rating-for")})
if rating_div:
rest_details['rating'] = rating_div.text.strip()[:-2]
else:
rest_details['rating'] = 'N' # default
contact_span = soup.find("span", attrs={"class": 'tel'})
if contact_span:
rest_details['contact'] = contact_span.text.strip()
else:
rest_details['contact'] = ''
cuisine_box = soup.find('div', attrs={'class': 'res-info-cuisines clearfix'})
rest_details['cuisines'] = []
if cuisine_box:
for it in cuisine_box.find_all('a', attrs={'class': 'zred'}):
rest_details['cuisines'].append(it.text)
geo_locale = soup.find("div", attrs={"class": "resmap-img"})
if geo_locale:
geo_url = geo_locale.attrs['data-url']
parsed_url = urlparse(geo_url)
geo_arr = str(urllib.parse.parse_qs(parsed_url.query)['center']).split(',')
rest_details['geo_location'] = [re.sub("[^0-9\.]", "", geo_arr[0]), re.sub("[^0-9\.]", "", geo_arr[1])]
if 'geo_location' not in rest_details:
rest_details['geo_location'] = ['undefined', 'undefined']
price_two_tag = soup.find('div', attrs={'class': 'res-info-detail'})
if price_two_tag:
price_two_tag = price_two_tag.find('span', attrs={'tabindex': '0'})
if price_two_tag:
rest_details['price_two'] = re.sub("[^0-9]", "", price_two_tag.text.strip())
price_beer_tag = soup.find('div', attrs={'class': 'res-info-detail'})
if price_beer_tag:
price_beer_tag = price_beer_tag.find('div', attrs={'class': 'mt5'})
if price_beer_tag:
rest_details['price_beer'] = re.sub("[^0-9]", "", price_beer_tag.text.strip())
res_info = []
for it in soup.findAll("div", attrs={'class': 'res-info-feature-text'}):
try:
res_info.append(it.text.strip())
except NavigableString:
pass
rest_details['facility'] = res_info
week_schedule = soup.find("div", attrs={"id": "res-week-timetable"})
data = []
if week_schedule:
time_table = week_schedule.table
rows = time_table.findAll('tr')
for row in rows:
cols = row.find_all('td')
cols = [ele.text.strip() for ele in cols]
data.append([ele for ele in cols if ele])
rest_details['timetable'] = data
collection_box = soup.find('div', attrs={'class': 'ln24'})
rest_details['featured_collections'] = []
if collection_box:
for it in collection_box.find_all('a', attrs={'class': 'zred'}):
rest_details['featured_collections'].append(it.text.strip())
address_div = soup.find("div", attrs={"class": "resinfo-icon"})
if address_div:
rest_details['address'] = address_div.span.get_text()
else:
rest_details['address'] = ""
known_for_div = soup.find("div", attrs={'class': 'res-info-known-for-text mr5'})
if known_for_div:
rest_details['known_for'] = known_for_div.text.strip()
else:
rest_details['known_for'] = ''
rest_details['what_people_love_here'] = []
for div in soup.find_all("div", attrs={'class': 'rv_highlights__section pr10'}):
child_div = div.find("div", attrs={'class': 'grey-text'})
if child_div:
rest_details['what_people_love_here'].append(child_div.get_text())
rest_details['reviews'] = self.get_reviews()
return rest_details
if __name__ == '__main__':
if browser is None:
sys.exit()
out_file = open("zomato_chandigarh_2.json", "a")
with open('chandigarh_restaurant_details_2.txt', 'r', encoding="utf-8") as f:
for line in f:
zr = ZomatoRestaurant(line)
json.dump(zr.scrap(), out_file)
out_file.write('\n')
out_file.close()
browser.close()
| [
"[email protected]"
] | |
c16e912c2f8af62b47f04930ea6834f65f004685 | 2b167e29ba07e9f577c20c54cb943861d0ccfa69 | /numerical_analysis_backup/small-scale-multiobj/resource_usage/arch5_old/arch5_decomposition_new.py | ae530d6687b0514a5e7f818ed6800eb8daffdb67 | [] | no_license | LiYan1988/kthOld_OFC | 17aeeed21e195d1a9a3262ec2e67d6b1d3f9ff0f | b1237577ea68ad735a65981bf29584ebd889132b | refs/heads/master | 2021-01-11T17:27:25.574431 | 2017-01-23T05:32:35 | 2017-01-23T05:32:35 | 79,773,237 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 21,197 | py | # -*- coding: utf-8 -*-
"""
Created on Tue May 31 15:39:25 2016
@author: li
"""
from gurobipy import *
from scipy.linalg import toeplitz
import numpy as np
import time
import itertools
import csv
class Arch5_decompose(object):
"""Create models for different SDM DCN architectures
"""
def __init__(self, traffic_matrix, num_slots=320, num_cores=10,
slot_capacity =25, num_guard_slot=1, alpha=1, beta=0):
"""Initialize
"""
# traffic matrix
self.traffic_matrix = traffic_matrix
# number of PODs
self.num_pods = traffic_matrix.shape[0]
# capacity per spectrum slot, Gbps
self.slot_capacity = slot_capacity
# number of slot as guardband
self.num_guard_slot = num_guard_slot
# number of slots
self.num_slots = num_slots
# number of cores
self.num_cores = num_cores
# number of total demands
self.total_demands = sum(self.traffic_matrix.flatten()>0)
# Need to consider guardbands, no need to consider max capacity
# since a traffic can use the whole fiber
self.tm = self.traffic_matrix.copy()
# Model data
# set of pods
pods = list(range(self.num_pods))
# pairs of traffic demands
traffic_pairs = tuplelist([(i, j) for i in pods for j in pods
if self.tm[i, j]>0])
# Set of possible combinations of core and slot numbers
core_set = {}
slot_set = {}
volu_set = {}
for i, j in traffic_pairs:
tmp = self.core_slot(self.tm[i, j])
core_set[(i, j)] = tmp[:, 0]
slot_set[(i, j)] = tmp[:, 1]
volu_set[(i, j)] = tmp[:, 2]
# set of cores
cores = list(range(self.num_cores))
self.pods = pods
self.cores = cores
self.core_set = core_set
self.slot_set = slot_set
self.volu_set = volu_set
self.traffic_pairs = traffic_pairs
# weight factor
self.alpha = alpha
self.beta = beta
def volumn_model(self, **kwargs):
"""Estimate the volume of each connection, i.e., the combination of
core adn slot numbers.
"""
# Model
tic = time.clock()
model_vol = Model('model_vol')
# variable: choice of core-slot combination
# variable: succuss?
vol_choice = {}
is_suc = {}
vol_cnk = {}
for u in self.traffic_pairs:
is_suc[u] = model_vol.addVar(vtype=GRB.BINARY, obj=-1)
vol_cnk[u] = model_vol.addVar(vtype=GRB.CONTINUOUS)
for i in range(self.num_cores):
vol_choice[u, i] = model_vol.addVar(vtype=GRB.BINARY, obj=-0.00001)
# variable: volumn
vol_limit = self.num_cores*self.num_slots
vol_pod = {}
for i in self.pods:
vol_pod[i] = model_vol.addVar(vtype=GRB.CONTINUOUS, ub=vol_limit)
model_vol.update()
# constraints: success
for u in self.traffic_pairs:
model_vol.addConstr(quicksum(vol_choice[u, i]
for i in range(self.num_cores))==is_suc[u])
model_vol.addConstr(quicksum(vol_choice[u, i]*self.volu_set[u][i]
for i in range(self.num_cores))==vol_cnk[u])
for i in self.pods:
tmp = list((i, j) for (i, j) in self.traffic_pairs.select(i, '*'))
tmp0 = list((j, i) for (j, i) in self.traffic_pairs.select('*', i))
# all the traffics in link i
tmp.extend(tmp0)
model_vol.addConstr(quicksum(vol_cnk[u] for u in tmp)==vol_pod[i])
if len(kwargs):
for key, value in kwargs.items():
setattr(model_vol.params, key, value)
model_vol.optimize()
toc = time.clock()
is_sucx = {}
for u in self.traffic_pairs:
is_sucx[u] = is_suc[u].x
vol_choicex = {}
for u in self.traffic_pairs:
for i in range(self.num_cores):
if(vol_choice[u,i].x==1):
vol_choicex[u] = i
self.is_suc = is_sucx
self.vol_choice = vol_choicex
def core_slot(self, capacity):
"""Find all the possible combination of core and slot numbers for
a traffic demand with given capacity
The guardband is considered
Output: m * 2 numpy array, the first column is the number of cores,
and the second column is the number of slots, m is the number of
possible combinations.
"""
# total number of slots
n_slots = np.ceil(capacity / self.slot_capacity)
# list of all combinations of core and slot numbers
combination = []
for i in range(1, self.num_cores+1):
u = [i,int(np.ceil(n_slots/i)+self.num_guard_slot)]
u.append(u[0]*u[1])
combination.append(tuple(u))
combination = np.asarray(combination)
return combination
def create_model_routing(self, **kwargs):
channels_core = []
group_core = {}
tmp = 0
B = np.empty((self.num_cores, 0))
for n in range(1, self.num_cores+1):
channels_core.extend(list(range(tmp, tmp+self.num_cores-n+1)))
group_core[n] = list(range(tmp, tmp+self.num_cores-n+1))
tmp = tmp+self.num_cores-n+1
c = np.zeros((self.num_cores,))
c[:n] = 1
r = np.zeros((self.num_cores-n+1))
r[0] = 1
B = np.hstack((B, toeplitz(c,r)))
self.B = B
self.channels_core = channels_core
self.group_core = group_core
channels_core_nslot = {}
for u in self.traffic_pairs:
for n in range(1, self.num_cores+1):
for i in group_core[n]:
channels_core_nslot[u,i] = self.slot_set[u][n-1]
model_routing = Model('model_routing')
core_choice = {}
for u in self.traffic_pairs:
for i in channels_core:
core_choice[u,u[0],i] = model_routing.addVar(vtype=GRB.BINARY)
core_choice[u,u[1],i] = model_routing.addVar(vtype=GRB.BINARY)
is_suc = {}
for u in self.traffic_pairs:
is_suc[u] = model_routing.addVar(vtype=GRB.BINARY, obj=-(self.alpha+self.beta*self.tm[u[0],u[1]]))
flow_core = {}
for i in self.pods:
for j in self.cores:
flow_core[i,j] = model_routing.addVar(vtype=GRB.CONTINUOUS, ub=self.num_slots)
model_routing.update()
for u in self.traffic_pairs:
model_routing.addConstr(quicksum(core_choice[u,u[0],i]
for i in channels_core)==is_suc[u])
model_routing.addConstr(quicksum(core_choice[u,u[1],i]
for i in channels_core)==is_suc[u])
#core channel consistent
for n in range(1, self.num_cores+1):
model_routing.addConstr(quicksum(core_choice[u,u[0],i] for i in group_core[n])
== quicksum(core_choice[u,u[1],i] for i in group_core[n]))
for i in self.pods:
tmp = list((i, j) for (i, j) in self.traffic_pairs.select(i, '*'))
tmp0 = list((j, i) for (j, i) in self.traffic_pairs.select('*', i))
# all the traffics in link i
tmp.extend(tmp0)
for j in self.cores:
model_routing.addConstr(quicksum(
channels_core_nslot[u,k]*B[j,k]*core_choice[u,i,k]
for k in channels_core
for u in tmp)==flow_core[i,j])
if len(kwargs):
for key, value in kwargs.items():
setattr(model_routing.params, key, value)
model_routing.optimize()
core_choicex = {} # which core channel
nslot_choice = {} # number of spectral slots per core for connection u using channel i
for u in self.traffic_pairs:
if is_suc[u].x==1:
for i in channels_core:
if core_choice[u,u[0],i].x==1:
core_choicex[u,u[0]] = i
nslot_choice[u] = channels_core_nslot[u,i]
if core_choice[u,u[1],i].x==1:
core_choicex[u,u[1]] = i
core_usagex = {}
for u in self.traffic_pairs:
if is_suc[u].x==1:
chout = core_choicex[u,u[0]]
chin = core_choicex[u,u[1]]
core_out = np.where(B[:,chout]==1)[0]
core_in = np.where(B[:,chin]==1)[0]
core_usagex[u,u[0]] = core_out
core_usagex[u,u[1]] = core_in
is_sucx = {}
for u in self.traffic_pairs:
is_sucx[u] = is_suc[u].x
flow_corex = {}
for i in self.pods:
for j in self.cores:
flow_corex[i,j] = flow_core[i,j].x
cnk_in_core = {} # set of connections using a particular core
for i in self.pods:
tmp = list((i, j) for (i, j) in self.traffic_pairs.select(i, '*'))
tmp0 = list((j, i) for (j, i) in self.traffic_pairs.select('*', i))
# all the traffics in link i
tmp.extend(tmp0)
for j in self.cores:
cnk_in_core[i,j] = []
for u in tmp:
if sum(core_choice[u,i,k].x*B[j,k]for k in channels_core)==1:
cnk_in_core[i,j].append(u)
suclist = []
for u in self.traffic_pairs:
if is_sucx[u]==1:
suclist.append(u)
self.core_choice = core_choicex
self.core_usagex = core_usagex
self.is_suc_routing = is_sucx
self.flow_core = flow_corex
self.cnk_in_core = cnk_in_core
self.suclist = suclist
self.nslot_choice = nslot_choice
self.n_suc_routing = len(suclist)
self.model_routing = model_routing
self.connections_ub = len(self.suclist)
self.throughput_ub = sum(self.tm[u[0],u[1]] for u in self.suclist)
def create_model_sa(self, **kwargs):
smallM = self.num_slots
bigM = 10*smallM
model_sa = Model('model_sa')
spec_order = {}
for i in self.pods:
for k in self.cores:
for c in itertools.combinations(self.cnk_in_core[i,k],2):
spec_order[c[0],c[1]] = model_sa.addVar(vtype=GRB.BINARY)
spec_idx = {}
for u in self.suclist:
spec_idx[u] = model_sa.addVar(vtype=GRB.CONTINUOUS)
isfail = {}
for u in self.suclist:
isfail[u] = model_sa.addVar(vtype=GRB.BINARY, obj=self.alpha+self.beta*self.tm[u[0],u[1]])
model_sa.update()
for i in self.pods:
for k in self.cores:
for c in itertools.combinations(self.cnk_in_core[i,k],2):
model_sa.addConstr(
spec_idx[c[0]]+self.nslot_choice[c[0]]-spec_idx[c[1]]+
bigM*spec_order[c[0],c[1]]<=bigM)
model_sa.addConstr(
spec_idx[c[1]]+self.nslot_choice[c[1]]-spec_idx[c[0]]+
bigM*(1-spec_order[c[0],c[1]])<=bigM)
for u in self.suclist:
model_sa.addConstr(
bigM*isfail[u]>=spec_idx[u]+self.nslot_choice[u]-smallM)
if len(kwargs):
for key, value in kwargs.items():
setattr(model_sa.params, key, value)
model_sa.optimize()
self.model_sa = model_sa
tmp = list(self.suclist)
for u in self.suclist:
if isfail[u].x==1:
tmp.remove(u)
self.suclist_sa = list(tmp)
self.spec_idxx = {}
for u in self.suclist:
self.spec_idxx[u] = spec_idx[u].x
self.connections_lb = len(self.suclist)
self.throughput_lb = sum(self.tm[u[0],u[1]] for u in self.suclist)
# construct the resource tensor
tensor_milp = np.ones((self.num_pods, self.num_cores, self.num_slots))
for u in self.suclist_sa:
src = u[0]
dst = u[1]
core_src = self.core_usagex[u,src]
core_dst = self.core_usagex[u,dst]
spec_idx = int(round(self.spec_idxx[u]))
spec_bd = int(round(self.nslot_choice[u]))
res_src = tensor_milp[src,core_src,spec_idx:(spec_idx+spec_bd)]
res_dst = tensor_milp[dst,core_dst,spec_idx:(spec_idx+spec_bd)]
if (np.sum(res_src)==spec_bd*core_src.size) and (np.sum(res_dst)==spec_bd*core_dst.size):
tensor_milp[src,core_src,spec_idx:(spec_idx+spec_bd)] = 0
tensor_milp[dst,core_dst,spec_idx:(spec_idx+spec_bd)] = 0
self.tensor_milp = tensor_milp
self.efficiency_milp = (float(sum(self.tm[i] for i in self.suclist_sa))/
sum(self.nslot_choice[i]*self.core_usagex[i,i[0]].size*self.slot_capacity
for i in self.suclist_sa))
def write_result_csv(self, file_name, suclist):
with open(file_name, 'w') as f:
writer = csv.writer(f, delimiter=',')
writer.writerow(['src', 'dst', 'spec', 'core_src',
'core_dst', '#core', 'used_slot', 'tfk_slot'])
for u in suclist:
col_src = [self.B[j,self.core_choice[u,u[0]]] for j in self.cores]
core_src = self.one_runs(col_src)[0][0]
col_dst = [self.B[j,self.core_choice[u,u[1]]] for j in self.cores]
core_dst = self.one_runs(col_dst)[0][0]
num_cores = self.one_runs(col_dst)[0][1]
used_slot = self.nslot_choice[u]
tfk_slot = np.ceil(float(self.tm[u])/self.slot_capacity)
writer.writerow([u[0],u[1],
self.spec_idxx[u],core_src,core_dst,num_cores,
used_slot,tfk_slot])
def sa_heuristic(self, ascending1=False, ascending2=True):
"""
"""
suclist = list(self.suclist)
suclist_tm = [self.nslot_choice[u] for u in suclist]
if ascending1:
suclist = [x for (y,x) in sorted(zip(suclist_tm, suclist))]
else:
suclist = [x for (y, x) in sorted(zip(suclist_tm, suclist), reverse=True)]
IS_list = {} # independent set
IS_list[0] = []
cl_list = {}
cl_list[0] = set()
i = 0
while len(suclist):
tmplist = list(suclist)
for u in tmplist:
src = u[0]
dst = u[1]
src_core = list(self.core_usagex[u,src])
dst_core = list(self.core_usagex[u,dst])
srct = set(zip([src]*len(src_core),src_core))
dstt = set(zip([dst]*len(dst_core),dst_core))
sdset = srct|dstt
if len(sdset-cl_list[i])==len(sdset):
# add connection if it's independent to element in IS_list[i]
IS_list[i].append(u)
cl_list[i].update(sdset)
tmplist.remove(u)
i += 1
IS_list[i] = []
cl_list[i] = set()
suclist = tmplist
del cl_list[i]
del IS_list[i]
self.obj_sah_ = 0
self.obj_sah_connection_ = 0
self.obj_sah_throughput_ = 0
suclist = []
restensor = np.ones((self.num_pods, self.num_cores, self.num_slots))
for i in range(len(IS_list)):
for u in IS_list[i]:
src = u[0]
dst = u[1]
src_core = self.core_usagex[u,src]
dst_core = self.core_usagex[u,dst]
tmpsrc = np.prod(restensor[src,src_core,:],axis=0,dtype=bool)
tmpdst = np.prod(restensor[dst,dst_core,:],axis=0,dtype=bool)
tmp = tmpsrc*tmpdst
tmpavail = self.one_runs(tmp)
tmpidx = np.where(tmpavail[:,1]>=self.nslot_choice[u])[0]
if tmpidx.size:
spec_idx = tmpavail[tmpidx[0],0]
restensor[src,src_core,spec_idx:(spec_idx+self.nslot_choice[u])] = False
restensor[dst,dst_core,spec_idx:(spec_idx+self.nslot_choice[u])] = False
self.obj_sah_ += self.alpha+self.beta*self.tm[src,dst]
self.obj_sah_connection_ += 1
self.obj_sah_throughput_ += self.tm[src,dst]
suclist.append(u)
remain_cnk = [u for u in self.traffic_pairs if u not in suclist]
remain_tm = [self.tm[u]/float(self.slot_capacity) for u in remain_cnk]
if ascending2:
remain_cnk = [x for (y,x) in sorted(zip(remain_tm,remain_cnk))]
else:
remain_cnk = [x for (y,x) in sorted(zip(remain_tm,remain_cnk), reverse=False)]
for u in remain_cnk:
src = u[0]
dst = u[1]
tmpsrc = restensor[src,:,:]
tmpdst = restensor[dst,:,:]
tmpcmb = np.zeros((self.num_cores**2, self.num_slots))
k = 0
avail_slots = {}
for ksrc in self.cores:
for kdst in self.cores:
tmpcmb[k,:] = tmpsrc[ksrc,:]*tmpdst[kdst,:]
tmpavail = self.one_runs(tmpcmb[k,:])
tmpidx = np.where(tmpavail[:,1]>=self.tm[u]*self.slot_capacity)[0]
if not tmpidx.size:
avail_slots[ksrc,kdst] = np.array([-1, self.num_slots+1])
else:
idxm = np.argmin(tmpavail[tmpidx,1])
avail_slots[ksrc,kdst] = np.array(tmpavail[tmpidx[idxm],:])
k += 1
avail_slots = list(sorted(avail_slots.iteritems(), key=lambda (x,y):y[1]))
# avail_slots[0] has the form of ((core_out,core_in), [spec_idx,available_slots])
if avail_slots[0][1][1]<=self.num_slots:
src_core = avail_slots[0][0][0]
dst_core = avail_slots[0][0][1]
spec_idx = avail_slots[0][1][0]
spec_bd = self.nslot_choice[u]
restensor[src,src_core,spec_idx:(spec_idx+spec_bd)] = 0
restensor[dst,dst_core,spec_idx:(spec_idx+spec_bd)] = 0
self.obj_sah_ += self.alpha+self.beta*self.tm[src,dst]
self.obj_sah_connection_ += 1
self.obj_sah_throughput_ += self.tm[src,dst]
def one_runs(self, a):
# Create an array that is 1 where a is 0, and pad each end with an extra 0.
isone = np.concatenate(([0], np.equal(a, 1).view(np.int8), [0]))
absdiff = np.abs(np.diff(isone))
# Runs start and end where absdiff is 1.
ranges = np.where(absdiff == 1)[0].reshape(-1, 2)
ranges[:,1] = ranges[:,1]-ranges[:,0]
return ranges
def save_tensor(self, tensor, filename):
"""Save resource tensor
save as csv
"""
tmp = tensor.reshape((-1, self.num_slots))
np.savetxt(filename, tmp, fmt='%1d',delimiter=',')
if __name__=='__main__':
from sdm1 import Traffic
np.random.seed(2010)
#%% generate traffic
num_pods=250
max_pod_connected=int(num_pods*0.5)
min_pod_connected=1
mean_capacity=200
variance_capacity=200
num_cores=10
num_slots=320
t = Traffic(num_pods=num_pods, max_pod_connected=max_pod_connected,
min_pod_connected=min_pod_connected,
mean_capacity=mean_capacity,
variance_capacity=variance_capacity)
t.generate_traffic()
tm = t.traffic_matrix
# tmdf = pd.DataFrame(tm)
# tmdf.to_csv('tm_arch5.csv', header=False, index=False)
#%% read from file
# filename = 'traffic_matrix__matrix_0.csv'
# # print filename
# tm = []
# with open(filename) as f:
# reader = csv.reader(f)
# for idx, row in enumerate(reader):
# if idx>11:
# row.pop()
# row = [int(u) for u in row]
# tm.append(row)
# tm = np.array(tm)*25
#%% optimize
m = Arch5_decompose(tm, num_slots=num_slots, num_cores=num_cores, alpha=1, beta=0.01)
m.create_model_routing(mipfocus=1, timelimit=1000, method=2, mipgap=0.02)
m.create_model_sa(mipfocus=1,timelimit=1000, method=2, SubMIPNodes=2000, Heuristics=0.8)
print m.connections_lb/float(m.connections_ub)
print m.throughput_lb/float(m.throughput_ub)
# m.sa_heuristic(ascending1=True, ascending2=True)
# print float(m.obj_sah_connection_)/m.connections_ub
# print float(m.obj_sah_throughput_)/m.throughput_ub | [
"[email protected]"
] | |
156ad56c215bc575a383f08114e2f020fa456cc8 | d9c4ae9466e63b8e2f5a5ee6cc928b5edfae72c3 | /백준/1260 DFS와 BFS/안현진1.py | ef6e723fb3e825c7fdb257502487f6168dc145a8 | [] | no_license | HyeonJinGitHub/LearningRepository | 950dc3f2a78dbb8bbf713c04eb2e0c88a4e66894 | 35c309bfe8e2311a9f60c8b6ea4ddcfe70559e4b | refs/heads/master | 2023-04-19T23:56:31.881422 | 2021-05-11T04:12:32 | 2021-05-11T04:12:32 | 290,224,023 | 37 | 4 | null | 2021-05-02T14:28:35 | 2020-08-25T13:34:02 | Java | UTF-8 | Python | false | false | 787 | py | import sys
from collections import deque
def dfs(x):
global visit
visit[x] = True
print(x, end=' ')
for y in a[x]:
if not visit[y]:
dfs(y)
def bfs(x):
q = deque([x])
check = [False] * (N + 1)
check[x] = True
while q:
x = q.popleft()
print(x, end=' ')
for y in a[x]:
if not check[y]:
check[y] = True
q.append(y)
if __name__ == '__main__':
N, M, V = map(int, sys.stdin.readline().split())
a = [[] for _ in range(N + 1)]
for _ in range(M):
u, v = map(int, sys.stdin.readline().split())
a[u].append(v)
a[v].append(u)
for i in range(1, N + 1):
a[i].sort()
visit = [False] * (N + 1)
dfs(V)
print()
bfs(V) | [
"[email protected]"
] | |
250c85c2aba2881de7c6832f2f64bdc37ebb0f75 | 8927ba440e6572cc438d248f636a296671c83b33 | /search_dict.py | a06ef2a3c3ea1b6ab7b1f2733c6bf9e656556b1f | [] | no_license | coschmitt/BinarySeach | 7c1553eb6c96552e1349a980fad669d663967191 | 5c6d595e61143bb12704aeb97d0c6b62cfbfa0b2 | refs/heads/main | 2023-02-06T12:33:29.674613 | 2020-12-27T19:03:40 | 2020-12-27T19:03:40 | 304,171,881 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,037 | py | from sqlalchemy import create_engine, select, Table, MetaData, Column
from itertools import permutations
engine = create_engine("mysql+pymysql://root:{password}@localhost:3306/entries")
metadata = MetaData()
entries = Table('entries', metadata, autoload=True, autoload_with=engine)
stmt = select([entries.columns.word])
result = engine.execute(stmt).fetchall()
def find(L, target):
start = 0
end = len(L) - 1
while start <= end:
middle = (start + end) // 2
midpoint = L[middle][0]
if midpoint > target:
end = middle - 1
elif midpoint < target:
start = middle + 1
else:
return midpoint
return None
def find_word(list, length=None):
if not length:
length = len(list)
answers = []
perm = permutations(list, length)
for i in perm:
word = ''.join(i)
dictWord = find(result, word[0].upper() + word[1:len(word)])
if dictWord is not None:
answers.append(dictWord)
return answers
| [
"[email protected]"
] | |
afe54ee5012f5d71afac6e6618c6567c1a9ac1ce | 14392ffb7b6f13bc66bf3a17e3b26069743c2dee | /tools/enhance.py | 891e5abc3570d735e18062918658134a9a1dad33 | [
"MIT"
] | permissive | xiaojieli0903/SRDN | 560afaf4ee24e87a317e9304c3c507940ef58740 | e5cfec029942088523550cc62f47e6f6519a6474 | refs/heads/master | 2021-09-06T12:30:35.455146 | 2018-02-06T14:59:12 | 2018-02-06T14:59:12 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 33,410 | py | # -*- coding: utf-8 -*-
#!/usr/bin/env python3
"""
*
* _ooOoo_
* o8888888o
* 88" . "88
* (| -_- |)
* O\ = /O
* ____/`---'\____
* .' \\| |// `.
* / \\||| : |||// \
* / _||||| -:- |||||- \
* | | \\\ - /// | |
* | \_| ''\---/'' | |
* \ .-\__ `-` ___/-. /
* ___`. .' /--.--\ `. . __
* ."" '< `.___\_<|>_/___.' >'"".
* | | : `- \`.;`\ _ /`;.`/ - ` : | |
* \ \ `-. \_ __\ /__ _/ .-` / /
* ======`-.____`-.___\_____/___.-`____.-'======
* `=---='
* ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
* 佛祖保佑 永无BUG
* 佛曰:
* 写字楼里写字间,写字间里程序员;
* 程序人员写程序,又拿程序换酒钱。
* 酒醒只在网上坐,酒醉还来网下眠;
* 酒醉酒醒日复日,网上网下年复年。
* 但愿老死电脑间,不愿鞠躬老板前;
* 奔驰宝马贵者趣,公交自行程序员。
* 别人笑我忒疯癫,我笑自己命太贱;
* 不见满街漂亮妹,哪个归得程序员?
*
*
"""
#
# Copyright (c) 2016, Alex J. Champandard.
#
# Neural Enhance is free software: you can redistribute it and/or modify it under the terms of the GNU Affero General
# Public License version 3. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
# without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
#
__version__ = '0.3'
import io
import os
import sys
import bz2
import glob
import math
import time
import pickle
import random
import argparse
import itertools
import threading
import collections
# Configure all options first so we can later custom-load other libraries (Theano) based on device specified by user.
# Build the command-line interface; `args` is read as a module-level global throughout the script.
parser = argparse.ArgumentParser(description='Generate a new image by applying style onto a content image.',
                                 formatter_class=argparse.ArgumentDefaultsHelpFormatter)
add_arg = parser.add_argument
# -- Inference / rendering options.
add_arg('files', nargs='*', default=[])
add_arg('--zoom', default=2, type=int, help='Resolution increase factor for inference.')
add_arg('--rendering-tile', default=80, type=int, help='Size of tiles used for rendering images.')
add_arg('--rendering-overlap', default=24, type=int, help='Number of pixels padding around each tile.')
add_arg('--rendering-histogram',default=False, action='store_true', help='Match color histogram of output to input.')
# -- Model selection (which network weights to load/save).
add_arg('--type', default='photo', type=str, help='Name of the neural network to load/save.')
add_arg('--model', default='default', type=str, help='Specific trained version of the model.')
# -- Training data and preprocessing (blur/noise/JPEG degradation of inputs).
add_arg('--train', default=False, type=str, help='File pattern to load for training.')
add_arg('--train-scales', default=0, type=int, help='Randomly resize images this many times.')
add_arg('--train-blur', default=None, type=int, help='Sigma value for gaussian blur preprocess.')
add_arg('--train-noise', default=None, type=float, help='Radius for preprocessing gaussian blur.')
add_arg('--train-jpeg', default=[], nargs='+', type=int, help='JPEG compression level & range in preproc.')
# -- Training schedule and batching.
add_arg('--epochs', default=10, type=int, help='Total number of iterations in training.')
add_arg('--epoch-size', default=72, type=int, help='Number of batches trained in an epoch.')
add_arg('--save-every', default=10, type=int, help='Save generator after every training epoch.')
add_arg('--batch-shape', default=192, type=int, help='Resolution of images in training batch.')
add_arg('--batch-size', default=15, type=int, help='Number of images per training batch.')
add_arg('--buffer-size', default=1500, type=int, help='Total image fragments kept in cache.')
add_arg('--buffer-fraction', default=5, type=int, help='Fragments cached for each image loaded.')
# -- Optimizer (ADAM) hyper-parameters.
add_arg('--learning-rate', default=1E-4, type=float, help='Parameter for the ADAM optimizer.')
add_arg('--learning-period', default=75, type=int, help='How often to decay the learning rate.')
add_arg('--learning-decay', default=0.5, type=float, help='How much to decay the learning rate.')
# -- Generator / discriminator architecture and loss weights.
add_arg('--generator-upscale', default=2, type=int, help='Steps of 2x up-sampling as post-process.')
add_arg('--generator-downscale',default=0, type=int, help='Steps of 2x down-sampling as preprocess.')
add_arg('--generator-filters', default=[64], nargs='+', type=int, help='Number of convolution units in network.')
add_arg('--generator-blocks', default=4, type=int, help='Number of residual blocks per iteration.')
add_arg('--generator-residual', default=2, type=int, help='Number of layers in a residual block.')
add_arg('--perceptual-layer', default='conv2_2', type=str, help='Which VGG layer to use as loss component.')
add_arg('--perceptual-weight', default=1e0, type=float, help='Weight for VGG-layer perceptual loss.')
add_arg('--discriminator-size', default=32, type=int, help='Multiplier for number of filters in D.')
add_arg('--smoothness-weight', default=2e5, type=float, help='Weight of the total-variation loss.')
add_arg('--adversary-weight', default=5e2, type=float, help='Weight of adversarial loss compoment.')
# -- Epochs at which each part of the adversarial training switches on.
add_arg('--generator-start', default=0, type=int, help='Epoch count to start training generator.')
add_arg('--discriminator-start',default=1, type=int, help='Epoch count to update the discriminator.')
add_arg('--adversarial-start', default=2, type=int, help='Epoch for generator to use discriminator.')
add_arg('--device', default='cpu', type=str, help='Name of the CPU/GPU to use, for Theano.')
args = parser.parse_args()
#----------------------------------------------------------------------------------------------------------------------
# Color coded output helps visualize the information a little better, plus it looks cool!
class ansi:
    """ANSI terminal escape sequences used to colour console output.

    Plain names are the normal-weight colour; the ``_B`` suffix is the
    bold/bright variant of the same colour.  ``ENDC`` resets all attributes.
    """
    WHITE = '\033[0;97m'
    WHITE_B = '\033[1;97m'
    YELLOW = '\033[0;33m'
    YELLOW_B = '\033[1;33m'
    RED = '\033[0;31m'
    RED_B = '\033[1;31m'
    BLUE = '\033[0;94m'
    BLUE_B = '\033[1;94m'
    CYAN = '\033[0;36m'
    CYAN_B = '\033[1;36m'
    ENDC = '\033[0m'  # reset to default terminal style
def error(message, *lines):
    """Print *message* (plus optional detail *lines*) in red, then exit with code -1.

    Note: *message* and *lines* are spliced into the template before
    ``str.format`` runs, exactly as in the original layout.
    """
    tail = "{}\n" if lines else "{}"
    template = "\n{}ERROR: " + message + "{}\n" + "\n".join(lines) + tail
    print(template.format(ansi.RED_B, ansi.RED, ansi.ENDC))
    sys.exit(-1)
def warn(message, *lines):
    """Print *message* (plus optional detail *lines*) in yellow; execution continues."""
    pieces = ["\n{}WARNING: " + message + "{}\n", "\n".join(lines), "{}\n"]
    print("".join(pieces).format(ansi.YELLOW_B, ansi.YELLOW, ansi.ENDC))
def extend(lst): return itertools.chain(lst, itertools.repeat(lst[-1]))
print("""{} {}Super Resolution for images and videos powered by Deep Learning!{}
- Code licensed as AGPLv3, models under CC BY-NC-SA.{}""".format(ansi.CYAN_B, __doc__, ansi.CYAN, ansi.ENDC))
# Load the underlying deep learning libraries based on the device specified. If you specify THEANO_FLAGS manually,
# the code assumes you know what you are doing and they are not overriden!
os.environ.setdefault('THEANO_FLAGS', 'floatX=float32,device={},force_device=True,allow_gc=True,'\
'print_active_device=False'.format(args.device))
'''
# Scientific & Imaging Libraries
import numpy as np
#import scipy.ndimage
#from scipy.misc import ndimage
from PIL import Image
from scipy.misc import imread, imresize, imsave,fromimage,toimage
'''
import numpy as np
from PIL import Image
import matplotlib.pyplot as plt
import matplotlib.cm as cm
import scipy.signal as signal
import glob
import time
from scipy.misc import imread, imresize, imsave
# Numeric Computing (GPU)
import theano, theano.tensor as T
# Attach a softminus activation onto T.nnet: softminus(x) = x - softplus(x)
# (mathematically equal to -softplus(-x), i.e. log(sigmoid(x))).
T.nnet.softminus = lambda x: x - T.nnet.softplus(x)
# Support ansi colors in Windows too.
if sys.platform == 'win32':
import colorama
# Deep Learning Framework
import lasagne
from lasagne.layers import Conv2DLayer as ConvLayer, Deconv2DLayer as DeconvLayer, Pool2DLayer as PoolLayer
from lasagne.layers import InputLayer, ConcatLayer, ElemwiseSumLayer, batch_norm
print('{} - Using the device `{}` for neural computation.{}\n'.format(ansi.CYAN, theano.config.device, ansi.ENDC))
#======================================================================================================================
# Image Processing
#======================================================================================================================
class DataLoader(threading.Thread):
    """Background daemon thread that keeps a ring buffer of training patches full.

    Image files matching ``args.train`` are loaded in random order; each is
    degraded (optional blur, downscale, JPEG re-compression, noise) to produce
    the low-quality "seed", then random (seed, original) patch pairs are
    written into shared numpy buffers.  The training loop pulls batches out
    via :meth:`copy`; two events coordinate slot ownership between the
    producer (this thread) and the consumer.
    """

    def __init__(self):
        super(DataLoader, self).__init__(daemon=True)
        # data_ready: a full batch is available; data_copied: slots were freed.
        self.data_ready = threading.Event()
        self.data_copied = threading.Event()

        # Patch edge sizes for the high-res original and the low-res seed.
        self.orig_shape, self.seed_shape = args.batch_shape, args.batch_shape // args.zoom

        self.orig_buffer = np.zeros((args.buffer_size, 3, self.orig_shape, self.orig_shape), dtype=np.float32)
        self.seed_buffer = np.zeros((args.buffer_size, 3, self.seed_shape, self.seed_shape), dtype=np.float32)
        self.files = glob.glob(args.train)
        if len(self.files) == 0:
            error("There were no files found to train from searching for `{}`".format(args.train),
                  "  - Try putting all your images in one folder and using `--train=data/*.jpg`")

        # Slot bookkeeping: `available` may be overwritten, `ready` may be sampled.
        self.available = set(range(args.buffer_size))
        self.ready = set()

        self.cwd = os.getcwd()
        self.start()

    def run(self):
        # Loop forever, reshuffling the file list each pass over the dataset.
        while True:
            random.shuffle(self.files)
            for f in self.files:
                self.add_to_buffer(f)

    def add_to_buffer(self, f):
        """Load one image, degrade it, and scatter patch pairs into the buffers."""
        filename = os.path.join(self.cwd, f)
        try:
            orig = Image.open(filename).convert('RGB')
            # Randomly shrink the source image to augment training scales.
            scale = 2 ** random.randint(0, args.train_scales)
            if scale > 1 and all(s//scale >= args.batch_shape for s in orig.size):
                orig = orig.resize((orig.size[0]//scale, orig.size[1]//scale), resample=Image.LANCZOS)
            if any(s < args.batch_shape for s in orig.size):
                raise ValueError('Image is too small for training with size {}'.format(orig.size))
        except Exception:
            warn('Could not load `{}` as image.'.format(filename),
                 '  - Try fixing or removing the file before next run.')
            self.files.remove(f)
            return

        # Build the degraded "seed" version of the image.
        seed = orig
        if args.train_blur is not None:
            seed = seed.filter(ImageFilter.GaussianBlur(radius=random.randint(0, args.train_blur*2)))
        if args.zoom > 1:
            seed = seed.resize((orig.size[0]//args.zoom, orig.size[1]//args.zoom), resample=Image.LANCZOS)
        if len(args.train_jpeg) > 0:
            buffer, rng = io.BytesIO(), args.train_jpeg[-1] if len(args.train_jpeg) > 1 else 15
            seed.save(buffer, format='jpeg', quality=args.train_jpeg[0]+random.randrange(-rng, +rng))
            seed = Image.open(buffer)

        # BUG FIX: the original called `fromimage()` which was never imported
        # (it was removed from scipy.misc); convert PIL -> numpy directly.
        orig = np.asarray(orig, dtype=np.float32)
        seed = np.asarray(seed, dtype=np.float32)

        if args.train_noise is not None:
            # BUG FIX: `scipy.random` was never bound in this module; use numpy's RNG.
            seed += np.random.normal(scale=args.train_noise, size=(seed.shape[0], seed.shape[1], 1))

        # Number of patches extracted scales with the image area.
        for _ in range(seed.shape[0] * seed.shape[1] // (args.buffer_fraction * self.seed_shape ** 2)):
            h = random.randint(0, seed.shape[0] - self.seed_shape)
            w = random.randint(0, seed.shape[1] - self.seed_shape)
            seed_chunk = seed[h:h+self.seed_shape, w:w+self.seed_shape]
            h, w = h * args.zoom, w * args.zoom
            orig_chunk = orig[h:h+self.orig_shape, w:w+self.orig_shape]

            # Block until the consumer frees at least one buffer slot.
            while len(self.available) == 0:
                self.data_copied.wait()
                self.data_copied.clear()

            i = self.available.pop()
            # Store as CHW float32 in [-0.5, +0.5].
            self.orig_buffer[i] = np.transpose(orig_chunk.astype(np.float32) / 255.0 - 0.5, (2, 0, 1))
            self.seed_buffer[i] = np.transpose(seed_chunk.astype(np.float32) / 255.0 - 0.5, (2, 0, 1))
            self.ready.add(i)

            if len(self.ready) >= args.batch_size:
                self.data_ready.set()

    def copy(self, origs_out, seeds_out):
        """Copy one randomly sampled batch into the caller's arrays (blocking)."""
        self.data_ready.wait()
        self.data_ready.clear()

        # BUG FIX: random.sample() requires a sequence (not a set) on
        # Python 3.11+, so materialize the ready slots as a list first.
        for i, j in enumerate(random.sample(list(self.ready), args.batch_size)):
            origs_out[i] = self.orig_buffer[j]
            seeds_out[i] = self.seed_buffer[j]
            self.available.add(j)
        self.data_copied.set()
#======================================================================================================================
# Convolution Networks
#======================================================================================================================
class SubpixelReshuffleLayer(lasagne.layers.Layer):
    """Sub-pixel (pixel-shuffle) upscaling layer: trades channel depth for
    spatial resolution, turning ``upscale**2 * channels`` input feature maps
    into ``channels`` maps that are ``upscale`` times larger in each dimension.

    Based on the code by ajbrock: https://github.com/ajbrock/Neural-Photo-Editor/
    """
    def __init__(self, incoming, channels, upscale, **kwargs):
        # channels: output feature maps; upscale: spatial magnification factor.
        super(SubpixelReshuffleLayer, self).__init__(incoming, **kwargs)
        self.upscale = upscale
        self.channels = channels
    def get_output_shape_for(self, input_shape):
        # Unknown (None) spatial dims are passed through unchanged.
        def up(d): return self.upscale * d if d else d
        return (input_shape[0], self.channels, up(input_shape[2]), up(input_shape[3]))
    def get_output_for(self, input, deterministic=False, **kwargs):
        # Scatter each group of r*r input channels into an r-by-r spatial grid.
        out, r = T.zeros(self.get_output_shape_for(input.shape)), self.upscale
        for y, x in itertools.product(range(r), repeat=2):
            out=T.inc_subtensor(out[:,:,y::r,x::r], input[:,r*y+x::r*r,:,:])
        return out
class Model(object):
    """Builds and compiles the generator, perceptual (VGG19) and discriminator
    networks as one lasagne layer dictionary.

    The generator is always constructed (needed for inference); the perceptual
    and discriminator networks are built only when training (``args.train``).
    """
    def __init__(self):
        self.network = collections.OrderedDict()
        # Two symbolic inputs: the high-resolution target and the low-res seed.
        self.network['img'] = InputLayer((None, 3, None, None))
        self.network['seed'] = InputLayer((None, 3, None, None))
        config, params = self.load_model()
        self.setup_generator(self.last_layer(), config)
        if args.train:
            # Real images and generated output are stacked along the batch axis
            # so the perceptual network processes both in a single pass.
            concatenated = lasagne.layers.ConcatLayer([self.network['img'], self.network['out']], axis=0)
            self.setup_perceptual(concatenated)
            self.load_perceptual()
            self.setup_discriminator()
        self.load_generator(params)
        self.compile()
    #------------------------------------------------------------------------------------------------------------------
    # Network Configuration
    #------------------------------------------------------------------------------------------------------------------
    def last_layer(self):
        # Most recently added layer (OrderedDict preserves insertion order).
        return list(self.network.values())[-1]
    def make_layer(self, name, input, units, filter_size=(3,3), stride=(1,1), pad=(1,1), alpha=0.25):
        # Convolution followed by a parametric ReLU; both registered by name.
        conv = ConvLayer(input, units, filter_size, stride=stride, pad=pad, nonlinearity=None)
        prelu = lasagne.layers.ParametricRectifierLayer(conv, alpha=lasagne.init.Constant(alpha))
        self.network[name+'x'] = conv
        self.network[name+'>'] = prelu
        return prelu
    def make_block(self, name, input, units):
        # One (optionally residual) block; the second conv is currently disabled.
        self.make_layer(name+'-A', input, units, alpha=0.1)
        # self.make_layer(name+'-B', self.last_layer(), units, alpha=1.0)
        return ElemwiseSumLayer([input, self.last_layer()]) if args.generator_residual else self.last_layer()
    def setup_generator(self, input, config):
        """Build the generator: entry conv, optional downscales, residual blocks,
        sub-pixel upscales, and a final 3-channel output convolution."""
        # Values stored with a saved model override the command-line arguments.
        for k, v in config.items(): setattr(args, k, v)
        args.zoom = 2**(args.generator_upscale - args.generator_downscale)
        units_iter = extend(args.generator_filters)
        units = next(units_iter)
        self.make_layer('iter.0', input, units, filter_size=(7,7), pad=(3,3))
        for i in range(0, args.generator_downscale):
            self.make_layer('downscale%i'%i, self.last_layer(), next(units_iter), filter_size=(4,4), stride=(2,2))
        units = next(units_iter)
        for i in range(0, args.generator_blocks):
            self.make_block('iter.%i'%(i+1), self.last_layer(), units)
        for i in range(0, args.generator_upscale):
            u = next(units_iter)
            # Each 2x upscale: widen to 4*u channels, then pixel-shuffle back to u.
            self.make_layer('upscale%i.2'%i, self.last_layer(), u*4)
            self.network['upscale%i.1'%i] = SubpixelReshuffleLayer(self.last_layer(), u, 2)
        self.network['out'] = ConvLayer(self.last_layer(), 3, filter_size=(7,7), pad=(3,3), nonlinearity=None)
    def setup_perceptual(self, input):
        """Use lasagne to create a network of convolution layers using pre-trained VGG19 weights.
        """
        # VGG19 expects mean-subtracted pixel values in [0, 255].
        offset = np.array([103.939, 116.779, 123.680], dtype=np.float32).reshape((1,3,1,1))
        self.network['percept'] = lasagne.layers.NonlinearityLayer(input, lambda x: ((x+0.5)*255.0) - offset)
        self.network['mse'] = self.network['percept']
        self.network['conv1_1'] = ConvLayer(self.network['percept'], 64, 3, pad=1)
        self.network['conv1_2'] = ConvLayer(self.network['conv1_1'], 64, 3, pad=1)
        self.network['pool1']   = PoolLayer(self.network['conv1_2'], 2, mode='max')
        self.network['conv2_1'] = ConvLayer(self.network['pool1'], 128, 3, pad=1)
        self.network['conv2_2'] = ConvLayer(self.network['conv2_1'], 128, 3, pad=1)
        self.network['pool2']   = PoolLayer(self.network['conv2_2'], 2, mode='max')
        self.network['conv3_1'] = ConvLayer(self.network['pool2'], 256, 3, pad=1)
        self.network['conv3_2'] = ConvLayer(self.network['conv3_1'], 256, 3, pad=1)
        self.network['conv3_3'] = ConvLayer(self.network['conv3_2'], 256, 3, pad=1)
        self.network['conv3_4'] = ConvLayer(self.network['conv3_3'], 256, 3, pad=1)
        self.network['pool3']   = PoolLayer(self.network['conv3_4'], 2, mode='max')
        self.network['conv4_1'] = ConvLayer(self.network['pool3'], 512, 3, pad=1)
        self.network['conv4_2'] = ConvLayer(self.network['conv4_1'], 512, 3, pad=1)
        self.network['conv4_3'] = ConvLayer(self.network['conv4_2'], 512, 3, pad=1)
        self.network['conv4_4'] = ConvLayer(self.network['conv4_3'], 512, 3, pad=1)
        self.network['pool4']   = PoolLayer(self.network['conv4_4'], 2, mode='max')
        self.network['conv5_1'] = ConvLayer(self.network['pool4'], 512, 3, pad=1)
        self.network['conv5_2'] = ConvLayer(self.network['conv5_1'], 512, 3, pad=1)
        self.network['conv5_3'] = ConvLayer(self.network['conv5_2'], 512, 3, pad=1)
        self.network['conv5_4'] = ConvLayer(self.network['conv5_3'], 512, 3, pad=1)
    def setup_discriminator(self):
        """Build the discriminator on top of intermediate VGG feature maps."""
        c = args.discriminator_size
        self.make_layer('disc1.1', batch_norm(self.network['conv1_2']), 1*c, filter_size=(5,5), stride=(2,2), pad=(2,2))
        self.make_layer('disc1.2', self.last_layer(), 1*c, filter_size=(5,5), stride=(2,2), pad=(2,2))
        self.make_layer('disc2', batch_norm(self.network['conv2_2']), 2*c, filter_size=(5,5), stride=(2,2), pad=(2,2))
        self.make_layer('disc3', batch_norm(self.network['conv3_2']), 3*c, filter_size=(3,3), stride=(1,1), pad=(1,1))
        # Hypercolumn: concatenate multi-scale activations before classifying.
        hypercolumn = ConcatLayer([self.network['disc1.2>'], self.network['disc2>'], self.network['disc3>']])
        self.make_layer('disc4', hypercolumn, 4*c, filter_size=(1,1), stride=(1,1), pad=(0,0))
        self.make_layer('disc5', self.last_layer(), 3*c, filter_size=(3,3), stride=(2,2))
        self.make_layer('disc6', self.last_layer(), 2*c, filter_size=(1,1), stride=(1,1), pad=(0,0))
        self.network['disc'] = batch_norm(ConvLayer(self.last_layer(), 1, filter_size=(1,1),
                                                    nonlinearity=lasagne.nonlinearities.linear))
    #------------------------------------------------------------------------------------------------------------------
    # Input / Output
    #------------------------------------------------------------------------------------------------------------------
    def load_perceptual(self):
        """Open the serialized parameters from a pre-trained network, and load them into the model created.
        """
        vgg19_file = os.path.join(os.path.dirname(__file__), 'vgg19_conv.pkl.bz2')
        if not os.path.exists(vgg19_file):
            error("Model file with pre-trained convolution layers not found. Download here...",
                  "https://github.com/alexjc/neural-doodle/releases/download/v0.0/vgg19_conv.pkl.bz2")
        data = pickle.load(bz2.open(vgg19_file, 'rb'))
        # Copy flat parameter list into the VGG layers, in construction order.
        layers = lasagne.layers.get_all_layers(self.last_layer(), treat_as_input=[self.network['percept']])
        for p, d in zip(itertools.chain(*[l.get_params() for l in layers]), data): p.set_value(d)
    def list_generator_layers(self):
        # Yield (name, layer) for every parameterized layer of the generator.
        for l in lasagne.layers.get_all_layers(self.network['out'], treat_as_input=[self.network['img']]):
            if not l.get_params(): continue
            name = list(self.network.keys())[list(self.network.values()).index(l)]
            yield (name, l)
    def get_filename(self, absolute=False):
        # Model filename encodes zoom factor, type, model name and version.
        filename = 'ne%ix-%s-%s-%s.pkl.bz2' % (args.zoom, args.type, args.model, __version__)
        return os.path.join(os.path.dirname(__file__), filename) if absolute else filename
    def save_generator(self):
        """Serialize the generator weights (as float16) plus its configuration."""
        def cast(p): return p.get_value().astype(np.float16)
        params = {k: [cast(p) for p in l.get_params()] for (k, l) in self.list_generator_layers()}
        config = {k: getattr(args, k) for k in ['generator_blocks', 'generator_residual', 'generator_filters'] + \
                                               ['generator_upscale', 'generator_downscale']}
        pickle.dump((config, params), bz2.open(self.get_filename(absolute=True), 'wb'))
        print('  - Saved model as `{}` after training.'.format(self.get_filename()))
    def load_model(self):
        """Return (config, params) from disk, or empty dicts when training fresh."""
        if not os.path.exists(self.get_filename(absolute=True)):
            if args.train: return {}, {}
            error("Model file with pre-trained convolution layers not found. Download it here...",
                  "https://github.com/alexjc/neural-enhance/releases/download/v%s/%s"%(__version__, self.get_filename()))
        print('  - Loaded file `{}` with trained model.'.format(self.get_filename()))
        return pickle.load(bz2.open(self.get_filename(absolute=True), 'rb'))
    def load_generator(self, params):
        """Restore previously saved generator weights into the live network."""
        if len(params) == 0: return
        for k, l in self.list_generator_layers():
            assert k in params, "Couldn't find layer `%s` in loaded model.'" % k
            assert len(l.get_params()) == len(params[k]), "Mismatch in types of layers."
            for p, v in zip(l.get_params(), params[k]):
                assert v.shape == p.get_value().shape, "Mismatch in number of parameters for layer {}.".format(k)
                p.set_value(v.astype(np.float32))
    #------------------------------------------------------------------------------------------------------------------
    # Training & Loss Functions
    #------------------------------------------------------------------------------------------------------------------
    def loss_perceptual(self, p):
        # Batch layout (see __init__): first half real images, second half generated.
        return lasagne.objectives.squared_error(p[:args.batch_size], p[args.batch_size:]).mean()
    def loss_total_variation(self, x):
        # Smoothness prior on the generated image (exponent 1.25 softens edges less).
        return T.mean(((x[:,:,:-1,:-1] - x[:,:,1:,:-1])**2 + (x[:,:,:-1,:-1] - x[:,:,:-1,1:])**2)**1.25)
    def loss_adversarial(self, d):
        # Generator loss on the discriminator scores of the generated half.
        return T.mean(1.0 - T.nnet.softminus(d[args.batch_size:]))
    def loss_discriminator(self, d):
        # Discriminator loss: push generated scores down, real scores up.
        return T.mean(T.nnet.softminus(d[args.batch_size:]) - T.nnet.softplus(d[:args.batch_size]))
    def compile(self):
        """Compile theano functions: `predict` always, `fit` only when training."""
        # Helper function for rendering test images during training, or standalone inference mode.
        input_tensor, seed_tensor = T.tensor4(), T.tensor4()
        input_layers = {self.network['img']: input_tensor, self.network['seed']: seed_tensor}
        output = lasagne.layers.get_output([self.network[k] for k in ['seed','out']], input_layers, deterministic=True)
        self.predict = theano.function([seed_tensor], output)
        if not args.train: return
        output_layers = [self.network['out'], self.network[args.perceptual_layer], self.network['disc']]
        gen_out, percept_out, disc_out = lasagne.layers.get_output(output_layers, input_layers, deterministic=False)
        # Generator loss function, parameters and updates.
        self.gen_lr = theano.shared(np.array(0.0, dtype=theano.config.floatX))
        self.adversary_weight = theano.shared(np.array(0.0, dtype=theano.config.floatX))
        gen_losses = [self.loss_perceptual(percept_out) * args.perceptual_weight,
                      self.loss_total_variation(gen_out) * args.smoothness_weight,
                      self.loss_adversarial(disc_out) * self.adversary_weight]
        gen_params = lasagne.layers.get_all_params(self.network['out'], trainable=True)
        print('  - {} tensors learned for generator.'.format(len(gen_params)))
        gen_updates = lasagne.updates.adam(sum(gen_losses, 0.0), gen_params, learning_rate=self.gen_lr)
        # Discriminator loss function, parameters and updates.
        self.disc_lr = theano.shared(np.array(0.0, dtype=theano.config.floatX))
        disc_losses = [self.loss_discriminator(disc_out)]
        disc_params = list(itertools.chain(*[l.get_params() for k, l in self.network.items() if 'disc' in k]))
        print('  - {} tensors learned for discriminator.'.format(len(disc_params)))
        # Gradient clipping keeps the discriminator updates stable.
        grads = [g.clip(-5.0, +5.0) for g in T.grad(sum(disc_losses, 0.0), disc_params)]
        disc_updates = lasagne.updates.adam(grads, disc_params, learning_rate=self.disc_lr)
        # Combined Theano function for updating both generator and discriminator at the same time.
        updates = collections.OrderedDict(list(gen_updates.items()) + list(disc_updates.items()))
        self.fit = theano.function([input_tensor, seed_tensor], gen_losses + [disc_out.mean(axis=(1,2,3))], updates=updates)
class NeuralEnhancer(object):
    """Top-level driver: trains the model, or enhances images for the CLI.

    With ``loader=True`` a background :class:`DataLoader` thread is started to
    feed training batches; with ``loader=False`` only the generator is used,
    via :meth:`process`.
    """

    def __init__(self, loader):
        if args.train:
            print('{}Training {} epochs on random image sections with batch size {}.{}'\
                  .format(ansi.BLUE_B, args.epochs, args.batch_size, ansi.BLUE))
        else:
            if len(args.files) == 0: error("Specify the image(s) to enhance on the command-line.")
            print('{}Enhancing {} image(s) specified on the command-line.{}'\
                  .format(ansi.BLUE_B, len(args.files), ansi.BLUE))

        self.thread = DataLoader() if loader else None
        self.model = Model()
        print('{}'.format(ansi.ENDC))

    def imsave(self, fn, img):
        """Save a CHW float image in [-0.5, +0.5] as an 8-bit image file.

        BUG FIX: the original used `toimage` from scipy.misc, which was never
        imported (and has been removed from SciPy); convert via numpy + PIL.
        """
        array = (np.transpose(img + 0.5, (1, 2, 0)).clip(0.0, 1.0) * 255.0).round().astype(np.uint8)
        Image.fromarray(array).save(fn)

    def show_progress(self, orign, scald, repro):
        """Dump the current batch (original / seed pixels / reproduction) into `valid/`."""
        os.makedirs('valid', exist_ok=True)
        for i in range(args.batch_size):
            self.imsave('valid/%s_%03i_origin.png' % (args.model, i), orign[i])
            self.imsave('valid/%s_%03i_pixels.png' % (args.model, i), scald[i])
            self.imsave('valid/%s_%03i_reprod.png' % (args.model, i), repro[i])

    def decay_learning_rate(self):
        """Infinite generator: learning rate decayed every `args.learning_period` epochs."""
        l_r, t_cur = args.learning_rate, 0
        while True:
            yield l_r
            t_cur += 1
            if t_cur % args.learning_period == 0: l_r *= args.learning_decay

    def train(self):
        """Run the adversarial training loop, saving the generator periodically.

        Ctrl+C interrupts training cleanly; the model is saved on exit either way.
        """
        seed_size = args.batch_shape // args.zoom
        images = np.zeros((args.batch_size, 3, args.batch_shape, args.batch_shape), dtype=np.float32)
        seeds = np.zeros((args.batch_size, 3, seed_size, seed_size), dtype=np.float32)
        learning_rate = self.decay_learning_rate()

        # ROBUSTNESS: keep `epoch` defined even if we are interrupted before the
        # first epoch starts — the summary print below reads it.
        epoch = -1
        try:
            average, start = None, time.time()
            for epoch in range(args.epochs):
                total, stats = None, None
                l_r = next(learning_rate)
                # The generator/discriminator optimizers only kick in after
                # their configured start epoch.
                if epoch >= args.generator_start: self.model.gen_lr.set_value(l_r)
                if epoch >= args.discriminator_start: self.model.disc_lr.set_value(l_r)

                for _ in range(args.epoch_size):
                    self.thread.copy(images, seeds)
                    output = self.model.fit(images, seeds)
                    losses = np.array(output[:3], dtype=np.float32)
                    stats = (stats + output[3]) if stats is not None else output[3]
                    total = total + losses if total is not None else losses
                    l = np.sum(losses)
                    assert not np.isnan(losses).any()
                    # Exponential moving average of the total loss.
                    average = l if average is None else average * 0.95 + 0.05 * l

                scald, repro = self.model.predict(seeds)
                self.show_progress(images, scald, repro)
                total /= args.epoch_size
                stats /= args.epoch_size
                totals, labels = [sum(total)] + list(total), ['total', 'prcpt', 'smthn', 'advrs']
                gen_info = ['{}{}{}={:4.2e}'.format(ansi.WHITE_B, k, ansi.ENDC, v) for k, v in zip(labels, totals)]
                print('\rEpoch #{} at {:4.1f}s, lr={:4.2e}{}'.format(epoch+1, time.time()-start, l_r, ' '*(args.epoch_size-30)))
                print('  - generator {}'.format(' '.join(gen_info)))

                # Discriminator diagnostics: mean score and count of confident
                # classifications for the real and generated halves.
                real, fake = stats[:args.batch_size], stats[args.batch_size:]
                print('  - discriminator', real.mean(), len(np.where(real > 0.5)[0]),
                                           fake.mean(), len(np.where(fake < -0.5)[0]))
                if epoch == args.adversarial_start-1:
                    print('  - generator now optimizing against discriminator.')
                    self.model.adversary_weight.set_value(args.adversary_weight)
                if (epoch+1) % args.save_every == 0:
                    print('  - saving current generator layers to disk...')
                    self.model.save_generator()
        except KeyboardInterrupt:
            pass

        print('\n{}Trained {}x super-resolution for {} epochs.{}'\
              .format(ansi.CYAN_B, args.zoom, epoch+1, ansi.CYAN))
        self.model.save_generator()
        print(ansi.ENDC)

    def match_histograms(self, A, B, rng=(0.0, 255.0), bins=64):
        """Remap the values of `A` so that its histogram matches that of `B`."""
        (Ha, Xa), (Hb, Xb) = [np.histogram(i, bins=bins, range=rng, density=True) for i in [A, B]]
        X = np.linspace(rng[0], rng[1], bins, endpoint=True)
        Hpa, Hpb = [np.cumsum(i) * (rng[1] - rng[0]) ** 2 / float(bins) for i in [Ha, Hb]]
        # Forward CDF of A, then the inverse CDF of B, both linearly interpolated.
        inv_Ha = scipy.interpolate.interp1d(X, Hpa, bounds_error=False, fill_value='extrapolate')
        map_Hb = scipy.interpolate.interp1d(Hpb, X, bounds_error=False, fill_value='extrapolate')
        return map_Hb(inv_Ha(A).clip(0.0, 255.0))

    def process(self, original):
        """Enhance one HWC uint8 RGB image tile-by-tile; returns a PIL image."""
        # Snap the image to a shape that's compatible with the generator (2x, 4x)
        s = 2 ** max(args.generator_upscale, args.generator_downscale)
        by, bx = original.shape[0] % s, original.shape[1] % s
        original = original[by-by//2:original.shape[0]-by//2,bx-bx//2:original.shape[1]-bx//2,:]

        # Prepare padded input image as well as output buffer of zoomed size.
        s, p, z = args.rendering_tile, args.rendering_overlap, args.zoom
        image = np.pad(original, ((p, p), (p, p), (0, 0)), mode='reflect')
        output = np.zeros((original.shape[0] * z, original.shape[1] * z, 3), dtype=np.float32)

        # Iterate through the tile coordinates and pass them through the network.
        for y, x in itertools.product(range(0, original.shape[0], s), range(0, original.shape[1], s)):
            img = np.transpose(image[y:y+p*2+s,x:x+p*2+s,:] / 255.0 - 0.5, (2, 0, 1))[np.newaxis].astype(np.float32)
            *_, repro = self.model.predict(img)
            # Drop the overlap margin so tiles stitch together seamlessly.
            output[y*z:(y+s)*z,x*z:(x+s)*z,:] = np.transpose(repro[0] + 0.5, (1, 2, 0))[p*z:-p*z,p*z:-p*z,:]
            print('.', end='', flush=True)
        output = output.clip(0.0, 1.0) * 255.0

        # Match color histograms if the user specified this option.
        if args.rendering_histogram:
            for i in range(3):
                output[:,:,i] = self.match_histograms(output[:,:,i], original[:,:,i])

        # BUG FIX: `scipy.misc.toimage` was never imported and no longer exists;
        # build the result image with PIL directly (re-clip after histogram match).
        return Image.fromarray(np.clip(output, 0.0, 255.0).astype(np.uint8))
if __name__ == "__main__":
    if args.train:
        # Training mode: derive the zoom factor and run the adversarial loop.
        args.zoom = 2**(args.generator_upscale - args.generator_downscale)
        enhancer = NeuralEnhancer(loader=True)
        enhancer.train()
    else:
        # Inference mode: enhance every file given on the command line,
        # writing the result next to the input as `<name>_ne<zoom>x.png`.
        enhancer = NeuralEnhancer(loader=False)
        for filename in args.files:
            print(filename, end=' ')
            img = imread(filename, mode='RGB')
            out = enhancer.process(img)
            out.save(os.path.splitext(filename)[0]+'_ne%ix.png' % args.zoom)
            print(flush=True)
        print(ansi.ENDC)
| [
"lixiaojie"
] | lixiaojie |
d5e8d6dcd3352aea7afb3458f1ec740103900231 | 084c2f661e871133428c9681cb8e4ad76fc40cbd | /venv/Lib/site-packages/wx/core.py | 925004daaa07ad8eb058e9f2a614f0115fbd7577 | [] | no_license | taquynhnga2001/ntu-northspine-canteen-system | 5ec914b4f6cc85ca5df136063289da17ea096bb3 | 0dc072b709d3c75bd0db7738f08e1d1cc9dd949a | refs/heads/master | 2022-12-12T03:49:11.068899 | 2020-08-21T09:34:35 | 2020-08-21T09:34:35 | 265,493,323 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 137,832 | py | # This file is generated by wxPython's SIP generator. Do not edit by hand.
#
# Copyright: (c) 2018 by Total Control Software
# License: wxWindows License
"""
The classes in this module are the most commonly used classes for wxPython,
which is why they have been made visible in the core `wx` namespace.
Everything you need for building typical GUI applications is here.
"""
from ._core import *
#-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=
# This code block was included from src/core_ex.py
import sys as _sys
# Load version numbers from __version__ and some other initialization tasks...
# If the sip-generated extension actually loaded (wxEVT_NULL is present),
# perform the real initialization; otherwise fall back to harmless stubs so
# documentation tools can still import this module.
if 'wxEVT_NULL' in dir():
    from wx.__version__ import *
    import wx._core
    __version__ = VERSION_STRING
    # Add the build type to PlatformInfo
    PlatformInfo = PlatformInfo + ('build-type: ' + BUILD_TYPE, )
    # Register a function to be called when Python terminates that will clean
    # up and release all system resources that wxWidgets allocated.
    import atexit
    atexit.register(wx._core._wxPyCleanup)
    del atexit
else:
    Port = ''
    Platform = ''
    PlatformInfo = []
# A little trick to make 'wx' be a reference to this module so wx.Names can
# be used in the python code here.
wx = _sys.modules[__name__]
import warnings
# Warning category raised whenever deprecated wxPython APIs are used; shown
# by default (DeprecationWarning alone would normally be hidden).
class wxPyDeprecationWarning(DeprecationWarning):
    pass
warnings.simplefilter('default', wxPyDeprecationWarning)
del warnings
def deprecated(item, msg='', useName=False):
    """
    Create a delegating wrapper that raises a deprecation warning.  Can be
    used with callable objects (functions, methods, classes) or with
    properties.

    :param item: the class, callable or property descriptor to wrap
    :param msg: extra text appended to the warning message
    :param useName: if True, include ``item.__name__`` in the warning
    :raises TypeError: if ``item`` is not a class, callable or descriptor
    """
    import warnings
    name = ''
    if useName:
        try:
            name = ' ' + item.__name__
        except AttributeError:
            pass
    if isinstance(item, type):
        # It is a class.  Make a subclass that raises a warning.
        class DeprecatedClassProxy(item):
            def __init__(*args, **kw):
                warnings.warn("Using deprecated class%s. %s" % (name, msg),
                              wxPyDeprecationWarning, stacklevel=2)
                item.__init__(*args, **kw)
        DeprecatedClassProxy.__name__ = item.__name__
        return DeprecatedClassProxy
    elif callable(item):
        # wrap a new function around the callable
        def deprecated_func(*args, **kw):
            warnings.warn("Call to deprecated item%s. %s" % (name, msg),
                          wxPyDeprecationWarning, stacklevel=2)
            if not kw:
                return item(*args)
            return item(*args, **kw)
        # Preserve the callable's metadata so introspection still works.
        deprecated_func.__name__ = item.__name__
        deprecated_func.__doc__ = item.__doc__
        if hasattr(item, '__dict__'):
            deprecated_func.__dict__.update(item.__dict__)
        return deprecated_func
    elif hasattr(item, '__get__'):
        # it should be a property if there is a getter
        # NOTE(review): the descriptors below warn using the closed-over `msg`
        # rather than `self.msg`; equivalent here since they are always equal,
        # but inconsistent with the stored attribute.
        class DepGetProp(object):
            def __init__(self, item, msg):
                self.item = item
                self.msg = msg
            def __get__(self, inst, klass):
                warnings.warn("Accessing deprecated property. %s" % msg,
                              wxPyDeprecationWarning, stacklevel=2)
                return self.item.__get__(inst, klass)
        class DepGetSetProp(DepGetProp):
            def __set__(self, inst, val):
                warnings.warn("Accessing deprecated property. %s" % msg,
                              wxPyDeprecationWarning, stacklevel=2)
                return self.item.__set__(inst, val)
        class DepGetSetDelProp(DepGetSetProp):
            def __delete__(self, inst):
                warnings.warn("Accessing deprecated property. %s" % msg,
                              wxPyDeprecationWarning, stacklevel=2)
                return self.item.__delete__(inst)
        # Choose the narrowest wrapper matching the descriptor's capabilities.
        if hasattr(item, '__set__') and hasattr(item, '__delete__'):
            return DepGetSetDelProp(item, msg)
        elif hasattr(item, '__set__'):
            return DepGetSetProp(item, msg)
        else:
            return DepGetProp(item, msg)
    else:
        raise TypeError("unsupported type %s" % type(item))
def deprecatedMsg(msg):
    """
    Convenience wrapper around :func:`deprecated` that attaches a custom
    message to the warning raised when the wrapped item is used.  Because it
    returns the real decorator when called, it can be used directly in the
    ``@decorator`` position.
    """
    from functools import partial
    return partial(deprecated, msg=msg, useName=True)
#----------------------------------------------------------------------------
# Compatibility constant carried over from Classic wxPython.
EmptyString = ""
#----------------------------------------------------------------------------
# End of included code block
#-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=
# wxWidgets 3.x folded the old BG_STYLE_CUSTOM value into BG_STYLE_PAINT;
# keep the old name as an alias for backwards compatibility.
BG_STYLE_CUSTOM = BG_STYLE_PAINT
# Monkey-patch Classic-wxPython aliases onto ClientDataContainer; the helper
# functions are deleted afterwards so they don't leak into the module namespace.
def _ClientDataContainer_GetClientObject(self):
    """
    Alias for :meth:`GetClientData`
    """
    return self.GetClientData()
ClientDataContainer.GetClientObject = _ClientDataContainer_GetClientObject
del _ClientDataContainer_GetClientObject
def _ClientDataContainer_SetClientObject(self, data):
    """
    Alias for :meth:`SetClientData`
    """
    self.SetClientData(data)
ClientDataContainer.SetClientObject = _ClientDataContainer_SetClientObject
del _ClientDataContainer_SetClientObject
# Expose the client data as a read/write property as well.
ClientDataContainer.ClientData = property(ClientDataContainer.GetClientData, ClientDataContainer.SetClientData)
def _initStockObjects():
    """Copy the values of the C++ stock GDI objects (fonts, pens, brushes,
    colours, cursors) into the placeholder Python objects in the ``wx``
    namespace.  Called once the wx.App has been created, since the stock
    objects can only be materialized after the GUI toolkit is initialized.
    """
    import wx
    wx.NORMAL_FONT._copyFrom( StockGDI.instance().GetFont(StockGDI.FONT_NORMAL))
    wx.SMALL_FONT._copyFrom( StockGDI.instance().GetFont(StockGDI.FONT_SMALL))
    wx.SWISS_FONT._copyFrom( StockGDI.instance().GetFont(StockGDI.FONT_SWISS))
    wx.ITALIC_FONT._copyFrom( StockGDI.instance().GetFont(StockGDI.FONT_ITALIC))
    wx.BLACK_DASHED_PEN._copyFrom( StockGDI.GetPen(StockGDI.PEN_BLACKDASHED))
    wx.BLACK_PEN._copyFrom( StockGDI.GetPen(StockGDI.PEN_BLACK))
    wx.BLUE_PEN._copyFrom( StockGDI.GetPen(StockGDI.PEN_BLUE))
    wx.CYAN_PEN._copyFrom( StockGDI.GetPen(StockGDI.PEN_CYAN))
    wx.GREEN_PEN._copyFrom( StockGDI.GetPen(StockGDI.PEN_GREEN))
    wx.YELLOW_PEN._copyFrom( StockGDI.GetPen(StockGDI.PEN_YELLOW))
    wx.GREY_PEN._copyFrom( StockGDI.GetPen(StockGDI.PEN_GREY))
    wx.LIGHT_GREY_PEN._copyFrom( StockGDI.GetPen(StockGDI.PEN_LIGHTGREY))
    wx.MEDIUM_GREY_PEN._copyFrom( StockGDI.GetPen(StockGDI.PEN_MEDIUMGREY))
    wx.RED_PEN._copyFrom( StockGDI.GetPen(StockGDI.PEN_RED))
    wx.TRANSPARENT_PEN._copyFrom( StockGDI.GetPen(StockGDI.PEN_TRANSPARENT))
    wx.WHITE_PEN._copyFrom( StockGDI.GetPen(StockGDI.PEN_WHITE))
    wx.BLACK_BRUSH._copyFrom( StockGDI.GetBrush(StockGDI.BRUSH_BLACK))
    wx.BLUE_BRUSH._copyFrom( StockGDI.GetBrush(StockGDI.BRUSH_BLUE))
    wx.CYAN_BRUSH._copyFrom( StockGDI.GetBrush(StockGDI.BRUSH_CYAN))
    wx.GREEN_BRUSH._copyFrom( StockGDI.GetBrush(StockGDI.BRUSH_GREEN))
    wx.YELLOW_BRUSH._copyFrom( StockGDI.GetBrush(StockGDI.BRUSH_YELLOW))
    wx.GREY_BRUSH._copyFrom( StockGDI.GetBrush(StockGDI.BRUSH_GREY))
    wx.LIGHT_GREY_BRUSH._copyFrom( StockGDI.GetBrush(StockGDI.BRUSH_LIGHTGREY))
    wx.MEDIUM_GREY_BRUSH._copyFrom( StockGDI.GetBrush(StockGDI.BRUSH_MEDIUMGREY))
    wx.RED_BRUSH._copyFrom( StockGDI.GetBrush(StockGDI.BRUSH_RED))
    wx.TRANSPARENT_BRUSH._copyFrom( StockGDI.GetBrush(StockGDI.BRUSH_TRANSPARENT))
    wx.WHITE_BRUSH._copyFrom( StockGDI.GetBrush(StockGDI.BRUSH_WHITE))
    wx.BLACK._copyFrom( StockGDI.GetColour(StockGDI.COLOUR_BLACK))
    wx.BLUE._copyFrom( StockGDI.GetColour(StockGDI.COLOUR_BLUE))
    wx.CYAN._copyFrom( StockGDI.GetColour(StockGDI.COLOUR_CYAN))
    wx.GREEN._copyFrom( StockGDI.GetColour(StockGDI.COLOUR_GREEN))
    wx.YELLOW._copyFrom( StockGDI.GetColour(StockGDI.COLOUR_YELLOW))
    wx.LIGHT_GREY._copyFrom( StockGDI.GetColour(StockGDI.COLOUR_LIGHTGREY))
    wx.RED._copyFrom( StockGDI.GetColour(StockGDI.COLOUR_RED))
    wx.WHITE._copyFrom( StockGDI.GetColour(StockGDI.COLOUR_WHITE))
    wx.CROSS_CURSOR._copyFrom( StockGDI.GetCursor(StockGDI.CURSOR_CROSS))
    wx.HOURGLASS_CURSOR._copyFrom( StockGDI.GetCursor(StockGDI.CURSOR_HOURGLASS))
    wx.STANDARD_CURSOR._copyFrom( StockGDI.GetCursor(StockGDI.CURSOR_STANDARD))
    wx.TheFontList = StockGDI._getTheFontList()
    wx.ThePenList = StockGDI._getThePenList()
    wx.TheBrushList = StockGDI._getTheBrushList()
    wx.TheColourDatabase = StockGDI._getTheColourDatabase()
# Install the initializer as a static method so wx.App can invoke it.
StockGDI._initStockObjects = staticmethod(_initStockObjects)
# Give wx.DateTime friendly repr/str implementations.
def _DateTime___repr__(self):
    from six import PY2
    if self.IsValid():
        f = self.Format()
        # On Python 2 Format() returns unicode; encode for a byte-string repr.
        if PY2: f = f.encode('utf-8')
        return '<wx.DateTime: "%s">' % f
    else:
        return '<wx.DateTime: "INVALID">'
DateTime.__repr__ = _DateTime___repr__
del _DateTime___repr__
def _DateTime___str__(self):
    from six import PY2
    if self.IsValid():
        f = self.Format()
        if PY2: f = f.encode('utf-8')
        return f
    else:
        return "INVALID DateTime"
DateTime.__str__ = _DateTime___str__
del _DateTime___str__
# Classic-wxPython alias for the (invalid) default DateTime singleton.
InvalidDateTime = DefaultDateTime
# The factory functions below are kept only for backwards compatibility with
# Classic wxPython; each simply forwards to the corresponding static method
# and raises a wxPyDeprecationWarning when called.
@wx.deprecatedMsg("Use :meth:`DateTime.FromTimeT` instead.")
def DateTimeFromTimeT(timet):
    """
    Compatibility wrapper for :meth:`DateTime.FromTimeT`
    """
    return DateTime.FromTimeT(timet)
@wx.deprecatedMsg("Use :meth:`DateTime.FromJDN` instead.")
def DateTimeFromJDN(jdn):
    """
    Compatibility wrapper for :meth:`DateTime.FromJDN`
    """
    return DateTime.FromJDN(jdn)
@wx.deprecatedMsg("Use :meth:`DateTime.FromHMS` instead.")
def DateTimeFromHMS(hour, minute=0, second=0, millisecond=0):
    """
    Compatibility wrapper for :meth:`DateTime.FromHMS`
    """
    return DateTime.FromHMS(hour, minute, second, millisecond)
@wx.deprecatedMsg("Use :meth:`DateTime.FromDMY` instead.")
def DateTimeFromDMY(day, month, year=DateTime.Inv_Year, hour=0, minute=0, second=0, millisecond=0):
    """
    Compatibility wrapper for :meth:`DateTime.FromDMY`
    """
    return DateTime.FromDMY(day, month, year, hour, minute, second, millisecond)
def pydate2wxdate(date):
    """
    Convert a Python date or datetime to a :class:`DateTime` object

    :param date: a ``datetime.date`` or ``datetime.datetime`` instance
    :rtype: :class:`DateTime`
    """
    import datetime
    assert isinstance(date, (datetime.datetime, datetime.date))
    return DateTime(date)   # the built-in typemap will convert it for us
def wxdate2pydate(date):
    """
    Convert a :class:`DateTime` object to a Python datetime.

    Returns ``None`` when the wx date is invalid.  Note that wx months are
    zero-based and milliseconds must be scaled up to microseconds.
    """
    import datetime
    assert isinstance(date, DateTime)
    if not date.IsValid():
        return None
    return datetime.datetime(date.year, date.month + 1, date.day,
                             date.hour, date.minute, date.second,
                             date.millisecond * 1000)
# Monkey-patch repr/hash onto WindowIDRef so it prints usefully and can be
# used as a dictionary key or set member.
def _WindowIDRef___repr__(self):
    return "WindowIDRef: {}".format(self.GetId())
WindowIDRef.__repr__ = _WindowIDRef___repr__
del _WindowIDRef___repr__
def _WindowIDRef___hash__(self):
    return hash(self.GetValue())
WindowIDRef.__hash__ = _WindowIDRef___hash__
del _WindowIDRef___hash__
def NewIdRef(count=1):
    """
    Reserves a new Window ID (or range of WindowIDs) and returns a
    :class:`wx.WindowIDRef` object (or list of them) that will help
    manage the reservation of that ID.

    This function is intended to be a drop-in replacement of the old
    and deprecated :func:`wx.NewId` function, with the added benefit
    that the ID should never conflict with an in-use ID or other IDs
    generated by this function.
    """
    if count == 1:
        return WindowIDRef(IdManager.ReserveId())
    # Reserve a contiguous range and wrap each reserved ID individually.
    first = IdManager.ReserveId(count)
    return [WindowIDRef(i) for i in range(first, first + count)]
# Friendly repr for the sip-generated ArrayVideoModes container.
def _ArrayVideoModes___repr__(self):
    return "ArrayVideoModes: " + repr(list(self))
ArrayVideoModes.__repr__ = _ArrayVideoModes___repr__
del _ArrayVideoModes___repr__
#----------------------------------------------------------------------------
# Add the directory where the wxWidgets catalogs were installed
# to the default catalog path, if they were put in the package dir.
import os
_localedir = os.path.join(os.path.dirname(__file__), "locale")
if os.path.exists(_localedir):
    Locale.AddCatalogLookupPathPrefix(_localedir)
del os
#----------------------------------------------------------------------------
# ---- wx.Point: sequence protocol, pickling and namedtuple snapshot -------
# _im_Point is defined further down in this module; the name is resolved
# at call time, so the forward reference is safe.
def _Point_GetIM(self):
    """
    Returns an immutable representation of the ``wx.Point`` object, based on ``namedtuple``.
    This new object is hashable and can be used as a dictionary key,
    be added to sets, etc. It can be converted back into a real ``wx.Point``
    with a simple statement like this: ``obj = wx.Point(imObj)``.
    """
    return _im_Point(*self.Get())
Point.GetIM = _Point_GetIM
del _Point_GetIM
def _Point___str__(self):
    return str(self.Get())
Point.__str__ = _Point___str__
del _Point___str__
def _Point___repr__(self):
    return "wx.Point"+str(self.Get())
Point.__repr__ = _Point___repr__
del _Point___repr__
def _Point___len__(self):
    return len(self.Get())
Point.__len__ = _Point___len__
del _Point___len__
def _Point___reduce__(self):
    # Pickle support: rebuild via Point(x, y).
    return (Point, self.Get())
Point.__reduce__ = _Point___reduce__
del _Point___reduce__
def _Point___getitem__(self, idx):
    return self.Get()[idx]
Point.__getitem__ = _Point___getitem__
del _Point___getitem__
def _Point___setitem__(self, idx, val):
    # Only indices 0 (x) and 1 (y) are assignable, mirroring __getitem__.
    if idx == 0: self.x = val
    elif idx == 1: self.y = val
    else: raise IndexError
Point.__setitem__ = _Point___setitem__
del _Point___setitem__
Point.__safe_for_unpickling__ = True
Point.IM = property(Point.GetIM)
# ---- wx.Size: sequence protocol, pickling and namedtuple snapshot --------
def _Size_GetIM(self):
    """
    Returns an immutable representation of the ``wx.Size`` object, based on ``namedtuple``.
    This new object is hashable and can be used as a dictionary key,
    be added to sets, etc. It can be converted back into a real ``wx.Size``
    with a simple statement like this: ``obj = wx.Size(imObj)``.
    """
    return _im_Size(*self.Get())
Size.GetIM = _Size_GetIM
del _Size_GetIM
def _Size___str__(self):
    return str(self.Get())
Size.__str__ = _Size___str__
del _Size___str__
def _Size___repr__(self):
    return "wx.Size"+str(self.Get())
Size.__repr__ = _Size___repr__
del _Size___repr__
def _Size___len__(self):
    return len(self.Get())
Size.__len__ = _Size___len__
del _Size___len__
# __nonzero__ is the Python 2 spelling of __bool__; both are installed so
# the truth test works on either interpreter.
def _Size___nonzero__(self):
    return self.Get() != (0,0)
Size.__nonzero__ = _Size___nonzero__
del _Size___nonzero__
def _Size___bool__(self):
    return self.Get() != (0,0)
Size.__bool__ = _Size___bool__
del _Size___bool__
def _Size___reduce__(self):
    # Pickle support: rebuild via Size(width, height).
    return (Size, self.Get())
Size.__reduce__ = _Size___reduce__
del _Size___reduce__
def _Size___getitem__(self, idx):
    return self.Get()[idx]
Size.__getitem__ = _Size___getitem__
del _Size___getitem__
def _Size___setitem__(self, idx, val):
    # Only indices 0 (width) and 1 (height) are assignable.
    if idx == 0: self.width = val
    elif idx == 1: self.height = val
    else: raise IndexError
Size.__setitem__ = _Size___setitem__
del _Size___setitem__
Size.__safe_for_unpickling__ = True
# Consistency fix: the other geometry classes augmented in this module
# (Point, RealPoint, Point2D, Rect2D, Position) all expose the immutable
# snapshot as an ``IM`` property; Size was missing it.
Size.IM = property(Size.GetIM)
# ---- wx.Rect: sequence protocol, pickling and namedtuple snapshot --------
def _Rect_GetIM(self):
    """
    Returns an immutable representation of the ``wx.Rect`` object, based on ``namedtuple``.
    This new object is hashable and can be used as a dictionary key,
    be added to sets, etc. It can be converted back into a real ``wx.Rect``
    with a simple statement like this: ``obj = wx.Rect(imObj)``.
    """
    return _im_Rect(*self.Get())
Rect.GetIM = _Rect_GetIM
del _Rect_GetIM
def _Rect___str__(self):
    return str(self.Get())
Rect.__str__ = _Rect___str__
del _Rect___str__
def _Rect___repr__(self):
    return "wx.Rect"+str(self.Get())
Rect.__repr__ = _Rect___repr__
del _Rect___repr__
def _Rect___len__(self):
    return len(self.Get())
Rect.__len__ = _Rect___len__
del _Rect___len__
# __nonzero__ is the Python 2 spelling of __bool__.
def _Rect___nonzero__(self):
    return self.Get() != (0,0,0,0)
Rect.__nonzero__ = _Rect___nonzero__
del _Rect___nonzero__
def _Rect___bool__(self):
    return self.Get() != (0,0,0,0)
Rect.__bool__ = _Rect___bool__
del _Rect___bool__
def _Rect___reduce__(self):
    # Pickle support: rebuild via Rect(x, y, width, height).
    return (Rect, self.Get())
Rect.__reduce__ = _Rect___reduce__
del _Rect___reduce__
def _Rect___getitem__(self, idx):
    return self.Get()[idx]
Rect.__getitem__ = _Rect___getitem__
del _Rect___getitem__
def _Rect___setitem__(self, idx, val):
    # Indices map to (x, y, width, height), mirroring Get().
    if idx == 0: self.x = val
    elif idx == 1: self.y = val
    elif idx == 2: self.width = val
    elif idx == 3: self.height = val
    else: raise IndexError
Rect.__setitem__ = _Rect___setitem__
del _Rect___setitem__
Rect.__safe_for_unpickling__ = True
# Consistency fix: the other geometry classes augmented in this module all
# expose the immutable snapshot as an ``IM`` property; Rect was missing it.
Rect.IM = property(Rect.GetIM)
# ---- wx.RealPoint: sequence protocol, pickling and namedtuple snapshot ---
def _RealPoint_GetIM(self):
    """
    Returns an immutable representation of the ``wx.RealPoint`` object, based on ``namedtuple``.
    This new object is hashable and can be used as a dictionary key,
    be added to sets, etc. It can be converted back into a real ``wx.RealPoint``
    with a simple statement like this: ``obj = wx.RealPoint(imObj)``.
    """
    return _im_RealPoint(*self.Get())
RealPoint.GetIM = _RealPoint_GetIM
del _RealPoint_GetIM
def _RealPoint___str__(self):
    return str(self.Get())
RealPoint.__str__ = _RealPoint___str__
del _RealPoint___str__
def _RealPoint___repr__(self):
    return "wx.RealPoint"+str(self.Get())
RealPoint.__repr__ = _RealPoint___repr__
del _RealPoint___repr__
def _RealPoint___len__(self):
    return len(self.Get())
RealPoint.__len__ = _RealPoint___len__
del _RealPoint___len__
# __nonzero__ is the Python 2 spelling of __bool__.
def _RealPoint___nonzero__(self):
    return self.Get() != (0,0)
RealPoint.__nonzero__ = _RealPoint___nonzero__
del _RealPoint___nonzero__
def _RealPoint___bool__(self):
    return self.Get() != (0,0)
RealPoint.__bool__ = _RealPoint___bool__
del _RealPoint___bool__
def _RealPoint___reduce__(self):
    # BUGFIX: this previously returned (Rect, self.Get()), a copy/paste
    # error that made unpickling a RealPoint produce a wx.Rect instead.
    # Rebuild via RealPoint(x, y), matching the other geometry classes.
    return (RealPoint, self.Get())
RealPoint.__reduce__ = _RealPoint___reduce__
del _RealPoint___reduce__
def _RealPoint___getitem__(self, idx):
    return self.Get()[idx]
RealPoint.__getitem__ = _RealPoint___getitem__
del _RealPoint___getitem__
def _RealPoint___setitem__(self, idx, val):
    # Only indices 0 (x) and 1 (y) are assignable.
    if idx == 0: self.x = val
    elif idx == 1: self.y = val
    else: raise IndexError
RealPoint.__setitem__ = _RealPoint___setitem__
del _RealPoint___setitem__
RealPoint.__safe_for_unpickling__ = True
RealPoint.IM = property(RealPoint.GetIM)
# Alias kept for backwards compatibility with Classic wxPython.
def _ColourDatabase_FindColour(self, colour):
    return self.Find(colour)
ColourDatabase.FindColour = _ColourDatabase_FindColour
del _ColourDatabase_FindColour
# Immutable namedtuple counterparts used by the GetIM methods above.
from collections import namedtuple
_im_Point = namedtuple('_im_Point', ['x', 'y'])
del namedtuple
def _PointList___repr__(self):
    return "PointList: " + repr(list(self))
PointList.__repr__ = _PointList___repr__
del _PointList___repr__
from collections import namedtuple
_im_Size = namedtuple('_im_Size', ['width', 'height'])
del namedtuple
from collections import namedtuple
_im_Rect = namedtuple('_im_Rect', ['x', 'y', 'width', 'height'])
del namedtuple
from collections import namedtuple
_im_RealPoint = namedtuple('_im_RealPoint', ['x', 'y'])
del namedtuple
# ---- wx.Point2D: sequence protocol, pickling and namedtuple snapshot -----
def _Point2D_GetIM(self):
    """
    Returns an immutable representation of the ``wx.Point2D`` object, based on ``namedtuple``.
    This new object is hashable and can be used as a dictionary key,
    be added to sets, etc. It can be converted back into a real ``wx.Point2D``
    with a simple statement like this: ``obj = wx.Point2D(imObj)``.
    """
    return _im_Point2D(*self.Get())
Point2D.GetIM = _Point2D_GetIM
del _Point2D_GetIM
def _Point2D___str__(self):
    return str(self.Get())
Point2D.__str__ = _Point2D___str__
del _Point2D___str__
def _Point2D___repr__(self):
    return "wx.Point2D"+str(self.Get())
Point2D.__repr__ = _Point2D___repr__
del _Point2D___repr__
def _Point2D___len__(self):
    return len(self.Get())
Point2D.__len__ = _Point2D___len__
del _Point2D___len__
# __nonzero__ is the Python 2 spelling of __bool__.
def _Point2D___nonzero__(self):
    return self.Get() != (0,0)
Point2D.__nonzero__ = _Point2D___nonzero__
del _Point2D___nonzero__
def _Point2D___bool__(self):
    return self.Get() != (0,0)
Point2D.__bool__ = _Point2D___bool__
del _Point2D___bool__
def _Point2D___reduce__(self):
    # Pickle support: rebuild via Point2D(x, y).
    return (Point2D, self.Get())
Point2D.__reduce__ = _Point2D___reduce__
del _Point2D___reduce__
def _Point2D___getitem__(self, idx):
    return self.Get()[idx]
Point2D.__getitem__ = _Point2D___getitem__
del _Point2D___getitem__
def _Point2D___setitem__(self, idx, val):
    # Only indices 0 (x) and 1 (y) are assignable.
    if idx == 0: self.x = val
    elif idx == 1: self.y = val
    else: raise IndexError
Point2D.__setitem__ = _Point2D___setitem__
del _Point2D___setitem__
Point2D.__safe_for_unpickling__ = True
Point2D.IM = property(Point2D.GetIM)
# ---- wx.Rect2D: sequence protocol, pickling and namedtuple snapshot ------
def _Rect2D_GetIM(self):
    """
    Returns an immutable representation of the ``wx.Rect2D`` object, based on ``namedtuple``.
    This new object is hashable and can be used as a dictionary key,
    be added to sets, etc. It can be converted back into a real ``wx.Rect2D``
    with a simple statement like this: ``obj = wx.Rect2D(imObj)``.
    """
    return _im_Rect2D(*self.Get())
Rect2D.GetIM = _Rect2D_GetIM
del _Rect2D_GetIM
def _Rect2D___str__(self):
    return str(self.Get())
Rect2D.__str__ = _Rect2D___str__
del _Rect2D___str__
def _Rect2D___repr__(self):
    return "wx.Rect2D"+str(self.Get())
Rect2D.__repr__ = _Rect2D___repr__
del _Rect2D___repr__
def _Rect2D___len__(self):
    return len(self.Get())
Rect2D.__len__ = _Rect2D___len__
del _Rect2D___len__
# __nonzero__ is the Python 2 spelling of __bool__.
def _Rect2D___nonzero__(self):
    return self.Get() != (0,0,0,0)
Rect2D.__nonzero__ = _Rect2D___nonzero__
del _Rect2D___nonzero__
def _Rect2D___bool__(self):
    return self.Get() != (0,0,0,0)
Rect2D.__bool__ = _Rect2D___bool__
del _Rect2D___bool__
def _Rect2D___reduce__(self):
    # Pickle support: rebuild via Rect2D(x, y, width, height).
    return (Rect2D, self.Get())
Rect2D.__reduce__ = _Rect2D___reduce__
del _Rect2D___reduce__
def _Rect2D___getitem__(self, idx):
    return self.Get()[idx]
Rect2D.__getitem__ = _Rect2D___getitem__
del _Rect2D___getitem__
def _Rect2D___setitem__(self, idx, val):
    # Indices map to (x, y, width, height), mirroring Get().
    if idx == 0: self.x = val
    elif idx == 1: self.y = val
    elif idx == 2: self.width = val
    elif idx == 3: self.height = val
    else: raise IndexError
Rect2D.__setitem__ = _Rect2D___setitem__
del _Rect2D___setitem__
Rect2D.__safe_for_unpickling__ = True
Rect2D.IM = property(Rect2D.GetIM)
# Immutable namedtuple counterparts for the two classes above.
from collections import namedtuple
_im_Point2D = namedtuple('_im_Point2D', ['x', 'y'])
del namedtuple
from collections import namedtuple
_im_Rect2D = namedtuple('_im_Rect2D', ['x', 'y', 'width', 'height'])
del namedtuple
# ---- wx.Position: sequence protocol, pickling and namedtuple snapshot ----
# Note that Position uses (Row, Col) fields rather than (x, y).
def _Position_GetIM(self):
    """
    Returns an immutable representation of the ``wx.Position`` object, based on ``namedtuple``.
    This new object is hashable and can be used as a dictionary key,
    be added to sets, etc. It can be converted back into a real ``wx.Position``
    with a simple statement like this: ``obj = wx.Position(imObj)``.
    """
    return _im_Position(*self.Get())
Position.GetIM = _Position_GetIM
del _Position_GetIM
def _Position___str__(self):
    return str(self.Get())
Position.__str__ = _Position___str__
del _Position___str__
def _Position___repr__(self):
    return "wx.Position"+str(self.Get())
Position.__repr__ = _Position___repr__
del _Position___repr__
def _Position___len__(self):
    return len(self.Get())
Position.__len__ = _Position___len__
del _Position___len__
# __nonzero__ is the Python 2 spelling of __bool__.
def _Position___nonzero__(self):
    return self.Get() != (0,0)
Position.__nonzero__ = _Position___nonzero__
del _Position___nonzero__
def _Position___bool__(self):
    return self.Get() != (0,0)
Position.__bool__ = _Position___bool__
del _Position___bool__
def _Position___reduce__(self):
    # Pickle support: rebuild via Position(row, col).
    return (Position, self.Get())
Position.__reduce__ = _Position___reduce__
del _Position___reduce__
def _Position___getitem__(self, idx):
    return self.Get()[idx]
Position.__getitem__ = _Position___getitem__
del _Position___getitem__
def _Position___setitem__(self, idx, val):
    # Only indices 0 (Row) and 1 (Col) are assignable.
    if idx == 0: self.Row = val
    elif idx == 1: self.Col = val
    else: raise IndexError
Position.__setitem__ = _Position___setitem__
del _Position___setitem__
Position.__safe_for_unpickling__ = True
Position.IM = property(Position.GetIM)
from collections import namedtuple
_im_Position = namedtuple('_im_Position', ['Row', 'Col'])
del namedtuple
# ---- wx.Colour: sequence protocol, pickling and namedtuple snapshot ------
def _Colour_GetIM(self):
    """
    Returns an immutable representation of the ``wx.Colour`` object, based on ``namedtuple``.
    This new object is hashable and can be used as a dictionary key,
    be added to sets, etc. It can be converted back into a real ``wx.Colour``
    with a simple statement like this: ``obj = wx.Colour(imObj)``.
    """
    return _im_Colour(*self.Get())
Colour.GetIM = _Colour_GetIM
del _Colour_GetIM
def _Colour___str__(self):
    return str(self.Get())
Colour.__str__ = _Colour___str__
del _Colour___str__
def _Colour___repr__(self):
    return "wx.Colour"+str(self.Get())
Colour.__repr__ = _Colour___repr__
del _Colour___repr__
def _Colour___len__(self):
    return len(self.Get())
Colour.__len__ = _Colour___len__
del _Colour___len__
def _Colour___reduce__(self):
    # Pickle support: rebuild via Colour(red, green, blue, alpha).
    return (Colour, self.Get())
Colour.__reduce__ = _Colour___reduce__
del _Colour___reduce__
def _Colour___getitem__(self, idx):
    return self.Get()[idx]
Colour.__getitem__ = _Colour___getitem__
del _Colour___getitem__
def _Colour___setitem__(self, idx, val):
    # Indices map to (red, green, blue, alpha), mirroring Get().
    if idx == 0: self.red = val
    elif idx == 1: self.green = val
    elif idx == 2: self.blue = val
    elif idx == 3: self.alpha = val
    else: raise IndexError
Colour.__setitem__ = _Colour___setitem__
del _Colour___setitem__
Colour.__safe_for_unpickling__ = True
# Consistency fix: the geometry classes augmented in this module expose the
# immutable snapshot as an ``IM`` property; Colour was missing it.
Colour.IM = property(Colour.GetIM)
# These stock colours will be initialized when the wx.App object is created.
# Until then they are empty/invalid Colour placeholders.
BLACK = Colour()
BLUE = Colour()
CYAN = Colour()
GREEN = Colour()
YELLOW = Colour()
LIGHT_GREY = Colour()
RED = Colour()
WHITE = Colour()
from collections import namedtuple
_im_Colour = namedtuple('_im_Colour', ['red', 'green', 'blue', 'alpha'])
del namedtuple
# Classic wxPython aliases, kept only as deprecated names.
NamedColour = wx.deprecated(Colour, "Use Colour instead.")
ZipFSHandler = wx.deprecated(ArchiveFSHandler, "Use ArchiveFSHandler instead.")
def _Image_ConvertToBitmap(self, depth=-1):
    """
    ConvertToBitmap(depth=-1) -> Bitmap
    Convert the image to a :class:`wx.Bitmap`.

    :param depth: bit depth of the resulting bitmap; -1 uses the default.
    """
    bmp = wx.Bitmap(self, depth)
    return bmp
Image.ConvertToBitmap = _Image_ConvertToBitmap
del _Image_ConvertToBitmap
def _Image_ConvertToMonoBitmap(self, red, green, blue):
    """
    ConvertToMonoBitmap(red, green, blue) -> Bitmap
    Creates a monochrome version of the image and returns it as a :class:`wx.Bitmap`.

    Pixels matching (red, green, blue) are passed to ConvertToMono and the
    result is wrapped in a 1-bit-deep bitmap.
    """
    mono = self.ConvertToMono( red, green, blue )
    bmp = wx.Bitmap( mono, 1 )
    return bmp
Image.ConvertToMonoBitmap = _Image_ConvertToMonoBitmap
del _Image_ConvertToMonoBitmap
# Deprecated Classic-wxPython image factory functions; each simply forwards
# to the modern wx.Image constructor overloads.
@wx.deprecatedMsg("Use :class:`Image` instead.")
def EmptyImage(width=0, height=0, clear=True):
    """
    A compatibility wrapper for the wx.Image(width, height) constructor
    """
    return Image(width, height, clear)
@wx.deprecatedMsg("Use bitmap.ConvertToImage instead.")
def ImageFromBitmap(bitmap):
    """
    Create a :class:`Image` from a :class:`wx.Bitmap`
    """
    return bitmap.ConvertToImage()
@wx.deprecatedMsg("Use :class:`Image` instead.")
def ImageFromStream(stream, type=BITMAP_TYPE_ANY, index=-1):
    """
    Load an image from a stream (file-like object)
    """
    return wx.Image(stream, type, index)
@wx.deprecatedMsg("Use :class:`Image` instead.")
def ImageFromData(width, height, data):
    """
    Compatibility wrapper for creating an image from RGB data
    """
    return Image(width, height, data)
@wx.deprecatedMsg("Use :class:`Image` instead.")
def ImageFromDataWithAlpha(width, height, data, alpha):
    """
    Compatibility wrapper for creating an image from RGB and Alpha data
    """
    return Image(width, height, data, alpha)
def ImageFromBuffer(width, height, dataBuffer, alphaBuffer=None):
    """
    Creates a :class:`Image` from the data in `dataBuffer`. The `dataBuffer`
    parameter must be a Python object that implements the buffer interface,
    such as a string, array, etc. The `dataBuffer` object is expected to
    contain a series of RGB bytes and be width*height*3 bytes long. A buffer
    object can optionally be supplied for the image's alpha channel data, and
    it is expected to be width*height bytes long.
    The :class:`Image` will be created with its data and alpha pointers initialized
    to the memory address pointed to by the buffer objects, thus saving the
    time needed to copy the image data from the buffer object to the :class:`Image`.
    While this has advantages, it also has the shoot-yourself-in-the-foot
    risks associated with sharing a C pointer between two objects.
    To help alleviate the risk a reference to the data and alpha buffer
    objects are kept with the :class:`Image`, so that they won't get deleted until
    after the wx.Image is deleted. However please be aware that it is not
    guaranteed that an object won't move its memory buffer to a new location
    when it needs to resize its contents. If that happens then the :class:`Image`
    will end up referring to an invalid memory location and could cause the
    application to crash. Therefore care should be taken to not manipulate
    the objects used for the data and alpha buffers in a way that would cause
    them to change size.
    """
    img = Image(width, height)
    img.SetDataBuffer(dataBuffer)
    # NOTE(review): truthiness test, so an *empty* alpha buffer is silently
    # ignored rather than attached — presumably intentional; confirm.
    if alphaBuffer:
        img.SetAlphaBuffer(alphaBuffer)
    # Keep references so the buffers outlive the shared C pointers (see
    # docstring above).
    img._buffer = dataBuffer
    img._alpha = alphaBuffer
    return img
def _ImageArray___repr__(self):
    return "ImageArray: " + repr(list(self))
ImageArray.__repr__ = _ImageArray___repr__
del _ImageArray___repr__
# String keys for Image.SetOption/GetOption.  Several of the TIFF/GIF/PNG
# assignments below are repeated verbatim — a harmless artifact of the code
# generator (the re-assignments bind identical values).
IMAGE_OPTION_QUALITY = "quality"
IMAGE_OPTION_FILENAME = "FileName"
IMAGE_OPTION_RESOLUTION = "Resolution"
IMAGE_OPTION_RESOLUTIONX = "ResolutionX"
IMAGE_OPTION_RESOLUTIONY = "ResolutionY"
IMAGE_OPTION_RESOLUTIONUNIT = "ResolutionUnit"
IMAGE_OPTION_MAX_WIDTH = "MaxWidth"
IMAGE_OPTION_MAX_HEIGHT = "MaxHeight"
IMAGE_OPTION_ORIGINAL_WIDTH = "OriginalWidth"
IMAGE_OPTION_ORIGINAL_HEIGHT = "OriginalHeight"
IMAGE_OPTION_BMP_FORMAT = "wxBMP_FORMAT"
IMAGE_OPTION_CUR_HOTSPOT_X = "HotSpotX"
IMAGE_OPTION_CUR_HOTSPOT_Y = "HotSpotY"
IMAGE_OPTION_GIF_COMMENT = "GifComment"
IMAGE_OPTION_PNG_FORMAT = "PngFormat"
IMAGE_OPTION_PNG_BITDEPTH = "PngBitDepth"
IMAGE_OPTION_PNG_FILTER = "PngF"
IMAGE_OPTION_PNG_COMPRESSION_LEVEL = "PngZL"
IMAGE_OPTION_PNG_COMPRESSION_MEM_LEVEL = "PngZM"
IMAGE_OPTION_PNG_COMPRESSION_STRATEGY = "PngZS"
IMAGE_OPTION_PNG_COMPRESSION_BUFFER_SIZE = "PngZB"
IMAGE_OPTION_TIFF_BITSPERSAMPLE = "BitsPerSample"
IMAGE_OPTION_TIFF_SAMPLESPERPIXEL = "SamplesPerPixel"
IMAGE_OPTION_TIFF_COMPRESSION = "Compression"
IMAGE_OPTION_TIFF_PHOTOMETRIC = "Photometric"
IMAGE_OPTION_TIFF_IMAGEDESCRIPTOR = "ImageDescriptor"
IMAGE_OPTION_TIFF_BITSPERSAMPLE = "BitsPerSample"
IMAGE_OPTION_TIFF_SAMPLESPERPIXEL = "SamplesPerPixel"
IMAGE_OPTION_TIFF_COMPRESSION = "Compression"
IMAGE_OPTION_TIFF_PHOTOMETRIC = "Photometric"
IMAGE_OPTION_TIFF_IMAGEDESCRIPTOR = "ImageDescriptor"
IMAGE_OPTION_GIF_COMMENT = "GifComment"
IMAGE_OPTION_PNG_FORMAT = "PngFormat"
IMAGE_OPTION_PNG_BITDEPTH = "PngBitDepth"
IMAGE_OPTION_PNG_FILTER = "PngF"
IMAGE_OPTION_PNG_COMPRESSION_LEVEL = "PngZL"
IMAGE_OPTION_PNG_COMPRESSION_MEM_LEVEL = "PngZM"
IMAGE_OPTION_PNG_COMPRESSION_STRATEGY = "PngZS"
IMAGE_OPTION_PNG_COMPRESSION_BUFFER_SIZE = "PngZB"
# Deprecated Classic-wxPython bitmap factory functions; each forwards to
# the corresponding modern wx.Bitmap class method or constructor.
@wx.deprecatedMsg("Use :meth:`wx.Bitmap.FromBuffer` or :meth:`wx.Bitmap.FromBufferAndAlpha` instead.")
def BitmapFromBuffer(width, height, dataBuffer, alphaBuffer=None):
    """
    A compatibility wrapper for :meth:`wx.Bitmap.FromBuffer` and :meth:`wx.Bitmap.FromBufferAndAlpha`
    """
    if alphaBuffer is not None:
        return Bitmap.FromBufferAndAlpha(width, height, dataBuffer, alphaBuffer)
    else:
        return Bitmap.FromBuffer(width, height, dataBuffer)
@wx.deprecatedMsg("Use :meth:`wx.Bitmap.FromBufferRGBA` instead.")
def BitmapFromBufferRGBA(width, height, dataBuffer):
    """
    A compatibility wrapper for :meth:`wx.Bitmap.FromBufferRGBA`
    """
    return Bitmap.FromBufferRGBA(width, height, dataBuffer)
@wx.deprecatedMsg("Use :meth:`wx.Bitmap.FromRGBA` instead.")
def EmptyBitmapRGBA(width, height, red=0, green=0, blue=0, alpha=0):
    """
    A compatibility wrapper for :meth:`wx.Bitmap.FromRGBA`
    """
    return Bitmap.FromRGBA(width, height, red, green, blue, alpha)
@wx.deprecatedMsg("Use :class:`wx.Bitmap` instead")
def EmptyBitmap(width, height, depth=BITMAP_SCREEN_DEPTH):
    """
    A compatibility wrapper for the wx.Bitmap(width, height, depth) constructor
    """
    return Bitmap(width, height, depth)
@wx.deprecatedMsg("Use :class:`wx.Bitmap` instead")
def BitmapFromImage(image):
    """
    A compatibility wrapper for the wx.Bitmap(wx.Image) constructor
    """
    return Bitmap(image)
@wx.deprecatedMsg("Use :class:`Icon` instead")
def EmptyIcon():
    """
    A compatibility wrapper for the :class:`Icon` constructor
    """
    return Icon()
# No-op stubs for Classic API methods that no longer have an effect; kept
# (deprecated) so old code keeps running.
def _Font_SetNoAntiAliasing(self, no=True):
    pass
Font.SetNoAntiAliasing = wx.deprecated(_Font_SetNoAntiAliasing)
del _Font_SetNoAntiAliasing
def _Font_GetNoAntiAliasing(self):
    pass
Font.GetNoAntiAliasing = wx.deprecated(_Font_GetNoAntiAliasing)
del _Font_GetNoAntiAliasing
# These stock fonts will be initialized when the wx.App object is created.
NORMAL_FONT = Font()
SMALL_FONT = Font()
ITALIC_FONT = Font()
SWISS_FONT = Font()
# Legacy Classic-style aliases for the font family/weight/style enums.
wx.DEFAULT = int(wx.FONTFAMILY_DEFAULT)
wx.DECORATIVE = int(wx.FONTFAMILY_DECORATIVE)
wx.ROMAN = int(wx.FONTFAMILY_ROMAN)
wx.SCRIPT = int(wx.FONTFAMILY_SCRIPT)
wx.SWISS = int(wx.FONTFAMILY_SWISS)
wx.MODERN = int(wx.FONTFAMILY_MODERN)
wx.TELETYPE = int(wx.FONTFAMILY_TELETYPE)
wx.NORMAL = int(wx.FONTWEIGHT_NORMAL)
wx.LIGHT = int(wx.FONTWEIGHT_LIGHT)
wx.BOLD = int(wx.FONTWEIGHT_BOLD)
# NOTE(review): wx.NORMAL is assigned twice (FONTWEIGHT_NORMAL above and
# FONTSTYLE_NORMAL here); presumably both enums share the legacy value —
# confirm against the wxWidgets enum definitions.
wx.NORMAL = int(wx.FONTSTYLE_NORMAL)
wx.ITALIC = int(wx.FONTSTYLE_ITALIC)
wx.SLANT = int(wx.FONTSTYLE_SLANT)
# These stock pens will be initialized when the wx.App object is created.
# Until then they are empty placeholder Pen objects.
RED_PEN = Pen()
BLUE_PEN = Pen()
CYAN_PEN = Pen()
GREEN_PEN = Pen()
YELLOW_PEN = Pen()
BLACK_PEN = Pen()
WHITE_PEN = Pen()
TRANSPARENT_PEN = Pen()
BLACK_DASHED_PEN = Pen()
GREY_PEN = Pen()
MEDIUM_GREY_PEN = Pen()
LIGHT_GREY_PEN = Pen()
# Legacy Classic-style aliases for the pen style enum.
wx.SOLID = int(wx.PENSTYLE_SOLID)
wx.DOT = int(wx.PENSTYLE_DOT)
wx.LONG_DASH = int(wx.PENSTYLE_LONG_DASH)
wx.SHORT_DASH = int(wx.PENSTYLE_SHORT_DASH)
wx.DOT_DASH = int(wx.PENSTYLE_DOT_DASH)
wx.USER_DASH = int(wx.PENSTYLE_USER_DASH)
wx.TRANSPARENT = int(wx.PENSTYLE_TRANSPARENT)
# These stock brushes will be initialized when the wx.App object is created.
BLUE_BRUSH = Brush()
GREEN_BRUSH = Brush()
YELLOW_BRUSH = Brush()
WHITE_BRUSH = Brush()
BLACK_BRUSH = Brush()
GREY_BRUSH = Brush()
MEDIUM_GREY_BRUSH = Brush()
LIGHT_GREY_BRUSH = Brush()
TRANSPARENT_BRUSH = Brush()
CYAN_BRUSH = Brush()
RED_BRUSH = Brush()
# Legacy Classic-style aliases for the brush style enum.
wx.STIPPLE_MASK_OPAQUE = int(wx.BRUSHSTYLE_STIPPLE_MASK_OPAQUE)
wx.STIPPLE_MASK = int(wx.BRUSHSTYLE_STIPPLE_MASK)
wx.STIPPLE = int(wx.BRUSHSTYLE_STIPPLE)
wx.BDIAGONAL_HATCH = int(wx.BRUSHSTYLE_BDIAGONAL_HATCH)
wx.CROSSDIAG_HATCH = int(wx.BRUSHSTYLE_CROSSDIAG_HATCH)
wx.FDIAGONAL_HATCH = int(wx.BRUSHSTYLE_FDIAGONAL_HATCH)
wx.CROSS_HATCH = int(wx.BRUSHSTYLE_CROSS_HATCH)
wx.HORIZONTAL_HATCH = int(wx.BRUSHSTYLE_HORIZONTAL_HATCH)
wx.VERTICAL_HATCH = int(wx.BRUSHSTYLE_VERTICAL_HATCH)
# These stock cursors will be initialized when the wx.App object is created.
STANDARD_CURSOR = Cursor()
HOURGLASS_CURSOR = Cursor()
CROSS_CURSOR = Cursor()
StockCursor = wx.deprecated(Cursor, "Use Cursor instead.")
CursorFromImage = wx.deprecated(Cursor, "Use Cursor instead.")
def _Region___iter__(self):
    """
    Returns a rectangle interator conforming to the Python iterator
    protocol.
    """
    return PyRegionIterator(self)
Region.__iter__ = _Region___iter__
del _Region___iter__
class PyRegionIterator(object):
    "A Python iterator for wx.Region objects"
    def __init__(self, region):
        # Keep a reference to the region so it outlives the C++ iterator.
        self._region = region
        self._iterator = wx.RegionIterator(region)
    def next(self):
        # The C++ iterator is falsy once exhausted.
        if not self._iterator:
            raise StopIteration
        rect = self._iterator.GetRect()
        # Advance only while rectangles remain, so the final GetRect value
        # is yielded before exhaustion.
        if self._iterator.HaveRects():
            self._iterator.Next()
        return rect
    __next__ = next  # for Python 3
def _DC_GetClippingRect(self):
    """
    Gets the rectangle surrounding the current clipping region
    """
    return wx.Rect(*self.GetClippingBox())
DC.GetClippingRect = _DC_GetClippingRect
del _DC_GetClippingRect
DC.DrawImageLabel = wx.deprecated(DC.DrawLabel, "Use DrawLabel instead.")
def _DC_GetBoundingBox(self):
    """
    GetBoundingBox() -> (x1,y1, x2,y2)
    Returns the min and max points used in drawing commands so far.
    """
    return (self.MinX(), self.MinY(), self.MaxX(), self.MaxY())
DC.GetBoundingBox = _DC_GetBoundingBox
del _DC_GetBoundingBox
# Platform-specific handle getters are unified behind GetHandle.
DC.GetHDC = wx.deprecated(DC.GetHDC, "Use GetHandle instead.")
DC.GetCGContext = wx.deprecated(DC.GetCGContext, "Use GetHandle instead.")
DC.GetGdkDrawable = wx.deprecated(DC.GetGdkDrawable, "Use GetHandle instead.")
# Context-manager support: leaving the ``with`` block destroys the DC,
# unlike the DC*Changer helpers below which merely restore state.
def _DC___enter__(self):
    return self
DC.__enter__ = _DC___enter__
del _DC___enter__
def _DC___exit__(self, exc_type, exc_val, exc_tb):
    self.Destroy()
DC.__exit__ = _DC___exit__
del _DC___exit__
def _wxPyCoerceStyleList(styles, styleClass, items, errMsg):
    """
    Shared argument normalizer for the DC.Draw*List methods below (the
    same None / single-instance / sequence logic was previously repeated
    in each of them).

    :param styles: None, a single ``styleClass`` instance, or a sequence
        of instances.  None becomes ``[]`` ("use the DC's current
        setting"); a single instance becomes a one-element list ("use it
        for every item"); a sequence must have one entry per item.
    :param styleClass: the expected type (``wx.Pen``, ``wx.Brush`` or
        ``wx.Colour``).
    :param items: the primitives being drawn; only its length is used,
        and only when ``styles`` is a sequence (so lazy inputs are not
        touched otherwise, matching the original behavior).
    :param errMsg: message for the ``ValueError`` raised on a length
        mismatch.
    """
    if styles is None:
        return []
    if isinstance(styles, styleClass):
        return [styles]
    if len(styles) != len(items):
        raise ValueError(errMsg)
    return styles
def _DC_DrawPointList(self, points, pens=None):
    """
    Draw a list of points as quickly as possible.
    :param points: A sequence of 2-element sequences representing
                   each point to draw, (x,y).
    :param pens: If None, then the current pen is used.  If a single
                 pen then it will be used for all points.  If a list of
                 pens then there should be one for each point in points.
    """
    pens = _wxPyCoerceStyleList(pens, wx.Pen, points,
                                'points and pens must have same length')
    return self._DrawPointList(points, pens, [])
DC.DrawPointList = _DC_DrawPointList
del _DC_DrawPointList
def _DC_DrawLineList(self, lines, pens=None):
    """
    Draw a list of lines as quickly as possible.
    :param lines: A sequence of 4-element sequences representing
                  each line to draw, (x1,y1, x2,y2).
    :param pens: If None, then the current pen is used.  If a
                 single pen then it will be used for all lines.  If
                 a list of pens then there should be one for each line
                 in lines.
    """
    pens = _wxPyCoerceStyleList(pens, wx.Pen, lines,
                                'lines and pens must have same length')
    return self._DrawLineList(lines, pens, [])
DC.DrawLineList = _DC_DrawLineList
del _DC_DrawLineList
def _DC_DrawRectangleList(self, rectangles, pens=None, brushes=None):
    """
    Draw a list of rectangles as quickly as possible.
    :param rectangles: A sequence of 4-element sequences representing
                       each rectangle to draw, (x,y, w,h).
    :param pens: If None, then the current pen is used.  If a
                 single pen then it will be used for all rectangles.
                 If a list of pens then there should be one for each
                 rectangle in rectangles.
    :param brushes: A brush or brushes to be used to fill the rectagles,
                    with similar semantics as the pens parameter.
    """
    pens = _wxPyCoerceStyleList(pens, wx.Pen, rectangles,
                                'rectangles and pens must have same length')
    brushes = _wxPyCoerceStyleList(brushes, wx.Brush, rectangles,
                                   'rectangles and brushes must have same length')
    return self._DrawRectangleList(rectangles, pens, brushes)
DC.DrawRectangleList = _DC_DrawRectangleList
del _DC_DrawRectangleList
def _DC_DrawEllipseList(self, ellipses, pens=None, brushes=None):
    """
    Draw a list of ellipses as quickly as possible.
    :param ellipses: A sequence of 4-element sequences representing
                     each ellipse to draw, (x,y, w,h).
    :param pens: If None, then the current pen is used.  If a
                 single pen then it will be used for all ellipses.
                 If a list of pens then there should be one for each
                 ellipse in ellipses.
    :param brushes: A brush or brushes to be used to fill the ellipses,
                    with similar semantics as the pens parameter.
    """
    pens = _wxPyCoerceStyleList(pens, wx.Pen, ellipses,
                                'ellipses and pens must have same length')
    brushes = _wxPyCoerceStyleList(brushes, wx.Brush, ellipses,
                                   'ellipses and brushes must have same length')
    return self._DrawEllipseList(ellipses, pens, brushes)
DC.DrawEllipseList = _DC_DrawEllipseList
del _DC_DrawEllipseList
def _DC_DrawPolygonList(self, polygons, pens=None, brushes=None):
    """
    Draw a list of polygons, each of which is a list of points.
    :param polygons: A sequence of sequences of sequences.
                     [[(x1,y1),(x2,y2),(x3,y3)...], [(x1,y1),(x2,y2),(x3,y3)...]]
    :param pens: If None, then the current pen is used.  If a
                 single pen then it will be used for all polygons.
                 If a list of pens then there should be one for each
                 polygon.
    :param brushes: A brush or brushes to be used to fill the polygons,
                    with similar semantics as the pens parameter.
    """
    pens = _wxPyCoerceStyleList(pens, wx.Pen, polygons,
                                'polygons and pens must have same length')
    brushes = _wxPyCoerceStyleList(brushes, wx.Brush, polygons,
                                   'polygons and brushes must have same length')
    return self._DrawPolygonList(polygons, pens, brushes)
DC.DrawPolygonList = _DC_DrawPolygonList
del _DC_DrawPolygonList
def _DC_DrawTextList(self, textList, coords, foregrounds=None, backgrounds=None):
    """
    Draw a list of strings using a list of coordinants for positioning each string.
    :param textList: A list of strings
    :param coords: A list of (x,y) positions
    :param foregrounds: A list of `wx.Colour` objects to use for the
                        foregrounds of the strings.
    :param backgrounds: A list of `wx.Colour` objects to use for the
                        backgrounds of the strings.
    NOTE: Make sure you set background mode to wx.Solid (DC.SetBackgroundMode)
          If you want backgrounds to do anything.
    """
    # Exact type check kept (rather than isinstance) to preserve the
    # historical Python 2 str/unicode behavior.
    if type(textList) == type(''):
        textList = [textList]
    elif len(textList) != len(coords):
        raise ValueError('textlist and coords must have same length')
    foregrounds = _wxPyCoerceStyleList(foregrounds, wx.Colour, coords,
                                       'foregrounds and coords must have same length')
    backgrounds = _wxPyCoerceStyleList(backgrounds, wx.Colour, coords,
                                       'backgrounds and coords must have same length')
    return self._DrawTextList(textList, coords, foregrounds, backgrounds)
DC.DrawTextList = _DC_DrawTextList
del _DC_DrawTextList
DC.BoundingBox = property(DC.GetBoundingBox)
DC.ClippingRect = property(DC.GetClippingRect)
# Context-manager support for the RAII-style DC helper classes.  Their
# __exit__ methods return False so any exception propagates; the wrapped
# state is restored by the helper's C++ destructor.
def _DCClipper___enter__(self):
    return self
DCClipper.__enter__ = _DCClipper___enter__
del _DCClipper___enter__
def _DCClipper___exit__(self, exc_type, exc_val, exc_tb):
    return False
DCClipper.__exit__ = _DCClipper___exit__
del _DCClipper___exit__
def _DCBrushChanger___enter__(self):
    return self
DCBrushChanger.__enter__ = _DCBrushChanger___enter__
del _DCBrushChanger___enter__
def _DCBrushChanger___exit__(self, exc_type, exc_val, exc_tb):
    return False
DCBrushChanger.__exit__ = _DCBrushChanger___exit__
del _DCBrushChanger___exit__
def _DCPenChanger___enter__(self):
    return self
DCPenChanger.__enter__ = _DCPenChanger___enter__
del _DCPenChanger___enter__
def _DCPenChanger___exit__(self, exc_type, exc_val, exc_tb):
    return False
DCPenChanger.__exit__ = _DCPenChanger___exit__
del _DCPenChanger___exit__
def _DCTextColourChanger___enter__(self):
    return self
DCTextColourChanger.__enter__ = _DCTextColourChanger___enter__
del _DCTextColourChanger___enter__
def _DCTextColourChanger___exit__(self, exc_type, exc_val, exc_tb):
    return False
DCTextColourChanger.__exit__ = _DCTextColourChanger___exit__
del _DCTextColourChanger___exit__
def _DCFontChanger___enter__(self):
    return self
DCFontChanger.__enter__ = _DCFontChanger___enter__
del _DCFontChanger___enter__
def _DCFontChanger___exit__(self, exc_type, exc_val, exc_tb):
    return False
DCFontChanger.__exit__ = _DCFontChanger___exit__
del _DCFontChanger___exit__
GraphicsContext.DrawRotatedText = wx.deprecated(GraphicsContext.DrawText, 'Use DrawText instead.')
def _PixelDataBase___iter__(self):
    """
    Create and return an iterator/generator object for traversing
    this pixel data object.
    """
    width = self.GetWidth()
    height = self.GetHeight()
    pixels = self.GetPixels() # this is the C++ iterator
    # This class is a facade over the pixels object (using the one
    # in the enclosing scope) that only allows Get() and Set() to
    # be called.
    class PixelFacade(object):
        def Get(self):
            return pixels.Get()
        def Set(self, *args, **kw):
            return pixels.Set(*args, **kw)
        def __str__(self):
            return str(self.Get())
        def __repr__(self):
            # x and y are the loop variables of the generator below,
            # captured by closure.
            return 'pixel(%d,%d): %s' % (x,y,self.Get())
        X = property(lambda self: x)
        Y = property(lambda self: y)
    import sys
    # xrange avoids materializing the index lists on Python 2.
    rangeFunc = range if sys.version_info >= (3,) else xrange
    pf = PixelFacade()
    for y in rangeFunc(height):
        pixels.MoveTo(self, 0, y)
        for x in rangeFunc(width):
            # We always generate the same pf instance, but it
            # accesses the pixels object which we use to iterate
            # over the pixel buffer.
            yield pf
            pixels.nextPixel()
PixelDataBase.__iter__ = _PixelDataBase___iter__
del _PixelDataBase___iter__
@wx.deprecated
def GetAccelFromString(label):
    """
    Return a wx.AcceleratorEntry parsed from *label* (deprecated helper).
    """
    accel = wx.AcceleratorEntry()
    accel.FromString(label)
    return accel
# Classic name kept as a deprecated factory for wx.DataFormat.
def CustomDataFormat(format):
    return wx.DataFormat(format)
CustomDataFormat = wx.deprecated(CustomDataFormat, "Use wx.DataFormat instead.")
# Classic-wxPython Py* names kept as deprecated aliases of the modern
# classes.  Fixes: the PyDataObjectSimple line had a misplaced closing
# paren, which bound the name to a (class, message) tuple instead of a
# deprecated class (making it uncallable and losing the message); the
# PyBitmapDataObject message named the wrong replacement class.
PyDataObjectSimple = wx.deprecated(DataObjectSimple, 'Use DataObjectSimple instead.')
PyTextDataObject = wx.deprecated(TextDataObject, 'Use TextDataObject instead.')
PyBitmapDataObject = wx.deprecated(BitmapDataObject, 'Use BitmapDataObject instead.')
PyDropTarget = wx.deprecated(DropTarget, 'Use DropTarget instead.')
# Since wxTheClipboard is not really a global variable (it is a macro
# that calls the Get static method) we can't declare it as a global
# variable for the wrapper generator, otherwise it will try to run the
# function at module import and the wxApp object won't exist yet. So
# we'll use a class that will allow us to delay calling the Get until
# wx.TheClipboard is actually being used for the first time.
class _wxPyDelayedInitWrapper(object):
    """
    Lazily-created singleton proxy.

    Stores a factory callable plus its arguments and defers invoking it
    until the wrapped object is first used (and only once a wx.App
    exists), then forwards all attribute access to the real instance.
    """
    def __init__(self, initfunc, *args, **kwargs):
        self._initfunc = initfunc
        self._args = args
        self._kwargs = kwargs
        self._instance = None
    def _checkInstance(self):
        # Create the real object on first use; creation is deferred until
        # a wx.App exists so the underlying C++ machinery is ready.
        if self._instance is None and wx.GetApp():
            self._instance = self._initfunc(*self._args, **self._kwargs)
    def __getattr__(self, name):
        self._checkInstance()
        return getattr(self._instance, name)
    def __repr__(self):
        self._checkInstance()
        return repr(self._instance)
    # Context-manager support: open the wrapped clipboard-like object on
    # entry, close it again on exit.
    def __enter__(self):
        self._checkInstance()
        if not self.Open():
            raise RuntimeError('Unable to open clipboard.')
        return self
    def __exit__(self, exc_type, exc_val, exc_tb):
        self.Close()
# Delayed-init stand-in for the wxTheClipboard macro (see comment above).
TheClipboard = _wxPyDelayedInitWrapper(Clipboard.Get)
def _ConfigBase_ReadInt(self, key, defaultVal=0):
    """
    Read an integer config value, coercing the result to a plain ``int``
    on Python 2 (where the wrapped call may not return one).
    """
    import six
    value = self._cpp_ReadInt(key, defaultVal)
    return int(value) if six.PY2 else value
ConfigBase.ReadInt = _ConfigBase_ReadInt
del _ConfigBase_ReadInt
# Context-manager support for ConfigPathChanger; __exit__ returns False so
# exceptions raised inside the `with` block are propagated.
def _ConfigPathChanger___enter__(self):
    return self
ConfigPathChanger.__enter__ = _ConfigPathChanger___enter__
del _ConfigPathChanger___enter__
def _ConfigPathChanger___exit__(self, exc_type, exc_val, exc_tb):
    return False
ConfigPathChanger.__exit__ = _ConfigPathChanger___exit__
del _ConfigPathChanger___exit__
# For 2.8 compatibility: the classic m_* member names are kept as
# deprecated aliases of the modern accessors.
KeyboardState.m_controlDown = wx.deprecated(KeyboardState.controlDown, "Use controlDown instead.")
KeyboardState.m_shiftDown = wx.deprecated(KeyboardState.shiftDown, "Use shiftDown instead.")
KeyboardState.m_altDown = wx.deprecated(KeyboardState.altDown, "Use altDown instead.")
KeyboardState.m_metaDown = wx.deprecated(KeyboardState.metaDown, "Use metaDown instead.")
def _EvtHandler_Bind(self, event, handler, source=None, id=wx.ID_ANY, id2=wx.ID_ANY):
    """
    Bind an event to an event handler.

    :param event: One of the ``EVT_*`` event binder objects that
                  specifies the type of event to bind.

    :param handler: A callable object to be invoked when the
                  event is delivered to self. Pass ``None`` to
                  disconnect an event handler.

    :param source: Sometimes the event originates from a
                  different window than self, but you still
                  want to catch it in self. (For example, a
                  button event delivered to a frame.) By
                  passing the source of the event, the event
                  handling system is able to differentiate
                  between the same event type from different
                  controls.

    :param id: Used to specify the event source by ID instead
               of instance.

    :param id2: Used when it is desirable to bind a handler
                to a range of IDs, such as with EVT_MENU_RANGE.
    """
    # Sanity-check the arguments before touching the C++ layer.
    assert isinstance(event, wx.PyEventBinder)
    assert callable(handler) or handler is None
    assert source is None or hasattr(source, 'GetId')
    # An explicit source window overrides any id that was passed in.
    if source is not None:
        id  = source.GetId()
    event.Bind(self, id, id2, handler)
EvtHandler.Bind = _EvtHandler_Bind
del _EvtHandler_Bind
def _EvtHandler_Unbind(self, event, source=None, id=wx.ID_ANY, id2=wx.ID_ANY, handler=None):
    """
    Disconnect the event-handler binding for *event* from `self`.

    :param source: optional originating window; when given, its id
        overrides the *id* argument.
    :returns: ``True`` if a binding was found and removed.
    """
    if source is not None:
        id = source.GetId()
    removed = event.Unbind(self, id, id2, handler)
    return removed
EvtHandler.Unbind = _EvtHandler_Unbind
del _EvtHandler_Unbind
# Context-manager support for the event-control helper classes; every
# __exit__ returns False so exceptions propagate out of the `with` block.
def _EventBlocker___enter__(self):
    return self
EventBlocker.__enter__ = _EventBlocker___enter__
del _EventBlocker___enter__
def _EventBlocker___exit__(self, exc_type, exc_val, exc_tb):
    return False
EventBlocker.__exit__ = _EventBlocker___exit__
del _EventBlocker___exit__
def _PropagationDisabler___enter__(self):
    return self
PropagationDisabler.__enter__ = _PropagationDisabler___enter__
del _PropagationDisabler___enter__
def _PropagationDisabler___exit__(self, exc_type, exc_val, exc_tb):
    return False
PropagationDisabler.__exit__ = _PropagationDisabler___exit__
del _PropagationDisabler___exit__
def _PropagateOnce___enter__(self):
    return self
PropagateOnce.__enter__ = _PropagateOnce___enter__
del _PropagateOnce___enter__
def _PropagateOnce___exit__(self, exc_type, exc_val, exc_tb):
    return False
PropagateOnce.__exit__ = _PropagateOnce___exit__
del _PropagateOnce___exit__
# Classic-wxPython names GetClientObject/SetClientObject are kept as plain
# aliases of the modern GetClientData/SetClientData accessors.
def _CommandEvent_GetClientObject(self):
    """
    Alias for :meth:`GetClientData`
    """
    return self.GetClientData()
CommandEvent.GetClientObject = _CommandEvent_GetClientObject
del _CommandEvent_GetClientObject
def _CommandEvent_SetClientObject(self, data):
    """
    Alias for :meth:`SetClientData`
    """
    self.SetClientData(data)
CommandEvent.SetClientObject = _CommandEvent_SetClientObject
del _CommandEvent_SetClientObject
# Property-style access to the same client data.
CommandEvent.ClientData = property(CommandEvent.GetClientData, CommandEvent.SetClientData)
class PyEventBinder(object):
    """
    Instances of this class are used to bind specific events to event
    handlers.

    Each binder wraps one or more ``wxEVT_*`` type IDs plus the number of
    source-window IDs the classic ``EVT_*(window, ...)`` call style
    expects (0, 1 or 2).
    """
    def __init__(self, evtType, expectedIDs=0):
        if expectedIDs not in [0, 1, 2]:
            raise ValueError("Invalid number of expectedIDs")
        self.expectedIDs = expectedIDs
        # Normalize to a list so Bind/Unbind can always iterate.
        if isinstance(evtType, (list, tuple)):
            self.evtType = list(evtType)
        else:
            self.evtType = [evtType]
    def Bind(self, target, id1, id2, function):
        """
        Bind this set of event types to target using its Connect() method.
        """
        for et in self.evtType:
            target.Connect(id1, id2, et, function)
    def Unbind(self, target, id1, id2, handler=None):
        """
        Remove an event binding.  Returns True if at least one of the
        wrapped event types was disconnected.
        """
        success = 0
        for et in self.evtType:
            success += int(target.Disconnect(id1, id2, et, handler))
        return success != 0
    def _getEvtType(self):
        """
        Make it easy to get to the default wxEventType typeID for this
        event binder.
        """
        return self.evtType[0]
    typeId = property(_getEvtType)
    @wx.deprecatedMsg("Use :meth:`EvtHandler.Bind` instead.")
    def __call__(self, *args):
        """
        For backwards compatibility with the old ``EVT_*`` functions.
        Should be called with either (window, func), (window, ID,
        func) or (window, ID1, ID2, func) parameters depending on the
        type of the event.
        """
        assert len(args) == 2 + self.expectedIDs
        id1 = ID_ANY
        id2 = ID_ANY
        target = args[0]
        # Unpack the optional source IDs according to the declared arity.
        if self.expectedIDs == 0:
            func = args[1]
        elif self.expectedIDs == 1:
            id1 = args[1]
            func = args[2]
        elif self.expectedIDs == 2:
            id1 = args[1]
            id2 = args[2]
            func = args[3]
        else:
            # Unreachable: __init__ validated expectedIDs in {0, 1, 2}.
            raise ValueError("Unexpected number of IDs")
        self.Bind(target, id1, id2, func)
#-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=
# This code block was included from src/event_ex.py
# Create some event binders: one PyEventBinder per window / keyboard /
# mouse event type.  A trailing 1 means the classic call style expects
# one source-window ID.
EVT_SIZE = wx.PyEventBinder(wxEVT_SIZE)
EVT_SIZING = wx.PyEventBinder(wxEVT_SIZING)
EVT_MOVE = wx.PyEventBinder(wxEVT_MOVE)
EVT_MOVING = wx.PyEventBinder(wxEVT_MOVING)
EVT_MOVE_START = wx.PyEventBinder(wxEVT_MOVE_START)
EVT_MOVE_END = wx.PyEventBinder(wxEVT_MOVE_END)
EVT_CLOSE = wx.PyEventBinder(wxEVT_CLOSE_WINDOW)
EVT_END_SESSION = wx.PyEventBinder(wxEVT_END_SESSION)
EVT_QUERY_END_SESSION = wx.PyEventBinder(wxEVT_QUERY_END_SESSION)
EVT_PAINT = wx.PyEventBinder(wxEVT_PAINT)
EVT_NC_PAINT = wx.PyEventBinder(wxEVT_NC_PAINT)
EVT_ERASE_BACKGROUND = wx.PyEventBinder(wxEVT_ERASE_BACKGROUND)
EVT_CHAR = wx.PyEventBinder(wxEVT_CHAR)
EVT_KEY_DOWN = wx.PyEventBinder(wxEVT_KEY_DOWN)
EVT_KEY_UP = wx.PyEventBinder(wxEVT_KEY_UP)
EVT_HOTKEY = wx.PyEventBinder(wxEVT_HOTKEY, 1)
EVT_CHAR_HOOK = wx.PyEventBinder(wxEVT_CHAR_HOOK)
EVT_MENU_OPEN = wx.PyEventBinder(wxEVT_MENU_OPEN)
EVT_MENU_CLOSE = wx.PyEventBinder(wxEVT_MENU_CLOSE)
EVT_MENU_HIGHLIGHT = wx.PyEventBinder(wxEVT_MENU_HIGHLIGHT, 1)
EVT_MENU_HIGHLIGHT_ALL = wx.PyEventBinder(wxEVT_MENU_HIGHLIGHT)
EVT_SET_FOCUS = wx.PyEventBinder(wxEVT_SET_FOCUS)
EVT_KILL_FOCUS = wx.PyEventBinder(wxEVT_KILL_FOCUS)
EVT_CHILD_FOCUS = wx.PyEventBinder(wxEVT_CHILD_FOCUS)
EVT_ACTIVATE = wx.PyEventBinder(wxEVT_ACTIVATE)
EVT_ACTIVATE_APP = wx.PyEventBinder(wxEVT_ACTIVATE_APP)
EVT_HIBERNATE = wx.PyEventBinder(wxEVT_HIBERNATE)
EVT_DROP_FILES = wx.PyEventBinder(wxEVT_DROP_FILES)
EVT_INIT_DIALOG = wx.PyEventBinder(wxEVT_INIT_DIALOG)
EVT_SYS_COLOUR_CHANGED = wx.PyEventBinder(wxEVT_SYS_COLOUR_CHANGED)
EVT_DISPLAY_CHANGED = wx.PyEventBinder(wxEVT_DISPLAY_CHANGED)
EVT_SHOW = wx.PyEventBinder(wxEVT_SHOW)
EVT_MAXIMIZE = wx.PyEventBinder(wxEVT_MAXIMIZE)
EVT_ICONIZE = wx.PyEventBinder(wxEVT_ICONIZE)
EVT_NAVIGATION_KEY = wx.PyEventBinder(wxEVT_NAVIGATION_KEY)
EVT_PALETTE_CHANGED = wx.PyEventBinder(wxEVT_PALETTE_CHANGED)
EVT_QUERY_NEW_PALETTE = wx.PyEventBinder(wxEVT_QUERY_NEW_PALETTE)
EVT_WINDOW_CREATE = wx.PyEventBinder(wxEVT_CREATE)
EVT_WINDOW_DESTROY = wx.PyEventBinder(wxEVT_DESTROY)
EVT_SET_CURSOR = wx.PyEventBinder(wxEVT_SET_CURSOR)
EVT_MOUSE_CAPTURE_CHANGED = wx.PyEventBinder(wxEVT_MOUSE_CAPTURE_CHANGED)
EVT_MOUSE_CAPTURE_LOST = wx.PyEventBinder(wxEVT_MOUSE_CAPTURE_LOST)
EVT_LEFT_DOWN = wx.PyEventBinder(wxEVT_LEFT_DOWN)
EVT_LEFT_UP = wx.PyEventBinder(wxEVT_LEFT_UP)
EVT_MIDDLE_DOWN = wx.PyEventBinder(wxEVT_MIDDLE_DOWN)
EVT_MIDDLE_UP = wx.PyEventBinder(wxEVT_MIDDLE_UP)
EVT_RIGHT_DOWN = wx.PyEventBinder(wxEVT_RIGHT_DOWN)
EVT_RIGHT_UP = wx.PyEventBinder(wxEVT_RIGHT_UP)
EVT_MOTION = wx.PyEventBinder(wxEVT_MOTION)
EVT_LEFT_DCLICK = wx.PyEventBinder(wxEVT_LEFT_DCLICK)
EVT_MIDDLE_DCLICK = wx.PyEventBinder(wxEVT_MIDDLE_DCLICK)
EVT_RIGHT_DCLICK = wx.PyEventBinder(wxEVT_RIGHT_DCLICK)
EVT_LEAVE_WINDOW = wx.PyEventBinder(wxEVT_LEAVE_WINDOW)
EVT_ENTER_WINDOW = wx.PyEventBinder(wxEVT_ENTER_WINDOW)
EVT_MOUSEWHEEL = wx.PyEventBinder(wxEVT_MOUSEWHEEL)
EVT_MOUSE_AUX1_DOWN = wx.PyEventBinder(wxEVT_AUX1_DOWN)
EVT_MOUSE_AUX1_UP = wx.PyEventBinder(wxEVT_AUX1_UP)
EVT_MOUSE_AUX1_DCLICK = wx.PyEventBinder(wxEVT_AUX1_DCLICK)
EVT_MOUSE_AUX2_DOWN = wx.PyEventBinder(wxEVT_AUX2_DOWN)
EVT_MOUSE_AUX2_UP = wx.PyEventBinder(wxEVT_AUX2_UP)
EVT_MOUSE_AUX2_DCLICK = wx.PyEventBinder(wxEVT_AUX2_DCLICK)
# Catch-all binder covering every low-level mouse event type above.
EVT_MOUSE_EVENTS = wx.PyEventBinder([
    wxEVT_LEFT_DOWN, wxEVT_LEFT_UP,
    wxEVT_MIDDLE_DOWN, wxEVT_MIDDLE_UP,
    wxEVT_RIGHT_DOWN, wxEVT_RIGHT_UP,
    wxEVT_MOTION,
    wxEVT_LEFT_DCLICK, wxEVT_MIDDLE_DCLICK, wxEVT_RIGHT_DCLICK,
    wxEVT_ENTER_WINDOW, wxEVT_LEAVE_WINDOW,
    wxEVT_MOUSEWHEEL,
    wxEVT_AUX1_DOWN, wxEVT_AUX1_UP, wxEVT_AUX1_DCLICK,
    wxEVT_AUX2_DOWN, wxEVT_AUX2_UP, wxEVT_AUX2_DCLICK,
])
# Scrolling from wxWindow (sent to wxScrolledWindow)
EVT_SCROLLWIN = wx.PyEventBinder([
    wxEVT_SCROLLWIN_TOP, wxEVT_SCROLLWIN_BOTTOM,
    wxEVT_SCROLLWIN_LINEUP, wxEVT_SCROLLWIN_LINEDOWN,
    wxEVT_SCROLLWIN_PAGEUP, wxEVT_SCROLLWIN_PAGEDOWN,
    wxEVT_SCROLLWIN_THUMBTRACK, wxEVT_SCROLLWIN_THUMBRELEASE,
])
EVT_SCROLLWIN_TOP = wx.PyEventBinder(wxEVT_SCROLLWIN_TOP)
EVT_SCROLLWIN_BOTTOM = wx.PyEventBinder(wxEVT_SCROLLWIN_BOTTOM)
EVT_SCROLLWIN_LINEUP = wx.PyEventBinder(wxEVT_SCROLLWIN_LINEUP)
EVT_SCROLLWIN_LINEDOWN = wx.PyEventBinder(wxEVT_SCROLLWIN_LINEDOWN)
EVT_SCROLLWIN_PAGEUP = wx.PyEventBinder(wxEVT_SCROLLWIN_PAGEUP)
EVT_SCROLLWIN_PAGEDOWN = wx.PyEventBinder(wxEVT_SCROLLWIN_PAGEDOWN)
EVT_SCROLLWIN_THUMBTRACK = wx.PyEventBinder(wxEVT_SCROLLWIN_THUMBTRACK)
EVT_SCROLLWIN_THUMBRELEASE = wx.PyEventBinder(wxEVT_SCROLLWIN_THUMBRELEASE)
# Scrolling from wx.Slider and wx.ScrollBar
EVT_SCROLL = wx.PyEventBinder([
    wxEVT_SCROLL_TOP, wxEVT_SCROLL_BOTTOM,
    wxEVT_SCROLL_LINEUP, wxEVT_SCROLL_LINEDOWN,
    wxEVT_SCROLL_PAGEUP, wxEVT_SCROLL_PAGEDOWN,
    wxEVT_SCROLL_THUMBTRACK, wxEVT_SCROLL_THUMBRELEASE,
    wxEVT_SCROLL_CHANGED,
])
EVT_SCROLL_TOP = wx.PyEventBinder(wxEVT_SCROLL_TOP)
EVT_SCROLL_BOTTOM = wx.PyEventBinder(wxEVT_SCROLL_BOTTOM)
EVT_SCROLL_LINEUP = wx.PyEventBinder(wxEVT_SCROLL_LINEUP)
EVT_SCROLL_LINEDOWN = wx.PyEventBinder(wxEVT_SCROLL_LINEDOWN)
EVT_SCROLL_PAGEUP = wx.PyEventBinder(wxEVT_SCROLL_PAGEUP)
EVT_SCROLL_PAGEDOWN = wx.PyEventBinder(wxEVT_SCROLL_PAGEDOWN)
EVT_SCROLL_THUMBTRACK = wx.PyEventBinder(wxEVT_SCROLL_THUMBTRACK)
EVT_SCROLL_THUMBRELEASE = wx.PyEventBinder(wxEVT_SCROLL_THUMBRELEASE)
EVT_SCROLL_CHANGED = wx.PyEventBinder(wxEVT_SCROLL_CHANGED)
# Old name for EVT_SCROLL_CHANGED.
EVT_SCROLL_ENDSCROLL = EVT_SCROLL_CHANGED
# Scrolling from wx.Slider and wx.ScrollBar, with an id
EVT_COMMAND_SCROLL = wx.PyEventBinder([
    wxEVT_SCROLL_TOP, wxEVT_SCROLL_BOTTOM,
    wxEVT_SCROLL_LINEUP, wxEVT_SCROLL_LINEDOWN,
    wxEVT_SCROLL_PAGEUP, wxEVT_SCROLL_PAGEDOWN,
    wxEVT_SCROLL_THUMBTRACK, wxEVT_SCROLL_THUMBRELEASE,
    wxEVT_SCROLL_CHANGED,
], 1)
EVT_COMMAND_SCROLL_TOP = wx.PyEventBinder(wxEVT_SCROLL_TOP, 1)
EVT_COMMAND_SCROLL_BOTTOM = wx.PyEventBinder(wxEVT_SCROLL_BOTTOM, 1)
EVT_COMMAND_SCROLL_LINEUP = wx.PyEventBinder(wxEVT_SCROLL_LINEUP, 1)
EVT_COMMAND_SCROLL_LINEDOWN = wx.PyEventBinder(wxEVT_SCROLL_LINEDOWN, 1)
EVT_COMMAND_SCROLL_PAGEUP = wx.PyEventBinder(wxEVT_SCROLL_PAGEUP, 1)
EVT_COMMAND_SCROLL_PAGEDOWN = wx.PyEventBinder(wxEVT_SCROLL_PAGEDOWN, 1)
EVT_COMMAND_SCROLL_THUMBTRACK = wx.PyEventBinder(wxEVT_SCROLL_THUMBTRACK, 1)
EVT_COMMAND_SCROLL_THUMBRELEASE = wx.PyEventBinder(wxEVT_SCROLL_THUMBRELEASE, 1)
EVT_COMMAND_SCROLL_CHANGED = wx.PyEventBinder(wxEVT_SCROLL_CHANGED, 1)
# Old name for EVT_COMMAND_SCROLL_CHANGED.
EVT_COMMAND_SCROLL_ENDSCROLL = EVT_COMMAND_SCROLL_CHANGED
# Control/command events; the second argument is the number of source IDs
# the classic call style expects (2 for the *_RANGE binders).
EVT_BUTTON = wx.PyEventBinder(wxEVT_BUTTON, 1)
EVT_CHECKBOX = wx.PyEventBinder(wxEVT_CHECKBOX, 1)
EVT_CHOICE = wx.PyEventBinder(wxEVT_CHOICE, 1)
EVT_LISTBOX = wx.PyEventBinder(wxEVT_LISTBOX, 1)
EVT_LISTBOX_DCLICK = wx.PyEventBinder(wxEVT_LISTBOX_DCLICK, 1)
EVT_MENU = wx.PyEventBinder(wxEVT_MENU, 1)
EVT_MENU_RANGE = wx.PyEventBinder(wxEVT_MENU, 2)
EVT_SLIDER = wx.PyEventBinder(wxEVT_SLIDER, 1)
EVT_RADIOBOX = wx.PyEventBinder(wxEVT_RADIOBOX, 1)
EVT_RADIOBUTTON = wx.PyEventBinder(wxEVT_RADIOBUTTON, 1)
EVT_SCROLLBAR = wx.PyEventBinder(wxEVT_SCROLLBAR, 1)
EVT_VLBOX = wx.PyEventBinder(wxEVT_VLBOX, 1)
EVT_COMBOBOX = wx.PyEventBinder(wxEVT_COMBOBOX, 1)
EVT_TOOL = wx.PyEventBinder(wxEVT_TOOL, 1)
EVT_TOOL_RANGE = wx.PyEventBinder(wxEVT_TOOL, 2)
EVT_TOOL_RCLICKED = wx.PyEventBinder(wxEVT_TOOL_RCLICKED, 1)
EVT_TOOL_RCLICKED_RANGE = wx.PyEventBinder(wxEVT_TOOL_RCLICKED, 2)
EVT_TOOL_ENTER = wx.PyEventBinder(wxEVT_TOOL_ENTER, 1)
EVT_TOOL_DROPDOWN = wx.PyEventBinder(wxEVT_TOOL_DROPDOWN, 1)
EVT_CHECKLISTBOX = wx.PyEventBinder(wxEVT_CHECKLISTBOX, 1)
EVT_COMBOBOX_DROPDOWN = wx.PyEventBinder(wxEVT_COMBOBOX_DROPDOWN, 1)
EVT_COMBOBOX_CLOSEUP = wx.PyEventBinder(wxEVT_COMBOBOX_CLOSEUP, 1)
EVT_COMMAND_LEFT_CLICK = wx.PyEventBinder(wxEVT_COMMAND_LEFT_CLICK, 1)
EVT_COMMAND_LEFT_DCLICK = wx.PyEventBinder(wxEVT_COMMAND_LEFT_DCLICK, 1)
EVT_COMMAND_RIGHT_CLICK = wx.PyEventBinder(wxEVT_COMMAND_RIGHT_CLICK, 1)
EVT_COMMAND_RIGHT_DCLICK = wx.PyEventBinder(wxEVT_COMMAND_RIGHT_DCLICK, 1)
EVT_COMMAND_SET_FOCUS = wx.PyEventBinder(wxEVT_COMMAND_SET_FOCUS, 1)
EVT_COMMAND_KILL_FOCUS = wx.PyEventBinder(wxEVT_COMMAND_KILL_FOCUS, 1)
EVT_COMMAND_ENTER = wx.PyEventBinder(wxEVT_COMMAND_ENTER, 1)
EVT_HELP = wx.PyEventBinder(wxEVT_HELP, 1)
EVT_HELP_RANGE = wx.PyEventBinder(wxEVT_HELP, 2)
EVT_DETAILED_HELP = wx.PyEventBinder(wxEVT_DETAILED_HELP, 1)
EVT_DETAILED_HELP_RANGE = wx.PyEventBinder(wxEVT_DETAILED_HELP, 2)
EVT_IDLE = wx.PyEventBinder(wxEVT_IDLE)
EVT_UPDATE_UI = wx.PyEventBinder(wxEVT_UPDATE_UI, 1)
EVT_UPDATE_UI_RANGE = wx.PyEventBinder(wxEVT_UPDATE_UI, 2)
EVT_CONTEXT_MENU = wx.PyEventBinder(wxEVT_CONTEXT_MENU)
EVT_THREAD = wx.PyEventBinder(wxEVT_THREAD)
EVT_WINDOW_MODAL_DIALOG_CLOSED = wx.PyEventBinder(wxEVT_WINDOW_MODAL_DIALOG_CLOSED)
EVT_JOY_BUTTON_DOWN = wx.PyEventBinder(wxEVT_JOY_BUTTON_DOWN)
EVT_JOY_BUTTON_UP = wx.PyEventBinder(wxEVT_JOY_BUTTON_UP)
EVT_JOY_MOVE = wx.PyEventBinder(wxEVT_JOY_MOVE)
EVT_JOY_ZMOVE = wx.PyEventBinder(wxEVT_JOY_ZMOVE)
# Catch-all binder covering all joystick event types.
EVT_JOYSTICK_EVENTS = wx.PyEventBinder([
    wxEVT_JOY_BUTTON_DOWN, wxEVT_JOY_BUTTON_UP,
    wxEVT_JOY_MOVE, wxEVT_JOY_ZMOVE,
])
# deprecated wxEVT aliases
wxEVT_COMMAND_BUTTON_CLICKED = wxEVT_BUTTON
wxEVT_COMMAND_CHECKBOX_CLICKED = wxEVT_CHECKBOX
wxEVT_COMMAND_CHOICE_SELECTED = wxEVT_CHOICE
wxEVT_COMMAND_LISTBOX_SELECTED = wxEVT_LISTBOX
wxEVT_COMMAND_LISTBOX_DOUBLECLICKED = wxEVT_LISTBOX_DCLICK
wxEVT_COMMAND_CHECKLISTBOX_TOGGLED = wxEVT_CHECKLISTBOX
wxEVT_COMMAND_MENU_SELECTED = wxEVT_MENU
wxEVT_COMMAND_TOOL_CLICKED = wxEVT_TOOL
wxEVT_COMMAND_SLIDER_UPDATED = wxEVT_SLIDER
wxEVT_COMMAND_RADIOBOX_SELECTED = wxEVT_RADIOBOX
wxEVT_COMMAND_RADIOBUTTON_SELECTED = wxEVT_RADIOBUTTON
wxEVT_COMMAND_SCROLLBAR_UPDATED = wxEVT_SCROLLBAR
wxEVT_COMMAND_VLBOX_SELECTED = wxEVT_VLBOX
wxEVT_COMMAND_COMBOBOX_SELECTED = wxEVT_COMBOBOX
wxEVT_COMMAND_TOOL_RCLICKED = wxEVT_TOOL_RCLICKED
wxEVT_COMMAND_TOOL_DROPDOWN_CLICKED = wxEVT_TOOL_DROPDOWN
wxEVT_COMMAND_TOOL_ENTER = wxEVT_TOOL_ENTER
wxEVT_COMMAND_COMBOBOX_DROPDOWN = wxEVT_COMBOBOX_DROPDOWN
wxEVT_COMMAND_COMBOBOX_CLOSEUP = wxEVT_COMBOBOX_CLOSEUP
# End of included code block
#-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=-=
# Classic name kept as a deprecated alias.
PyEvtHandler = wx.deprecated(EvtHandler, "Use :class:`EvtHandler` instead.")
def _PyEvent_Clone(self):
    """
    Make a new instance of the event that is a copy of self.

    Through the magic of Python this implementation should work for
    this and all derived classes.
    """
    # Create a new instance of the same type as this instance and
    # then invoke the C++ copy constructor to copy the C++ parts and
    # any custom attributes.
    clone = wx.PyEvent.__new__(self.__class__)
    wx.PyEvent.__init__(clone, self)
    return clone
PyEvent.Clone = _PyEvent_Clone
del _PyEvent_Clone
def _PyCommandEvent_Clone(self):
    """
    Make a new instance of the event that is a copy of self.

    Through the magic of Python this implementation should work for
    this and all derived classes.
    """
    # Create a new instance of the same type as this instance and
    # then invoke the C++ copy constructor to copy the C++ parts and
    # any custom attributes.
    clone = wx.PyCommandEvent.__new__(self.__class__)
    wx.PyCommandEvent.__init__(clone, self)
    return clone
PyCommandEvent.Clone = _PyCommandEvent_Clone
del _PyCommandEvent_Clone
def _Sizer_AddMany(self, items):
    """
    Convenience method for adding several items to a sizer in one call.

    Each element of *items* is either a tuple/list of the positional
    arguments normally passed to :meth:`Add`, or a single non-sequence
    item which is added on its own.
    """
    for entry in items:
        args = entry if isinstance(entry, (tuple, list)) else (entry,)
        self.Add(*args)
Sizer.AddMany = _Sizer_AddMany
del _Sizer_AddMany
def _Sizer___nonzero__(self):
    """
    Can be used to test if the C++ part of the sizer still exists, with
    code like this::

        if theSizer:
            doSomething()
    """
    import wx.siplib
    return not wx.siplib.isdeleted(self)
Sizer.__nonzero__ = _Sizer___nonzero__
del _Sizer___nonzero__
def _Sizer___iter__(self):
    """
    A Py convenience method that allows Sizers to act as iterables that
    will yield their wx.SizerItems.
    """
    for item in self.GetChildren(): yield item
Sizer.__iter__ = _Sizer___iter__
del _Sizer___iter__
# Python 3 uses __bool__; reuse the __nonzero__ implementation above.
Sizer.__bool__ = Sizer.__nonzero__
def _GridSizer_CalcRowsCols(self):
    """
    CalcRowsCols() -> (rows, cols)

    Calculates how many rows and columns will be in the sizer based
    on the current number of items and also the rows, cols specified
    in the constructor.

    :returns: an ``(int, int)`` tuple.
    """
    nitems = len(self.GetChildren())
    rows = self.GetRows()
    cols = self.GetCols()
    assert rows != 0 or cols != 0, "Grid sizer must have either rows or columns fixed"
    # Integer ceiling division: the original '/' produced a float on
    # Python 3, leaking float row/col counts to callers.
    if cols != 0:
        rows = (nitems + cols - 1) // cols
    elif rows != 0:
        cols = (nitems + rows - 1) // rows
    return (rows, cols)
GridSizer.CalcRowsCols = _GridSizer_CalcRowsCols
del _GridSizer_CalcRowsCols
# Classic name kept as a deprecated alias.
PySizer = wx.deprecated(Sizer, 'Use Sizer instead.')
# Readable repr for the sizer-item list wrapper.
def _SizerItemList___repr__(self):
    return "SizerItemList: " + repr(list(self))
SizerItemList.__repr__ = _SizerItemList___repr__
del _SizerItemList___repr__
# Pythonic conveniences for wx.GBPosition: immutable snapshot, string
# forms, sequence protocol, truthiness, and pickle support.
def _GBPosition_GetIM(self):
    """
    Returns an immutable representation of the ``wx.GBPosition`` object, based on ``namedtuple``.

    This new object is hashable and can be used as a dictionary key,
    be added to sets, etc. It can be converted back into a real ``wx.GBPosition``
    with a simple statement like this: ``obj = wx.GBPosition(imObj)``.
    """
    return _im_GBPosition(*self.Get())
GBPosition.GetIM = _GBPosition_GetIM
del _GBPosition_GetIM
def _GBPosition___str__(self):
    return str(self.Get())
GBPosition.__str__ = _GBPosition___str__
del _GBPosition___str__
def _GBPosition___repr__(self):
    return "wx.GBPosition"+str(self.Get())
GBPosition.__repr__ = _GBPosition___repr__
del _GBPosition___repr__
def _GBPosition___len__(self):
    return len(self.Get())
GBPosition.__len__ = _GBPosition___len__
del _GBPosition___len__
# A position is falsy only when it is exactly (0,0).
def _GBPosition___nonzero__(self):
    return self.Get() != (0,0)
GBPosition.__nonzero__ = _GBPosition___nonzero__
del _GBPosition___nonzero__
def _GBPosition___bool__(self):
    return self.Get() != (0,0)
GBPosition.__bool__ = _GBPosition___bool__
del _GBPosition___bool__
def _GBPosition___reduce__(self):
    return (GBPosition, self.Get())
GBPosition.__reduce__ = _GBPosition___reduce__
del _GBPosition___reduce__
def _GBPosition___getitem__(self, idx):
    return self.Get()[idx]
GBPosition.__getitem__ = _GBPosition___getitem__
del _GBPosition___getitem__
def _GBPosition___setitem__(self, idx, val):
    """Index assignment: 0 -> Row, 1 -> Col; anything else raises IndexError."""
    if idx == 0:
        self.Row = val
    elif idx == 1:
        self.Col = val
    else:
        raise IndexError
GBPosition.__setitem__ = _GBPosition___setitem__
del _GBPosition___setitem__
# Works together with the __reduce__-based pickle support defined above.
GBPosition.__safe_for_unpickling__ = True
# Pythonic conveniences for wx.GBSpan, mirroring the GBPosition set above.
def _GBSpan_GetIM(self):
    """
    Returns an immutable representation of the ``wx.GBSpan`` object, based on ``namedtuple``.

    This new object is hashable and can be used as a dictionary key,
    be added to sets, etc. It can be converted back into a real ``wx.GBSpan``
    with a simple statement like this: ``obj = wx.GBSpan(imObj)``.
    """
    return _im_GBSpan(*self.Get())
GBSpan.GetIM = _GBSpan_GetIM
del _GBSpan_GetIM
def _GBSpan___str__(self):
    return str(self.Get())
GBSpan.__str__ = _GBSpan___str__
del _GBSpan___str__
def _GBSpan___repr__(self):
    return "wx.GBSpan"+str(self.Get())
GBSpan.__repr__ = _GBSpan___repr__
del _GBSpan___repr__
def _GBSpan___len__(self):
    return len(self.Get())
GBSpan.__len__ = _GBSpan___len__
del _GBSpan___len__
# A span is falsy only when it is exactly (0,0).
def _GBSpan___nonzero__(self):
    return self.Get() != (0,0)
GBSpan.__nonzero__ = _GBSpan___nonzero__
del _GBSpan___nonzero__
def _GBSpan___bool__(self):
    return self.Get() != (0,0)
GBSpan.__bool__ = _GBSpan___bool__
del _GBSpan___bool__
def _GBSpan___reduce__(self):
    return (GBSpan, self.Get())
GBSpan.__reduce__ = _GBSpan___reduce__
del _GBSpan___reduce__
def _GBSpan___getitem__(self, idx):
    return self.Get()[idx]
GBSpan.__getitem__ = _GBSpan___getitem__
del _GBSpan___getitem__
# Index assignment: 0 -> Rowspan, 1 -> Colspan; anything else raises.
def _GBSpan___setitem__(self, idx, val):
    if idx == 0: self.Rowspan = val
    elif idx == 1: self.Colspan = val
    else: raise IndexError
GBSpan.__setitem__ = _GBSpan___setitem__
del _GBSpan___setitem__
# Works together with the __reduce__-based pickle support above.
GBSpan.__safe_for_unpickling__ = True
# Classic name kept as a deprecated alias.
GridBagSizer.CheckForIntersectionPos = wx.deprecated(GridBagSizer.CheckForIntersection, 'Use CheckForIntersection instead.')
# Immutable namedtuple counterparts returned by the GetIM methods above.
from collections import namedtuple
_im_GBPosition = namedtuple('_im_GBPosition', ['row', 'col'])
del namedtuple
from collections import namedtuple
_im_GBSpan = namedtuple('_im_GBSpan', ['rowspan', 'colspan'])
del namedtuple
# Context-manager support for EventLoopActivator; __exit__ returns False
# so exceptions propagate.
def _EventLoopActivator___enter__(self):
    return self
EventLoopActivator.__enter__ = _EventLoopActivator___enter__
del _EventLoopActivator___enter__
def _EventLoopActivator___exit__(self, exc_type, exc_val, exc_tb):
    return False
EventLoopActivator.__exit__ = _EventLoopActivator___exit__
del _EventLoopActivator___exit__
@wx.deprecatedMsg('Use GUIEventLoop instead.')
class EventLoop(GUIEventLoop):
    '''A class using the old name for compatibility.'''
    def __init__(self):
        GUIEventLoop.__init__(self)
def YieldIfNeeded():
    """
    Convenience wrapper for ``wx.GetApp().Yield(True)``.
    """
    app = wx.GetApp()
    return app.Yield(True)
class PyOnDemandOutputWindow(object):
    """
    Redirection target for Python's stdout/stderr streams that stays
    dormant until the first write, then pops up a Frame containing a
    read-only text control and appends all subsequent output there.
    """
    def __init__(self, title="wxPython: stdout/stderr"):
        # App.SetOutputWindowAttributes pokes title/pos/size directly, so
        # these attribute names are part of the public contract.
        self.frame = None
        self.title = title
        self.pos = wx.DefaultPosition
        self.size = (450, 300)
        self.parent = None
    def SetParent(self, parent):
        """
        Set the window to be used as the popup Frame's parent.
        """
        self.parent = parent
    def CreateOutputWindow(self, txt):
        # Build the frame and text control on first output and show them.
        self.frame = wx.Frame(self.parent, -1, self.title, self.pos,
                              self.size, style=wx.DEFAULT_FRAME_STYLE)
        self.text = wx.TextCtrl(self.frame, -1, "",
                                style=wx.TE_MULTILINE | wx.TE_READONLY)
        self.text.AppendText(txt)
        self.frame.Show(True)
        self.frame.Bind(wx.EVT_CLOSE, self.OnCloseWindow)
    def OnCloseWindow(self, event):
        # Tear everything down when the user closes the frame.
        if self.frame is not None:
            self.frame.Destroy()
        self.frame = None
        self.text = None
        self.parent = None
    def write(self, text):
        """
        Create the output window if needed and write the string to it.
        When called from a non-GUI thread the work is handed to the GUI
        thread via CallAfter.
        """
        if self.frame is None:
            sink = self.CreateOutputWindow
        else:
            sink = self.text.AppendText
        if wx.IsMainThread():
            sink(text)
        else:
            wx.CallAfter(sink, text)
    def close(self):
        if self.frame is not None:
            wx.CallAfter(self.frame.Close)
    def flush(self):
        # File-like API requirement; nothing is buffered here.
        pass
class App(PyApp):
    """
    The ``wx.App`` class represents the application and is used to:

        * bootstrap the wxPython system and initialize the underlying
          gui toolkit
        * set and get application-wide properties
        * implement the native windowing system main message or event
          loop, and to dispatch events to window instances
        * etc.

    Every wx application must have a single ``wx.App`` instance, and all
    creation of UI objects should be delayed until after the ``wx.App``
    object has been created in order to ensure that the gui platform and
    wxWidgets have been fully initialized.

    Normally you would derive from this class and implement an ``OnInit``
    method that creates a frame and then calls
    ``self.SetTopWindow(frame)``, however ``wx.App`` is also usable on
    it's own without derivation.
    """
    # Class used when output is redirected to a popup window; may be
    # replaced by the application with any compatible class.
    outputWindowClass = PyOnDemandOutputWindow
    def __init__(self, redirect=False, filename=None, useBestVisual=False, clearSigInt=True):
        """
        Construct a ``wx.App`` object.

        :param redirect: Should ``sys.stdout`` and ``sys.stderr`` be
            redirected?  Defaults to False. If ``filename`` is None then
            output is sent to a window created on demand (control the
            window class via the ``outputWindowClass`` class attribute).
        :param filename: The name of a file to redirect output to, if
            redirect is True.
        :param useBestVisual: Should the app try to use the best
            available visual provided by the system?  Must be given here
            rather than via `SetUseBestVisual` later, because it has to
            be set before the underlying GUI toolkit is initialized.
        :param clearSigInt: Should SIGINT be reset to the default so a
            Ctrl-C in the console terminates the app like other GUI apps?

        :note: Override OnInit to do application initialization, which
            runs once the system, toolkit and wxWidgets are fully
            initialized.
        """
        PyApp.__init__(self)
        # Fail early with a platform-appropriate hint when no GUI can be
        # created at all.
        if not self.IsDisplayAvailable():
            if wx.Port == "__WXMAC__":
                message = ("This program needs access to the screen. Please run with a\n"
                           "Framework build of python, and only when you are logged in\n"
                           "on the main display of your Mac.")
            elif wx.Port == "__WXGTK__":
                message = "Unable to access the X Display, is $DISPLAY set properly?"
            else:
                # TODO: more description is needed for wxMSW...
                message = "Unable to create GUI"
            raise SystemExit(message)
        # This has to be done before OnInit.
        self.SetUseBestVisual(useBestVisual)
        # Restore Python's default SIGINT handling so a Ctrl-C in the
        # console that started this app terminates it as expected instead
        # of appearing to do nothing (and later segfaulting on exit).
        if clearSigInt:
            try:
                import signal
                signal.signal(signal.SIGINT, signal.SIG_DFL)
            except:
                pass
        # Save the current stdio streams and optionally redirect them.
        self.stdioWin = None
        self.saveStdio = (_sys.stdout, _sys.stderr)
        if redirect:
            self.RedirectStdio(filename)
        # Use Python's install prefix as the default.
        installPrefix = _sys.prefix
        if isinstance(installPrefix, (bytes, bytearray)):
            installPrefix = installPrefix.decode(_sys.getfilesystemencoding())
        wx.StandardPaths.Get().SetInstallPrefix(installPrefix)
        # Until the new native control for wxMac is up to par, still use
        # the generic one.
        wx.SystemOptions.SetOption("mac.listctrl.always_use_generic", 1)
        # Finish the initialization of wxWindows and call the OnInit that
        # should be present in the derived class.
        self._BootstrapApp()
    def OnPreInit(self):
        """
        Things that must be done after _BootstrapApp has done its thing,
        but would be nice if they were already done by the time that
        OnInit is called.  Derived classes overriding this must call the
        base implementation.
        """
        wx.StockGDI._initStockObjects()
    def __del__(self):
        # Just in case MainLoop was overridden without calling RestoreStdio.
        self.RestoreStdio()
    def SetTopWindow(self, frame):
        """
        Set the "main" top level window, which is used as the parent of
        the on-demand output window and of dialogs without an explicit
        parent.
        """
        if self.stdioWin:
            self.stdioWin.SetParent(frame)
        wx.PyApp.SetTopWindow(self, frame)
    def MainLoop(self):
        """
        Execute the main GUI event loop, restoring the original stdio
        streams when it finishes.
        """
        status = wx.PyApp.MainLoop(self)
        self.RestoreStdio()
        return status
    def RedirectStdio(self, filename=None):
        """
        Redirect sys.stdout and sys.stderr to a file or a popup window.
        """
        if filename:
            _sys.stdout = _sys.stderr = open(filename, 'a')
        else:
            self.stdioWin = self.outputWindowClass()
            _sys.stdout = _sys.stderr = self.stdioWin
    def RestoreStdio(self):
        # Best effort: the saved streams may already be gone during
        # interpreter shutdown.
        try:
            _sys.stdout, _sys.stderr = self.saveStdio
        except:
            pass
    def SetOutputWindowAttributes(self, title=None, pos=None, size=None):
        """
        Set the title, position and/or size of the output window if the
        stdio has been redirected.  Call this before any output has
        caused the output window to be created.
        """
        if self.stdioWin:
            if title is not None:
                self.stdioWin.title = title
            if pos is not None:
                self.stdioWin.pos = pos
            if size is not None:
                self.stdioWin.size = size
    @staticmethod
    def Get():
        """
        A staticmethod returning the currently active application object.
        Essentially just a more pythonic version of :meth:`GetApp`.
        """
        return GetApp()
# Deprecated classic-wxPython subclass; adds nothing over App.
@wx.deprecatedMsg("Use :class:`App` instead.")
class PySimpleApp(App):
    """
    This class is deprecated. Please use :class:`App` instead.
    """
    def __init__(self, *args, **kw):
        App.__init__(self, *args, **kw)
EVT_TIMER = wx.PyEventBinder( wxEVT_TIMER )
class PyTimer(Timer):
    '''This timer class is passed the callable object to be called when the timer expires.'''
    def __init__(self, notify):
        Timer.__init__(self)
        # notify: callable invoked with no arguments on each timer expiry.
        self.notify = notify
    def Notify(self):
        if self.notify:
            self.notify()
def _Window_SetRect(self, rect):
return self.SetSize(rect)
Window.SetRect = _Window_SetRect
del _Window_SetRect
Window.Rect = property(Window.GetRect, Window.SetRect)
def _Window_SetClientRect(self, rect):
return self.SetClientSize(rect)
Window.SetClientRect = _Window_SetClientRect
del _Window_SetClientRect
Window.ClientRect = property(Window.GetClientRect, Window.SetClientRect)
Window.SetDimensions = wx.deprecated(Window.SetDimensions, 'Use SetSize instead.')
def _Window___nonzero__(self):
    """
    Can be used to test if the C++ part of the window still exists, with
    code like this::

        if theWindow:
            doSomething()
    """
    # Imported lazily to avoid a hard dependency at module import time.
    import wx.siplib
    return not wx.siplib.isdeleted(self)
Window.__nonzero__ = _Window___nonzero__
del _Window___nonzero__
# Same truthiness test under the Python 3 protocol name.
Window.__bool__ = Window.__nonzero__
def _Window_DestroyLater(self):
    """
    Schedules the window to be destroyed in the near future.

    This should be used whenever Destroy could happen too soon, such
    as when there may still be events for this window or its children
    waiting in the event queue.
    """
    # Hide immediately so the window disappears from view, then let the
    # App destroy it once pending events have been processed.
    self.Hide()
    wx.GetApp().ScheduleForDestruction(self)
Window.DestroyLater = _Window_DestroyLater
del _Window_DestroyLater
def _Window_DLG_UNIT(self, dlg_unit):
    """
    A convenience wrapper for :meth:`ConvertDialogToPixels`.

    :param dlg_unit: a wx.Point, wx.Size or plain 2-sequence in dialog units
    :returns: the converted value; wx.Point/wx.Size inputs keep their wx
        type, anything else is returned as a plain tuple
    """
    is_wxType = isinstance(dlg_unit, (wx.Size, wx.Point))
    pix = self.ConvertDialogToPixels(dlg_unit)
    if not is_wxType:
        pix = tuple(pix)
    return pix
Window.DLG_UNIT = _Window_DLG_UNIT
del _Window_DLG_UNIT
def _Window_PostCreate(self, pre):
    # Intentionally a no-op: the classic 2-phase-create post step is no
    # longer needed; kept only so legacy callers don't break.
    pass
Window.PostCreate = wx.deprecated(_Window_PostCreate, "PostCreate is no longer necessary.")
del _Window_PostCreate
# Deprecated classic-wxPython aliases for Window methods; each simply
# forwards to the modern equivalent and warns via wx.deprecated.
def _Window_GetPositionTuple(self):
    return self.GetPosition()
Window.GetPositionTuple = wx.deprecated(_Window_GetPositionTuple, "Use GetPosition instead")
del _Window_GetPositionTuple
def _Window_GetSizeTuple(self):
    return self.GetSize()
Window.GetSizeTuple = wx.deprecated(_Window_GetSizeTuple, "Use GetSize instead")
del _Window_GetSizeTuple
def _Window_MoveXY(self, x, y):
    return self.Move(x, y)
Window.MoveXY = wx.deprecated(_Window_MoveXY, "Use Move instead.")
del _Window_MoveXY
def _Window_SetSizeWH(self, w, h):
    return self.SetSize(w,h)
Window.SetSizeWH = wx.deprecated(_Window_SetSizeWH, "Use SetSize instead.")
del _Window_SetSizeWH
def _Window_SetVirtualSizeWH(self, w, h):
    return self.SetVirtualSize(w,h)
Window.SetVirtualSizeWH = wx.deprecated(_Window_SetVirtualSizeWH, "Use SetVirtualSize instead.")
del _Window_SetVirtualSizeWH
def _Window_GetVirtualSizeTuple(self):
    return self.GetVirtualSize()
Window.GetVirtualSizeTuple = wx.deprecated(_Window_GetVirtualSizeTuple, "Use GetVirtualSize instead.")
del _Window_GetVirtualSizeTuple
def _Window_SetToolTipString(self, string):
    return self.SetToolTip(string)
Window.SetToolTipString = wx.deprecated(_Window_SetToolTipString, "Use SetToolTip instead.")
del _Window_SetToolTipString
def _Window_ConvertDialogPointToPixels(self, point):
    return self.ConvertDialogToPixels(point)
Window.ConvertDialogPointToPixels = wx.deprecated(_Window_ConvertDialogPointToPixels, "Use ConvertDialogToPixels instead.")
del _Window_ConvertDialogPointToPixels
def _Window_ConvertDialogSizeToPixels(self, size):
    """
    Deprecated compatibility wrapper forwarding to
    :meth:`ConvertDialogToPixels`.

    Bug fix: the body previously referenced the undefined name ``point``
    instead of the ``size`` parameter, so calling this method always
    raised NameError.
    """
    return self.ConvertDialogToPixels(size)
Window.ConvertDialogSizeToPixels = wx.deprecated(_Window_ConvertDialogSizeToPixels, "Use ConvertDialogToPixels instead.")
del _Window_ConvertDialogSizeToPixels
def _Window_SetSizeHintsSz(self, minSize, maxSize=wx.DefaultSize, incSize=wx.DefaultSize):
    # Deprecated alias; SetSizeHints accepts the same wx.Size arguments.
    return self.SetSizeHints(minSize, maxSize, incSize)
Window.SetSizeHintsSz = wx.deprecated(_Window_SetSizeHintsSz, "Use SetSizeHints instead.")
del _Window_SetSizeHintsSz
class FrozenWindow(object):
    """
    A context manager for use in Python ``with`` statements: the given
    window is frozen when the block is entered and thawed again when it
    is left, even if the block raises an exception.
    """
    def __init__(self, window):
        self._window = window
    def __enter__(self):
        self._window.Freeze()
        return self
    def __exit__(self, exc_type, exc_val, exc_tb):
        self._window.Thaw()
def DLG_UNIT(win, dlg_unit, val2=None):
    """
    Convenience function for converting a wx.Point, wx.Size or
    (x,y) in dialog units to pixels, using the given window as a
    reference.
    """
    # Legacy two-coordinate calling convention: DLG_UNIT(win, x, y).
    if val2 is not None:
        dlg_unit = (dlg_unit, val2)
    # wx.Point/wx.Size results keep their wx type; plain sequences
    # come back as a tuple.
    keep_wx_type = isinstance(dlg_unit, (wx.Size, wx.Point))
    pixels = win.ConvertDialogToPixels(dlg_unit)
    return pixels if keep_wx_type else tuple(pixels)
# Classic names for the dialog-unit conversion helper above.
DLG_PNT = wx.deprecated(DLG_UNIT, "Use DLG_UNIT instead.")
DLG_SZE = wx.deprecated(DLG_UNIT, "Use DLG_UNIT instead.")
def _WindowList___repr__(self):
    # Readable repr for the generated WindowList sequence type.
    return "WindowList: " + repr(list(self))
WindowList.__repr__ = _WindowList___repr__
del _WindowList___repr__
# Deprecated classic Py* class aliases.
PyWindow = wx.deprecated(Window, 'Use Window instead.')
PyValidator = wx.deprecated(Validator, 'Use Validator instead.')
PyPanel = wx.deprecated(Panel, 'Use Panel instead.')
def _MenuItemList___repr__(self):
    # Readable repr for the generated MenuItemList sequence type.
    return "MenuItemList: " + repr(list(self))
MenuItemList.__repr__ = _MenuItemList___repr__
del _MenuItemList___repr__
# Deprecated classic-style Menu helpers; each forwards to the modern
# single-name overloads (Append/Insert/Prepend/Remove).
def _Menu_AppendMenu(self, id, item, subMenu, help=""):
    return self.Append(id, item, subMenu, help)
Menu.AppendMenu = wx.deprecated(_Menu_AppendMenu, "Use Append instead.")
del _Menu_AppendMenu
def _Menu_AppendItem(self, menuItem):
    return self.Append(menuItem)
Menu.AppendItem = wx.deprecated(_Menu_AppendItem, "Use Append instead.")
del _Menu_AppendItem
def _Menu_InsertMenu(self, pos, id, item, subMenu, help=""):
    return self.Insert(pos, id, item, subMenu, help)
Menu.InsertMenu = wx.deprecated(_Menu_InsertMenu, "Use Insert instead.")
del _Menu_InsertMenu
def _Menu_InsertItem(self, pos, menuItem):
    return self.Insert(pos, menuItem)
Menu.InsertItem = wx.deprecated(_Menu_InsertItem, "Use Insert instead.")
del _Menu_InsertItem
def _Menu_PrependMenu(self, id, item, subMenu, help=""):
    return self.Prepend(id, item, subMenu, help)
Menu.PrependMenu = wx.deprecated(_Menu_PrependMenu, "Use Prepend instead.")
del _Menu_PrependMenu
def _Menu_PrependItem(self, menuItem):
    return self.Prepend(menuItem)
Menu.PrependItem = wx.deprecated(_Menu_PrependItem, "Use Prepend instead.")
del _Menu_PrependItem
def _Menu_RemoveMenu(self, id, item, subMenu, help=""):
    # NOTE(review): Menu.Remove in Phoenix takes a single id or item;
    # forwarding four arguments here looks suspect — confirm against the
    # wx.Menu API before relying on this deprecated shim.
    return self.Remove(id, item, subMenu, help)
Menu.RemoveMenu = wx.deprecated(_Menu_RemoveMenu, "Use Remove instead.")
del _Menu_RemoveMenu
def _Menu_RemoveItem(self, menuItem):
    return self.Remove(menuItem)
Menu.RemoveItem = wx.deprecated(_Menu_RemoveItem, "Use Remove instead.")
del _Menu_RemoveItem
def _MenuBar_GetMenus(self):
    """
    GetMenus() -> [(menu, label), ...]

    Return a list of (menu, label) items for the menus in the :class:`MenuBar`.
    """
    # NOTE(review): GetLabelTop is itself deprecated in Phoenix; kept here
    # for classic-wxPython compatibility — confirm before modernizing.
    return [(self.GetMenu(i), self.GetLabelTop(i)) for i in range(self.GetMenuCount())]
MenuBar.GetMenus = _MenuBar_GetMenus
del _MenuBar_GetMenus
def _MenuBar_SetMenus(self, items):
    """
    SetMenus(items)

    Clear and add new menus to the :class:`MenuBar` from a list of (menu, label) items.
    """
    # Remove from the end so the indexes of menus not yet removed stay valid.
    for i in range(self.GetMenuCount()-1, -1, -1):
        self.Remove(i)
    for m, l in items:
        self.Append(m, l)
MenuBar.SetMenus = _MenuBar_SetMenus
del _MenuBar_SetMenus
MenuBar.Menus = property(MenuBar.GetMenus, MenuBar.SetMenus)
def _MenuList___repr__(self):
    # Readable repr for the generated MenuList sequence type.
    return "MenuList: " + repr(list(self))
MenuList.__repr__ = _MenuList___repr__
del _MenuList___repr__
PyScrolledWindow = wx.deprecated(ScrolledWindow, 'Use ScrolledWindow instead.')
def _VScrolledWindow_HitTest(self, *args):
    """
    Deprecated compatibility helper.

    Accepts either separate (x, y) coordinates or a single point-like
    value; only the y coordinate is forwarded to :meth:`VirtualHitTest`.
    """
    if len(args) == 2:
        x, y = args
        return self.VirtualHitTest(y)
    else:
        pt = args[0]
        return self.VirtualHitTest(pt[1])
VScrolledWindow.HitTest = wx.deprecated(_VScrolledWindow_HitTest, "Use VirtualHitTest instead.")
del _VScrolledWindow_HitTest
PyControl = wx.deprecated(Control, 'Use Control instead.')
def _ItemContainer_GetClientObject(self, n):
    """
    Alias for :meth:`GetClientData`
    """
    return self.GetClientData(n)
ItemContainer.GetClientObject = _ItemContainer_GetClientObject
del _ItemContainer_GetClientObject
def _ItemContainer_SetClientObject(self, n, data):
    """
    Alias for :meth:`SetClientData`
    """
    self.SetClientData(n, data)
ItemContainer.SetClientObject = _ItemContainer_SetClientObject
del _ItemContainer_SetClientObject
# Classic-wxPython aliases for the string-list accessors, plus the
# Pythonic Items property built from them.
def _ItemContainer_AppendItems(self, items):
    self.Append(items)
ItemContainer.AppendItems = _ItemContainer_AppendItems
del _ItemContainer_AppendItems
def _ItemContainer_GetItems(self):
    return self.GetStrings()
ItemContainer.GetItems = _ItemContainer_GetItems
del _ItemContainer_GetItems
def _ItemContainer_SetItems(self, items):
    self.Set(items)
ItemContainer.SetItems = _ItemContainer_SetItems
del _ItemContainer_SetItems
ItemContainer.Items = property(ItemContainer.GetItems, ItemContainer.SetItems)
# Notebook page-change event binders.
EVT_NOTEBOOK_PAGE_CHANGED = wx.PyEventBinder( wxEVT_NOTEBOOK_PAGE_CHANGED, 1 )
EVT_NOTEBOOK_PAGE_CHANGING = wx.PyEventBinder( wxEVT_NOTEBOOK_PAGE_CHANGING, 1 )
# Aliases for the "best book" control as described in the overview
BookCtrl = Notebook
wxEVT_BOOKCTRL_PAGE_CHANGED = wxEVT_NOTEBOOK_PAGE_CHANGED
wxEVT_BOOKCTRL_PAGE_CHANGING = wxEVT_NOTEBOOK_PAGE_CHANGING
EVT_BOOKCTRL_PAGE_CHANGED = EVT_NOTEBOOK_PAGE_CHANGED
EVT_BOOKCTRL_PAGE_CHANGING = EVT_NOTEBOOK_PAGE_CHANGING
# deprecated wxEVT aliases
wxEVT_COMMAND_BOOKCTRL_PAGE_CHANGED = wxEVT_BOOKCTRL_PAGE_CHANGED
wxEVT_COMMAND_BOOKCTRL_PAGE_CHANGING = wxEVT_BOOKCTRL_PAGE_CHANGING
wxEVT_COMMAND_NOTEBOOK_PAGE_CHANGED = wxEVT_NOTEBOOK_PAGE_CHANGED
wxEVT_COMMAND_NOTEBOOK_PAGE_CHANGING = wxEVT_NOTEBOOK_PAGE_CHANGING
# Splitter window event binders.
EVT_SPLITTER_SASH_POS_CHANGED = wx.PyEventBinder( wxEVT_SPLITTER_SASH_POS_CHANGED, 1 )
EVT_SPLITTER_SASH_POS_CHANGING = wx.PyEventBinder( wxEVT_SPLITTER_SASH_POS_CHANGING, 1 )
EVT_SPLITTER_DOUBLECLICKED = wx.PyEventBinder( wxEVT_SPLITTER_DOUBLECLICKED, 1 )
EVT_SPLITTER_UNSPLIT = wx.PyEventBinder( wxEVT_SPLITTER_UNSPLIT, 1 )
EVT_SPLITTER_DCLICK = EVT_SPLITTER_DOUBLECLICKED
# deprecated wxEVT aliases
wxEVT_COMMAND_SPLITTER_SASH_POS_CHANGED = wxEVT_SPLITTER_SASH_POS_CHANGED
wxEVT_COMMAND_SPLITTER_SASH_POS_CHANGING = wxEVT_SPLITTER_SASH_POS_CHANGING
wxEVT_COMMAND_SPLITTER_DOUBLECLICKED = wxEVT_SPLITTER_DOUBLECLICKED
wxEVT_COMMAND_SPLITTER_UNSPLIT = wxEVT_SPLITTER_UNSPLIT
# Collapsible pane event binder.
EVT_COLLAPSIBLEPANE_CHANGED = wx.PyEventBinder( wxEVT_COLLAPSIBLEPANE_CHANGED )
# deprecated wxEVT alias
wxEVT_COMMAND_COLLPANE_CHANGED = wxEVT_COLLAPSIBLEPANE_CHANGED
# Text control event binders.
EVT_TEXT = wx.PyEventBinder( wxEVT_TEXT, 1)
EVT_TEXT_ENTER = wx.PyEventBinder( wxEVT_TEXT_ENTER, 1)
EVT_TEXT_URL = wx.PyEventBinder( wxEVT_TEXT_URL, 1)
EVT_TEXT_MAXLEN = wx.PyEventBinder( wxEVT_TEXT_MAXLEN, 1)
EVT_TEXT_CUT = wx.PyEventBinder( wxEVT_TEXT_CUT )
EVT_TEXT_COPY = wx.PyEventBinder( wxEVT_TEXT_COPY )
EVT_TEXT_PASTE = wx.PyEventBinder( wxEVT_TEXT_PASTE )
# deprecated wxEVT aliases
wxEVT_COMMAND_TEXT_UPDATED = wxEVT_TEXT
wxEVT_COMMAND_TEXT_ENTER = wxEVT_TEXT_ENTER
wxEVT_COMMAND_TEXT_URL = wxEVT_TEXT_URL
wxEVT_COMMAND_TEXT_MAXLEN = wxEVT_TEXT_MAXLEN
wxEVT_COMMAND_TEXT_CUT = wxEVT_TEXT_CUT
wxEVT_COMMAND_TEXT_COPY = wxEVT_TEXT_COPY
wxEVT_COMMAND_TEXT_PASTE = wxEVT_TEXT_PASTE
# Deprecated ComboBox selection-mark aliases.
ComboBox.SetMark = wx.deprecated(ComboBox.SetTextSelection, 'Use SetTextSelection instead.')
ComboBox.GetMark = wx.deprecated(ComboBox.GetTextSelection, 'Use GetTextSelection instead.')
def _CheckListBox_GetCheckedItems(self):
    """
    GetCheckedItems()

    Return a sequence of integers corresponding to the checked items in
    the control, based on :meth:`IsChecked`.
    """
    return tuple([i for i in range(self.Count) if self.IsChecked(i)])
CheckListBox.GetCheckedItems = _CheckListBox_GetCheckedItems
del _CheckListBox_GetCheckedItems
def _CheckListBox_GetCheckedStrings(self):
    """
    GetCheckedStrings()

    Return a tuple of strings corresponding to the checked
    items of the control, based on :meth:`GetCheckedItems`.
    """
    return tuple([self.GetString(i) for i in self.GetCheckedItems()])
CheckListBox.GetCheckedStrings = _CheckListBox_GetCheckedStrings
del _CheckListBox_GetCheckedStrings
def _CheckListBox_SetCheckedItems(self, indexes):
    """
    SetCheckedItems(indexes)

    Sets the checked state of items if the index of the item is
    found in the indexes sequence.
    """
    # Validate all indexes up front so no partial update happens.
    for i in indexes:
        assert 0 <= i < self.Count, "Index (%s) out of range" % i
    # Every item is (re)set: checked when listed, unchecked otherwise.
    for i in range(self.Count):
        self.Check(i, i in indexes)
CheckListBox.SetCheckedItems = _CheckListBox_SetCheckedItems
del _CheckListBox_SetCheckedItems
def _CheckListBox_SetCheckedStrings(self, strings):
    """
    SetCheckedStrings(strings)

    Sets the checked state of items if the item's string is found
    in the strings sequence.
    """
    # Validate all strings up front so no partial update happens.
    for s in strings:
        assert s in self.GetStrings(), "String ('%s') not found" % s
    for i in range(self.Count):
        self.Check(i, self.GetString(i) in strings)
CheckListBox.SetCheckedStrings = _CheckListBox_SetCheckedStrings
del _CheckListBox_SetCheckedStrings
# Deprecated short-name aliases for the methods above.
def _CheckListBox_GetChecked(self):
    return self.GetCheckedItems()
CheckListBox.GetChecked = wx.deprecated(_CheckListBox_GetChecked, "Use GetCheckedItems instead.")
del _CheckListBox_GetChecked
def _CheckListBox_SetChecked(self, indexes):
    return self.SetCheckedItems(indexes)
CheckListBox.SetChecked = wx.deprecated(_CheckListBox_SetChecked, "Use SetCheckedItems instead.")
del _CheckListBox_SetChecked
CheckListBox.Checked = property(CheckListBox.GetChecked, CheckListBox.SetChecked)
CheckListBox.CheckedItems = property(CheckListBox.GetCheckedItems, CheckListBox.SetCheckedItems)
CheckListBox.CheckedStrings = property(CheckListBox.GetCheckedStrings, CheckListBox.SetCheckedStrings)
# Pythonic properties pairing the HeaderColumn getters with the
# SettableHeaderColumn setters.
SettableHeaderColumn.Title = property(HeaderColumn.GetTitle, SettableHeaderColumn.SetTitle)
SettableHeaderColumn.Bitmap = property(HeaderColumn.GetBitmap, SettableHeaderColumn.SetBitmap)
SettableHeaderColumn.Width = property(HeaderColumn.GetWidth, SettableHeaderColumn.SetWidth)
SettableHeaderColumn.MinWidth = property(HeaderColumn.GetMinWidth, SettableHeaderColumn.SetMinWidth)
SettableHeaderColumn.Alignment = property(HeaderColumn.GetAlignment, SettableHeaderColumn.SetAlignment)
SettableHeaderColumn.Flags = property(HeaderColumn.GetFlags, SettableHeaderColumn.SetFlags)
SettableHeaderColumn.Resizeable = property(HeaderColumn.IsResizeable, SettableHeaderColumn.SetResizeable)
SettableHeaderColumn.Sortable = property(HeaderColumn.IsSortable, SettableHeaderColumn.SetSortable)
SettableHeaderColumn.Reorderable = property(HeaderColumn.IsReorderable, SettableHeaderColumn.SetReorderable)
SettableHeaderColumn.Hidden = property(HeaderColumn.IsHidden, SettableHeaderColumn.SetHidden)
# HeaderCtrl event binders.
EVT_HEADER_CLICK = wx.PyEventBinder( wxEVT_HEADER_CLICK )
EVT_HEADER_RIGHT_CLICK = wx.PyEventBinder( wxEVT_HEADER_RIGHT_CLICK )
EVT_HEADER_MIDDLE_CLICK = wx.PyEventBinder( wxEVT_HEADER_MIDDLE_CLICK )
EVT_HEADER_DCLICK = wx.PyEventBinder( wxEVT_HEADER_DCLICK )
EVT_HEADER_RIGHT_DCLICK = wx.PyEventBinder( wxEVT_HEADER_RIGHT_DCLICK )
EVT_HEADER_MIDDLE_DCLICK = wx.PyEventBinder( wxEVT_HEADER_MIDDLE_DCLICK )
EVT_HEADER_SEPARATOR_DCLICK = wx.PyEventBinder( wxEVT_HEADER_SEPARATOR_DCLICK )
EVT_HEADER_BEGIN_RESIZE = wx.PyEventBinder( wxEVT_HEADER_BEGIN_RESIZE )
EVT_HEADER_RESIZING = wx.PyEventBinder( wxEVT_HEADER_RESIZING )
EVT_HEADER_END_RESIZE = wx.PyEventBinder( wxEVT_HEADER_END_RESIZE )
EVT_HEADER_BEGIN_REORDER = wx.PyEventBinder( wxEVT_HEADER_BEGIN_REORDER )
EVT_HEADER_END_REORDER = wx.PyEventBinder( wxEVT_HEADER_END_REORDER )
EVT_HEADER_DRAGGING_CANCELLED = wx.PyEventBinder( wxEVT_HEADER_DRAGGING_CANCELLED )
# deprecated wxEVT aliases
wxEVT_COMMAND_HEADER_CLICK = wxEVT_HEADER_CLICK
wxEVT_COMMAND_HEADER_RIGHT_CLICK = wxEVT_HEADER_RIGHT_CLICK
wxEVT_COMMAND_HEADER_MIDDLE_CLICK = wxEVT_HEADER_MIDDLE_CLICK
wxEVT_COMMAND_HEADER_DCLICK = wxEVT_HEADER_DCLICK
wxEVT_COMMAND_HEADER_RIGHT_DCLICK = wxEVT_HEADER_RIGHT_DCLICK
wxEVT_COMMAND_HEADER_MIDDLE_DCLICK = wxEVT_HEADER_MIDDLE_DCLICK
wxEVT_COMMAND_HEADER_SEPARATOR_DCLICK = wxEVT_HEADER_SEPARATOR_DCLICK
wxEVT_COMMAND_HEADER_BEGIN_RESIZE = wxEVT_HEADER_BEGIN_RESIZE
wxEVT_COMMAND_HEADER_RESIZING = wxEVT_HEADER_RESIZING
wxEVT_COMMAND_HEADER_END_RESIZE = wxEVT_HEADER_END_RESIZE
wxEVT_COMMAND_HEADER_BEGIN_REORDER = wxEVT_HEADER_BEGIN_REORDER
wxEVT_COMMAND_HEADER_END_REORDER = wxEVT_HEADER_END_REORDER
wxEVT_COMMAND_HEADER_DRAGGING_CANCELLED = wxEVT_HEADER_DRAGGING_CANCELLED
# SearchCtrl event binders.
EVT_SEARCHCTRL_CANCEL_BTN = wx.PyEventBinder( wxEVT_SEARCHCTRL_CANCEL_BTN, 1)
EVT_SEARCHCTRL_SEARCH_BTN = wx.PyEventBinder( wxEVT_SEARCHCTRL_SEARCH_BTN, 1)
# deprecated wxEVT aliases
wxEVT_COMMAND_SEARCHCTRL_CANCEL_BTN = wxEVT_SEARCHCTRL_CANCEL_BTN
wxEVT_COMMAND_SEARCHCTRL_SEARCH_BTN = wxEVT_SEARCHCTRL_SEARCH_BTN
def _RadioBox_GetItemLabel(self, n):
    """
    GetItemLabel(self, n) -> string

    Return the text of the n'th item in the radio box.
    """
    return self.GetString(n)
RadioBox.GetItemLabel = _RadioBox_GetItemLabel
del _RadioBox_GetItemLabel
def _RadioBox_SetItemLabel(self, n, text):
    """
    SetItemLabel(self, n, text)

    Set the text of the n'th item in the radio box.
    """
    self.SetString(n, text)
RadioBox.SetItemLabel = _RadioBox_SetItemLabel
del _RadioBox_SetItemLabel
def _Slider_GetRange(self):
    # Convenience accessor returning (min, max) as a tuple.
    return (self.GetMin(), self.GetMax())
Slider.GetRange = _Slider_GetRange
del _Slider_GetRange
Slider.Range = property(Slider.GetRange)
def _SpinButton_GetRange(self):
    return (self.GetMin(), self.GetMax())
SpinButton.GetRange = _SpinButton_GetRange
del _SpinButton_GetRange
# SetRange is the only setter the C++ API exposes, so emulate
# independent min/max setters on top of it.
def _SpinButton_SetMin(self, minVal):
    self.SetRange(minVal, self.GetMax())
SpinButton.SetMin = _SpinButton_SetMin
del _SpinButton_SetMin
def _SpinButton_SetMax(self, maxVal):
    self.SetRange(self.GetMin(), maxVal)
SpinButton.SetMax = _SpinButton_SetMax
del _SpinButton_SetMax
SpinButton.Max = property(SpinButton.GetMax, SpinButton.SetMax)
SpinButton.Min = property(SpinButton.GetMin, SpinButton.SetMin)
SpinButton.Range = property(SpinButton.GetRange)
# Spin button event binders.
EVT_SPIN_UP = wx.PyEventBinder( wxEVT_SPIN_UP, 1)
EVT_SPIN_DOWN = wx.PyEventBinder( wxEVT_SPIN_DOWN, 1)
EVT_SPIN = wx.PyEventBinder( wxEVT_SPIN, 1)
# SpinCtrl convenience accessors; same min/max emulation pattern as
# SpinButton above.
def _SpinCtrl_GetRange(self):
    return (self.GetMin(), self.GetMax())
SpinCtrl.GetRange = _SpinCtrl_GetRange
del _SpinCtrl_GetRange
def _SpinCtrl_SetMin(self, minVal):
    self.SetRange(minVal, self.GetMax())
SpinCtrl.SetMin = _SpinCtrl_SetMin
del _SpinCtrl_SetMin
def _SpinCtrl_SetMax(self, maxVal):
    self.SetRange(self.GetMin(), maxVal)
SpinCtrl.SetMax = _SpinCtrl_SetMax
del _SpinCtrl_SetMax
SpinCtrl.Max = property(SpinCtrl.GetMax, SpinCtrl.SetMax)
SpinCtrl.Min = property(SpinCtrl.GetMin, SpinCtrl.SetMin)
SpinCtrl.Range = property(SpinCtrl.GetRange)
# SpinCtrlDouble: identical pattern for the floating-point variant.
def _SpinCtrlDouble_GetRange(self):
    return (self.GetMin(), self.GetMax())
SpinCtrlDouble.GetRange = _SpinCtrlDouble_GetRange
del _SpinCtrlDouble_GetRange
def _SpinCtrlDouble_SetMin(self, minVal):
    self.SetRange(minVal, self.GetMax())
SpinCtrlDouble.SetMin = _SpinCtrlDouble_SetMin
del _SpinCtrlDouble_SetMin
def _SpinCtrlDouble_SetMax(self, maxVal):
    self.SetRange(self.GetMin(), maxVal)
SpinCtrlDouble.SetMax = _SpinCtrlDouble_SetMax
del _SpinCtrlDouble_SetMax
SpinCtrlDouble.Max = property(SpinCtrlDouble.GetMax, SpinCtrlDouble.SetMax)
SpinCtrlDouble.Min = property(SpinCtrlDouble.GetMin, SpinCtrlDouble.SetMin)
SpinCtrlDouble.Range = property(SpinCtrlDouble.GetRange)
EVT_SPINCTRL = wx.PyEventBinder( wxEVT_SPINCTRL, 1)
EVT_SPINCTRLDOUBLE = wx.PyEventBinder( wxEVT_SPINCTRLDOUBLE, 1)
# deprecated wxEVT aliases
wxEVT_COMMAND_SPINCTRL_UPDATED = wxEVT_SPINCTRL
wxEVT_COMMAND_SPINCTRLDOUBLE_UPDATED = wxEVT_SPINCTRLDOUBLE
EVT_TOGGLEBUTTON = PyEventBinder(wxEVT_TOGGLEBUTTON, 1)
# deprecated wxEVT alias
wxEVT_COMMAND_TOGGLEBUTTON_CLICKED = wxEVT_TOGGLEBUTTON
def _ToolBar_AddSimpleTool(self, toolId, bitmap, shortHelpString="", longHelpString="", isToggle=0):
    """
    Old style method to add a tool to the toolbar.

    :param isToggle: truthy makes the tool a check (toggle) item instead
        of a normal push tool.
    """
    kind = wx.ITEM_NORMAL
    if isToggle: kind = wx.ITEM_CHECK
    return self.AddTool(toolId, '', bitmap, wx.NullBitmap, kind,
                        shortHelpString, longHelpString)
ToolBar.AddSimpleTool = wx.deprecated(_ToolBar_AddSimpleTool, "Use :meth:`AddTool` instead.")
del _ToolBar_AddSimpleTool
def _ToolBar_AddLabelTool(self, id, label, bitmap, bmpDisabled=wx.NullBitmap, kind=wx.ITEM_NORMAL, shortHelp="", longHelp="", clientData=None):
    """
    Old style method to add a tool in the toolbar.
    """
    return self.AddTool(id, label, bitmap, bmpDisabled, kind,
                        shortHelp, longHelp, clientData)
ToolBar.AddLabelTool = wx.deprecated(_ToolBar_AddLabelTool, "Use :meth:`AddTool` instead.")
del _ToolBar_AddLabelTool
def _ToolBar_InsertSimpleTool(self, pos, toolId, bitmap, shortHelpString="", longHelpString="", isToggle=0):
    """
    Old style method to insert a tool in the toolbar.
    """
    kind = wx.ITEM_NORMAL
    if isToggle: kind = wx.ITEM_CHECK
    return self.InsertTool(pos, toolId, '', bitmap, wx.NullBitmap, kind,
                           shortHelpString, longHelpString)
ToolBar.InsertSimpleTool = wx.deprecated(_ToolBar_InsertSimpleTool, "Use :meth:`InsertTool` instead.")
del _ToolBar_InsertSimpleTool
def _ToolBar_InsertLabelTool(self, pos, id, label, bitmap, bmpDisabled=wx.NullBitmap, kind=wx.ITEM_NORMAL, shortHelp="", longHelp="", clientData=None):
    """
    Old style method to insert a tool in the toolbar.
    """
    return self.InsertTool(pos, id, label, bitmap, bmpDisabled, kind,
                           shortHelp, longHelp, clientData)
ToolBar.InsertLabelTool = wx.deprecated(_ToolBar_InsertLabelTool, "Use :meth:`InsertTool` instead.")
del _ToolBar_InsertLabelTool
# Deprecated classic aliases for the consolidated Find/Insert/SetItem API.
ListCtrl.FindItemData = wx.deprecated(ListCtrl.FindItem, "Use FindItem instead.")
ListCtrl.FindItemAtPos = wx.deprecated(ListCtrl.FindItem, "Use FindItem instead.")
ListCtrl.InsertStringItem = wx.deprecated(ListCtrl.InsertItem, "Use InsertItem instead.")
ListCtrl.InsertImageItem = wx.deprecated(ListCtrl.InsertItem, "Use InsertItem instead.")
ListCtrl.InsertImageStringItem = wx.deprecated(ListCtrl.InsertItem, "Use InsertItem instead.")
ListCtrl.SetStringItem = wx.deprecated(ListCtrl.SetItem, "Use SetItem instead.")
def _ListCtrl_Select(self, idx, on=1):
    """
    Selects/deselects an item.

    :param idx: index of the item
    :param on: truthy to select, falsy to deselect
    """
    if on: state = wx.LIST_STATE_SELECTED
    else: state = 0
    self.SetItemState(idx, state, wx.LIST_STATE_SELECTED)
ListCtrl.Select = _ListCtrl_Select
del _ListCtrl_Select
def _ListCtrl_Focus(self, idx):
    """
    Focus and show the given item.
    """
    self.SetItemState(idx, wx.LIST_STATE_FOCUSED, wx.LIST_STATE_FOCUSED)
    self.EnsureVisible(idx)
ListCtrl.Focus = _ListCtrl_Focus
del _ListCtrl_Focus
def _ListCtrl_GetFocusedItem(self):
    """
    Gets the currently focused item or -1 if none is focused.
    """
    return self.GetNextItem(-1, wx.LIST_NEXT_ALL, wx.LIST_STATE_FOCUSED)
ListCtrl.GetFocusedItem = _ListCtrl_GetFocusedItem
del _ListCtrl_GetFocusedItem
def _ListCtrl_GetFirstSelected(self, *args):
    """
    Returns the first selected item, or -1 when none is selected.
    """
    # *args is accepted (and ignored) only for classic-API compatibility.
    return self.GetNextSelected(-1)
ListCtrl.GetFirstSelected = _ListCtrl_GetFirstSelected
del _ListCtrl_GetFirstSelected
def _ListCtrl_GetNextSelected(self, item):
    """
    Returns subsequent selected items, or -1 when no more are selected.
    """
    return self.GetNextItem(item, wx.LIST_NEXT_ALL, wx.LIST_STATE_SELECTED)
ListCtrl.GetNextSelected = _ListCtrl_GetNextSelected
del _ListCtrl_GetNextSelected
def _ListCtrl_IsSelected(self, idx):
    """
    Returns ``True`` if the item is selected.
    """
    return (self.GetItemState(idx, wx.LIST_STATE_SELECTED) & wx.LIST_STATE_SELECTED) != 0
ListCtrl.IsSelected = _ListCtrl_IsSelected
del _ListCtrl_IsSelected
def _ListCtrl_SetColumnImage(self, col, image):
    # Fetch the existing column so setting the image keeps the other
    # column attributes intact.
    item = self.GetColumn(col)
    # preserve all other attributes too
    item.SetMask( wx.LIST_MASK_STATE |
                  wx.LIST_MASK_TEXT |
                  wx.LIST_MASK_IMAGE |
                  wx.LIST_MASK_DATA |
                  wx.LIST_SET_ITEM |
                  wx.LIST_MASK_WIDTH |
                  wx.LIST_MASK_FORMAT )
    item.SetImage(image)
    self.SetColumn(col, item)
ListCtrl.SetColumnImage = _ListCtrl_SetColumnImage
del _ListCtrl_SetColumnImage
def _ListCtrl_ClearColumnImage(self, col):
    # -1 removes the column's image.
    self.SetColumnImage(col, -1)
ListCtrl.ClearColumnImage = _ListCtrl_ClearColumnImage
del _ListCtrl_ClearColumnImage
def _ListCtrl_Append(self, entry):
    """
    Append an item to the list control.  The `entry` parameter should be a
    sequence with an item for each column
    """
    if len(entry):
        # NOTE(review): depends on the third-party `six` package for
        # py2/py3 text coercion — confirm it is a declared dependency.
        from six import text_type
        pos = self.InsertItem(self.GetItemCount(), text_type(entry[0]))
        for i in range(1, len(entry)):
            self.SetItem(pos, i, text_type(entry[i]))
        return pos
ListCtrl.Append = _ListCtrl_Append
del _ListCtrl_Append
ListCtrl.FocusedItem = property(ListCtrl.GetFocusedItem)
# ListCtrl event binders.
EVT_LIST_BEGIN_DRAG = PyEventBinder(wxEVT_LIST_BEGIN_DRAG , 1)
EVT_LIST_BEGIN_RDRAG = PyEventBinder(wxEVT_LIST_BEGIN_RDRAG , 1)
EVT_LIST_BEGIN_LABEL_EDIT = PyEventBinder(wxEVT_LIST_BEGIN_LABEL_EDIT , 1)
EVT_LIST_END_LABEL_EDIT = PyEventBinder(wxEVT_LIST_END_LABEL_EDIT , 1)
EVT_LIST_DELETE_ITEM = PyEventBinder(wxEVT_LIST_DELETE_ITEM , 1)
EVT_LIST_DELETE_ALL_ITEMS = PyEventBinder(wxEVT_LIST_DELETE_ALL_ITEMS , 1)
EVT_LIST_ITEM_SELECTED = PyEventBinder(wxEVT_LIST_ITEM_SELECTED , 1)
EVT_LIST_ITEM_DESELECTED = PyEventBinder(wxEVT_LIST_ITEM_DESELECTED , 1)
EVT_LIST_KEY_DOWN = PyEventBinder(wxEVT_LIST_KEY_DOWN , 1)
EVT_LIST_INSERT_ITEM = PyEventBinder(wxEVT_LIST_INSERT_ITEM , 1)
EVT_LIST_COL_CLICK = PyEventBinder(wxEVT_LIST_COL_CLICK , 1)
EVT_LIST_ITEM_RIGHT_CLICK = PyEventBinder(wxEVT_LIST_ITEM_RIGHT_CLICK , 1)
EVT_LIST_ITEM_MIDDLE_CLICK = PyEventBinder(wxEVT_LIST_ITEM_MIDDLE_CLICK, 1)
EVT_LIST_ITEM_ACTIVATED = PyEventBinder(wxEVT_LIST_ITEM_ACTIVATED , 1)
EVT_LIST_CACHE_HINT = PyEventBinder(wxEVT_LIST_CACHE_HINT , 1)
EVT_LIST_COL_RIGHT_CLICK = PyEventBinder(wxEVT_LIST_COL_RIGHT_CLICK , 1)
EVT_LIST_COL_BEGIN_DRAG = PyEventBinder(wxEVT_LIST_COL_BEGIN_DRAG , 1)
EVT_LIST_COL_DRAGGING = PyEventBinder(wxEVT_LIST_COL_DRAGGING , 1)
EVT_LIST_COL_END_DRAG = PyEventBinder(wxEVT_LIST_COL_END_DRAG , 1)
EVT_LIST_ITEM_FOCUSED = PyEventBinder(wxEVT_LIST_ITEM_FOCUSED , 1)
# deprecated wxEVT aliases
wxEVT_COMMAND_LIST_BEGIN_DRAG = wxEVT_LIST_BEGIN_DRAG
wxEVT_COMMAND_LIST_BEGIN_RDRAG = wxEVT_LIST_BEGIN_RDRAG
wxEVT_COMMAND_LIST_BEGIN_LABEL_EDIT = wxEVT_LIST_BEGIN_LABEL_EDIT
wxEVT_COMMAND_LIST_END_LABEL_EDIT = wxEVT_LIST_END_LABEL_EDIT
wxEVT_COMMAND_LIST_DELETE_ITEM = wxEVT_LIST_DELETE_ITEM
wxEVT_COMMAND_LIST_DELETE_ALL_ITEMS = wxEVT_LIST_DELETE_ALL_ITEMS
wxEVT_COMMAND_LIST_ITEM_SELECTED = wxEVT_LIST_ITEM_SELECTED
wxEVT_COMMAND_LIST_ITEM_DESELECTED = wxEVT_LIST_ITEM_DESELECTED
wxEVT_COMMAND_LIST_KEY_DOWN = wxEVT_LIST_KEY_DOWN
wxEVT_COMMAND_LIST_INSERT_ITEM = wxEVT_LIST_INSERT_ITEM
wxEVT_COMMAND_LIST_COL_CLICK = wxEVT_LIST_COL_CLICK
wxEVT_COMMAND_LIST_ITEM_RIGHT_CLICK = wxEVT_LIST_ITEM_RIGHT_CLICK
wxEVT_COMMAND_LIST_ITEM_MIDDLE_CLICK = wxEVT_LIST_ITEM_MIDDLE_CLICK
wxEVT_COMMAND_LIST_ITEM_ACTIVATED = wxEVT_LIST_ITEM_ACTIVATED
wxEVT_COMMAND_LIST_CACHE_HINT = wxEVT_LIST_CACHE_HINT
wxEVT_COMMAND_LIST_COL_RIGHT_CLICK = wxEVT_LIST_COL_RIGHT_CLICK
wxEVT_COMMAND_LIST_COL_BEGIN_DRAG = wxEVT_LIST_COL_BEGIN_DRAG
wxEVT_COMMAND_LIST_COL_DRAGGING = wxEVT_LIST_COL_DRAGGING
wxEVT_COMMAND_LIST_COL_END_DRAG = wxEVT_LIST_COL_END_DRAG
wxEVT_COMMAND_LIST_ITEM_FOCUSED = wxEVT_LIST_ITEM_FOCUSED
def _TreeItemId___hash__(self):
    # Hash on the underlying C++ item id so equal TreeItemIds hash equally.
    return hash(int(self.GetID()))
TreeItemId.__hash__ = _TreeItemId___hash__
del _TreeItemId___hash__
# Deprecated data-accessor aliases; item data is plain Python objects now.
TreeCtrl.GetItemPyData = wx.deprecated(TreeCtrl.GetItemData, 'Use GetItemData instead.')
TreeCtrl.SetItemPyData = wx.deprecated(TreeCtrl.SetItemData, 'Use SetItemData instead.')
TreeCtrl.GetPyData = wx.deprecated(TreeCtrl.GetItemData, 'Use GetItemData instead.')
TreeCtrl.SetPyData = wx.deprecated(TreeCtrl.SetItemData, 'Use SetItemData instead.')
def TreeItemData(data):
    # Identity shim: TreeItemData wrappers are no longer needed.
    return data
# NOTE(review): bare `deprecated` (not wx.deprecated) — presumably the
# module-level helper defined earlier in this file; confirm.
TreeItemData = deprecated(TreeItemData, "The TreeItemData class no longer exists, just pass your object directly to the tree instead.")
# TreeCtrl event binders.
EVT_TREE_BEGIN_DRAG = PyEventBinder(wxEVT_TREE_BEGIN_DRAG , 1)
EVT_TREE_BEGIN_RDRAG = PyEventBinder(wxEVT_TREE_BEGIN_RDRAG , 1)
EVT_TREE_BEGIN_LABEL_EDIT = PyEventBinder(wxEVT_TREE_BEGIN_LABEL_EDIT , 1)
EVT_TREE_END_LABEL_EDIT = PyEventBinder(wxEVT_TREE_END_LABEL_EDIT , 1)
EVT_TREE_DELETE_ITEM = PyEventBinder(wxEVT_TREE_DELETE_ITEM , 1)
EVT_TREE_GET_INFO = PyEventBinder(wxEVT_TREE_GET_INFO , 1)
EVT_TREE_SET_INFO = PyEventBinder(wxEVT_TREE_SET_INFO , 1)
EVT_TREE_ITEM_EXPANDED = PyEventBinder(wxEVT_TREE_ITEM_EXPANDED , 1)
EVT_TREE_ITEM_EXPANDING = PyEventBinder(wxEVT_TREE_ITEM_EXPANDING , 1)
EVT_TREE_ITEM_COLLAPSED = PyEventBinder(wxEVT_TREE_ITEM_COLLAPSED , 1)
EVT_TREE_ITEM_COLLAPSING = PyEventBinder(wxEVT_TREE_ITEM_COLLAPSING , 1)
EVT_TREE_SEL_CHANGED = PyEventBinder(wxEVT_TREE_SEL_CHANGED , 1)
EVT_TREE_SEL_CHANGING = PyEventBinder(wxEVT_TREE_SEL_CHANGING , 1)
EVT_TREE_KEY_DOWN = PyEventBinder(wxEVT_TREE_KEY_DOWN , 1)
EVT_TREE_ITEM_ACTIVATED = PyEventBinder(wxEVT_TREE_ITEM_ACTIVATED , 1)
EVT_TREE_ITEM_RIGHT_CLICK = PyEventBinder(wxEVT_TREE_ITEM_RIGHT_CLICK , 1)
EVT_TREE_ITEM_MIDDLE_CLICK = PyEventBinder(wxEVT_TREE_ITEM_MIDDLE_CLICK, 1)
EVT_TREE_END_DRAG = PyEventBinder(wxEVT_TREE_END_DRAG , 1)
EVT_TREE_STATE_IMAGE_CLICK = PyEventBinder(wxEVT_TREE_STATE_IMAGE_CLICK, 1)
EVT_TREE_ITEM_GETTOOLTIP = PyEventBinder(wxEVT_TREE_ITEM_GETTOOLTIP, 1)
EVT_TREE_ITEM_MENU = PyEventBinder(wxEVT_TREE_ITEM_MENU, 1)
# deprecated wxEVT aliases
wxEVT_COMMAND_TREE_BEGIN_DRAG = wxEVT_TREE_BEGIN_DRAG
wxEVT_COMMAND_TREE_BEGIN_RDRAG = wxEVT_TREE_BEGIN_RDRAG
wxEVT_COMMAND_TREE_BEGIN_LABEL_EDIT = wxEVT_TREE_BEGIN_LABEL_EDIT
wxEVT_COMMAND_TREE_END_LABEL_EDIT = wxEVT_TREE_END_LABEL_EDIT
wxEVT_COMMAND_TREE_DELETE_ITEM = wxEVT_TREE_DELETE_ITEM
wxEVT_COMMAND_TREE_GET_INFO = wxEVT_TREE_GET_INFO
wxEVT_COMMAND_TREE_SET_INFO = wxEVT_TREE_SET_INFO
wxEVT_COMMAND_TREE_ITEM_EXPANDED = wxEVT_TREE_ITEM_EXPANDED
wxEVT_COMMAND_TREE_ITEM_EXPANDING = wxEVT_TREE_ITEM_EXPANDING
wxEVT_COMMAND_TREE_ITEM_COLLAPSED = wxEVT_TREE_ITEM_COLLAPSED
wxEVT_COMMAND_TREE_ITEM_COLLAPSING = wxEVT_TREE_ITEM_COLLAPSING
wxEVT_COMMAND_TREE_SEL_CHANGED = wxEVT_TREE_SEL_CHANGED
wxEVT_COMMAND_TREE_SEL_CHANGING = wxEVT_TREE_SEL_CHANGING
wxEVT_COMMAND_TREE_KEY_DOWN = wxEVT_TREE_KEY_DOWN
wxEVT_COMMAND_TREE_ITEM_ACTIVATED = wxEVT_TREE_ITEM_ACTIVATED
wxEVT_COMMAND_TREE_ITEM_RIGHT_CLICK = wxEVT_TREE_ITEM_RIGHT_CLICK
wxEVT_COMMAND_TREE_ITEM_MIDDLE_CLICK = wxEVT_TREE_ITEM_MIDDLE_CLICK
wxEVT_COMMAND_TREE_END_DRAG = wxEVT_TREE_END_DRAG
wxEVT_COMMAND_TREE_STATE_IMAGE_CLICK = wxEVT_TREE_STATE_IMAGE_CLICK
wxEVT_COMMAND_TREE_ITEM_GETTOOLTIP = wxEVT_TREE_ITEM_GETTOOLTIP
wxEVT_COMMAND_TREE_ITEM_MENU = wxEVT_TREE_ITEM_MENU
# Picker control event binders.
EVT_COLOURPICKER_CHANGED = wx.PyEventBinder( wxEVT_COLOURPICKER_CHANGED, 1 )
# deprecated wxEVT alias
wxEVT_COMMAND_COLOURPICKER_CHANGED = wxEVT_COLOURPICKER_CHANGED
EVT_FILEPICKER_CHANGED = wx.PyEventBinder( wxEVT_FILEPICKER_CHANGED, 1 )
EVT_DIRPICKER_CHANGED = wx.PyEventBinder( wxEVT_DIRPICKER_CHANGED, 1 )
# deprecated wxEVT aliases
wxEVT_COMMAND_FILEPICKER_CHANGED = wxEVT_FILEPICKER_CHANGED
wxEVT_COMMAND_DIRPICKER_CHANGED = wxEVT_DIRPICKER_CHANGED
EVT_FONTPICKER_CHANGED = wx.PyEventBinder( wxEVT_FONTPICKER_CHANGED, 1 )
# deprecated wxEVT alias
wxEVT_COMMAND_FONTPICKER_CHANGED = wxEVT_FONTPICKER_CHANGED
if 'wxMac' in wx.PlatformInfo:
class ColourPickerCtrl(PickerBase):
'''
This control allows the user to select a colour. The
implementation varies by platform but is usually a button which
brings up a `wx.ColourDialog` when clicked.
Window Styles
-------------
====================== ============================================
wx.CLRP_DEFAULT Default style.
wx.CLRP_USE_TEXTCTRL Creates a text control to the left of the
picker button which is completely managed
by the `wx.ColourPickerCtrl` and which can
be used by the user to specify a colour.
The text control is automatically synchronized
with the button's value. Use functions defined in
`wx.PickerBase` to modify the text control.
wx.CLRP_SHOW_LABEL Shows the colour in HTML form (AABBCC) as the
colour button label (instead of no label at all).
====================== ============================================
Events
------
======================== ==========================================
EVT_COLOURPICKER_CHANGED The user changed the colour selected in the
control either using the button or using the
text control (see wx.CLRP_USE_TEXTCTRL; note
that in this case the event is fired only if
the user's input is valid, i.e. recognizable).
======================== ==========================================
'''
# ColourData object to be shared by all colour pickers, so they can
# share the custom colours
_colourData = None
#--------------------------------------------------
        class ColourPickerButton(BitmapButton):
            # Internal button widget used by the wxMac ColourPickerCtrl: it
            # displays the current colour as its bitmap and pops up a
            # wx.ColourDialog when clicked.
            def __init__(self, parent, id=-1, colour=wx.BLACK,
                         pos=wx.DefaultPosition, size=wx.DefaultSize,
                         style = CLRP_DEFAULT_STYLE,
                         validator = wx.DefaultValidator,
                         name = "colourpickerwidget"):
                # Start with a 1x1 placeholder bitmap; SetColour() below
                # replaces it with a properly sized colour swatch.
                wx.BitmapButton.__init__(self, parent, id, wx.Bitmap(1,1),
                                         pos, size, style, validator, name)
                self.SetColour(colour)
                self.InvalidateBestSize()
                self.SetInitialSize(size)
                self.Bind(wx.EVT_BUTTON, self.OnButtonClick)
                # Lazily create the class-wide ColourData so all pickers
                # share one set of custom colours; seed it with greys.
                if ColourPickerCtrl._colourData is None:
                    ColourPickerCtrl._colourData = wx.ColourData()
                    ColourPickerCtrl._colourData.SetChooseFull(True)
                    grey = 0
                    for i in range(16):
                        c = wx.Colour(grey, grey, grey)
                        ColourPickerCtrl._colourData.SetCustomColour(i, c)
                        grey += 16
            def SetColour(self, colour):
                # force a copy, in case the _colorData is shared
                self.colour = wx.Colour(colour)
                bmp = self._makeBitmap()
                self.SetBitmapLabel(bmp)
            def GetColour(self):
                # Returns the wx.Colour currently shown on the button.
                return self.colour
            def OnButtonClick(self, evt):
                # Show the shared colour dialog; on OK adopt its ColourData
                # and notify listeners with a ColourPickerEvent.
                ColourPickerCtrl._colourData.SetColour(self.colour)
                dlg = wx.ColourDialog(self, ColourPickerCtrl._colourData)
                if dlg.ShowModal() == wx.ID_OK:
                    ColourPickerCtrl._colourData = dlg.GetColourData()
                    self.SetColour(ColourPickerCtrl._colourData.GetColour())
                    evt = wx.ColourPickerEvent(self, self.GetId(), self.GetColour())
                    self.GetEventHandler().ProcessEvent(evt)
            def _makeBitmap(self):
                # Render the colour swatch bitmap, optionally overlaying the
                # HTML colour string when CLRP_SHOW_LABEL is set.
                width = height = 24
                bg = self.GetColour()
                if self.HasFlag(CLRP_SHOW_LABEL):
                    w, h = self.GetTextExtent(bg.GetAsString(wx.C2S_HTML_SYNTAX))
                    width += w
                bmp = wx.Bitmap(width, height)
                dc = wx.MemoryDC(bmp)
                dc.SetBackground(wx.Brush(self.colour))
                dc.Clear()
                if self.HasFlag(CLRP_SHOW_LABEL):
                    from wx.lib.colourutils import BestLabelColour
                    fg = BestLabelColour(bg)
                    dc.SetTextForeground(fg)
                    dc.DrawText(bg.GetAsString(wx.C2S_HTML_SYNTAX),
                                (width - w)/2, (height - h)/2)
                return bmp
#--------------------------------------------------
    def __init__(self, parent, id=-1, colour=wx.BLACK,
                 pos=wx.DefaultPosition, size=wx.DefaultSize,
                 style = CLRP_DEFAULT_STYLE,
                 validator = wx.DefaultValidator,
                 name = "colourpicker"):
        """Create a ColourPickerCtrl.

        :param colour: the initially selected colour; anything accepted by
            the :class:`wx.Colour` constructor may be passed.
        """
        # Coerce colour names/tuples etc. into a real wx.Colour.
        if type(colour) != wx.Colour:
            colour = wx.Colour(colour)
        wx.PickerBase.__init__(self)
        self.CreateBase(parent, id, colour.GetAsString(),
                        pos, size, style, validator, name)
        # The actual picker widget is the custom bitmap button defined above.
        widget = ColourPickerCtrl.ColourPickerButton(
            self, -1, colour, style=self.GetPickerStyle(style))
        self.SetPickerCtrl(widget)
        widget.Bind(wx.EVT_COLOURPICKER_CHANGED, self.OnColourChange)
        self.PostCreation()
    def GetColour(self):
        '''Returns the currently selected colour.'''
        return self.GetPickerCtrl().GetColour()
    def SetColour(self, colour):
        '''Set the displayed colour.'''
        self.GetPickerCtrl().SetColour(colour)
        self.UpdateTextCtrlFromPicker()

    Colour = property(GetColour, SetColour)
    def UpdatePickerFromTextCtrl(self):
        """Called by PickerBase when the text control changes: parse the text
        as a colour and, if valid and different, update the picker button and
        fire a ColourPickerEvent."""
        col = wx.Colour(self.GetTextCtrl().GetValue())
        if not col.IsOk():
            # Ignore text that does not parse as a colour.
            return
        if self.GetColour() != col:
            self.GetPickerCtrl().SetColour(col)
            evt = wx.ColourPickerEvent(self, self.GetId(), self.GetColour())
            self.GetEventHandler().ProcessEvent(evt)
def UpdateTextCtrlFromPicker(self):
if not self.GetTextCtrl():
return
self.GetTextCtrl().SetValue(self.GetColour().GetAsString())
def GetPickerStyle(self, style):
return style & CLRP_SHOW_LABEL
    def OnColourChange(self, evt):
        """Handler for the button's colour-change event: sync the optional
        text control, then re-emit the event from this control."""
        self.UpdateTextCtrlFromPicker()
        evt = wx.ColourPickerEvent(self, self.GetId(), self.GetColour())
        self.GetEventHandler().ProcessEvent(evt)
# ---- FileCtrl events ------------------------------------------------------
EVT_FILECTRL_SELECTIONCHANGED = wx.PyEventBinder( wxEVT_FILECTRL_SELECTIONCHANGED, 1)
EVT_FILECTRL_FILEACTIVATED = wx.PyEventBinder( wxEVT_FILECTRL_FILEACTIVATED, 1)
EVT_FILECTRL_FOLDERCHANGED = wx.PyEventBinder( wxEVT_FILECTRL_FOLDERCHANGED, 1)
EVT_FILECTRL_FILTERCHANGED = wx.PyEventBinder( wxEVT_FILECTRL_FILTERCHANGED, 1)

# ---- Choicebook events ----------------------------------------------------
EVT_CHOICEBOOK_PAGE_CHANGED = wx.PyEventBinder( wxEVT_CHOICEBOOK_PAGE_CHANGED, 1 )
EVT_CHOICEBOOK_PAGE_CHANGING = wx.PyEventBinder( wxEVT_CHOICEBOOK_PAGE_CHANGING, 1 )

# deprecated wxEVT aliases
wxEVT_COMMAND_CHOICEBOOK_PAGE_CHANGED = wxEVT_CHOICEBOOK_PAGE_CHANGED
wxEVT_COMMAND_CHOICEBOOK_PAGE_CHANGING = wxEVT_CHOICEBOOK_PAGE_CHANGING

# ---- Listbook events ------------------------------------------------------
EVT_LISTBOOK_PAGE_CHANGED = wx.PyEventBinder( wxEVT_LISTBOOK_PAGE_CHANGED, 1 )
EVT_LISTBOOK_PAGE_CHANGING = wx.PyEventBinder( wxEVT_LISTBOOK_PAGE_CHANGING, 1 )

# deprecated wxEVT aliases
wxEVT_COMMAND_LISTBOOK_PAGE_CHANGED = wxEVT_LISTBOOK_PAGE_CHANGED
wxEVT_COMMAND_LISTBOOK_PAGE_CHANGING = wxEVT_LISTBOOK_PAGE_CHANGING

# ---- Toolbook events ------------------------------------------------------
EVT_TOOLBOOK_PAGE_CHANGED = wx.PyEventBinder( wxEVT_TOOLBOOK_PAGE_CHANGED, 1 )
EVT_TOOLBOOK_PAGE_CHANGING = wx.PyEventBinder( wxEVT_TOOLBOOK_PAGE_CHANGING, 1 )

# deprecated wxEVT aliases
wxEVT_COMMAND_TOOLBOOK_PAGE_CHANGED = wxEVT_TOOLBOOK_PAGE_CHANGED
wxEVT_COMMAND_TOOLBOOK_PAGE_CHANGING = wxEVT_TOOLBOOK_PAGE_CHANGING

# ---- Treebook events ------------------------------------------------------
EVT_TREEBOOK_PAGE_CHANGED = wx.PyEventBinder( wxEVT_TREEBOOK_PAGE_CHANGED, 1 )
EVT_TREEBOOK_PAGE_CHANGING = wx.PyEventBinder( wxEVT_TREEBOOK_PAGE_CHANGING, 1)
EVT_TREEBOOK_NODE_COLLAPSED = wx.PyEventBinder( wxEVT_TREEBOOK_NODE_COLLAPSED, 1 )
EVT_TREEBOOK_NODE_EXPANDED = wx.PyEventBinder( wxEVT_TREEBOOK_NODE_EXPANDED, 1 )

# deprecated wxEVT aliases
wxEVT_COMMAND_TREEBOOK_PAGE_CHANGED = wxEVT_TREEBOOK_PAGE_CHANGED
wxEVT_COMMAND_TREEBOOK_PAGE_CHANGING = wxEVT_TREEBOOK_PAGE_CHANGING
wxEVT_COMMAND_TREEBOOK_NODE_COLLAPSED = wxEVT_TREEBOOK_NODE_COLLAPSED
wxEVT_COMMAND_TREEBOOK_NODE_EXPANDED = wxEVT_TREEBOOK_NODE_EXPANDED

# Give wx.Dialog context-manager support: leaving the "with" block destroys
# the dialog.
def _Dialog___enter__(self):
    return self
Dialog.__enter__ = _Dialog___enter__
del _Dialog___enter__
def _Dialog___exit__(self, exc_type, exc_val, exc_tb):
    self.Destroy()
Dialog.__exit__ = _Dialog___exit__
del _Dialog___exit__

# ---- GenericDirCtrl events ------------------------------------------------
EVT_DIRCTRL_SELECTIONCHANGED = wx.PyEventBinder( wxEVT_DIRCTRL_SELECTIONCHANGED, 1 )
EVT_DIRCTRL_FILEACTIVATED = wx.PyEventBinder( wxEVT_DIRCTRL_FILEACTIVATED, 1 )

# ---- Find/replace dialog events -------------------------------------------
EVT_FIND = wx.PyEventBinder( wxEVT_FIND, 1 )
EVT_FIND_NEXT = wx.PyEventBinder( wxEVT_FIND_NEXT, 1 )
EVT_FIND_REPLACE = wx.PyEventBinder( wxEVT_FIND_REPLACE, 1 )
EVT_FIND_REPLACE_ALL = wx.PyEventBinder( wxEVT_FIND_REPLACE_ALL, 1 )
EVT_FIND_CLOSE = wx.PyEventBinder( wxEVT_FIND_CLOSE, 1 )

# deprecated wxEVT aliases
wxEVT_COMMAND_FIND = wxEVT_FIND
wxEVT_COMMAND_FIND_NEXT = wxEVT_FIND_NEXT
wxEVT_COMMAND_FIND_REPLACE = wxEVT_FIND_REPLACE
wxEVT_COMMAND_FIND_REPLACE_ALL = wxEVT_FIND_REPLACE_ALL
wxEVT_COMMAND_FIND_CLOSE = wxEVT_FIND_CLOSE

# ---- Power-management events ----------------------------------------------
EVT_POWER_SUSPENDING = wx.PyEventBinder( wxEVT_POWER_SUSPENDING , 1 )
EVT_POWER_SUSPENDED = wx.PyEventBinder( wxEVT_POWER_SUSPENDED , 1 )
EVT_POWER_SUSPEND_CANCEL = wx.PyEventBinder( wxEVT_POWER_SUSPEND_CANCEL , 1 )
EVT_POWER_RESUME = wx.PyEventBinder( wxEVT_POWER_RESUME , 1 )

# Context-manager support for wx.WindowDisabler; __exit__ itself does nothing.
def _WindowDisabler___enter__(self):
    return self
WindowDisabler.__enter__ = _WindowDisabler___enter__
del _WindowDisabler___enter__
def _WindowDisabler___exit__(self, exc_type, exc_val, exc_tb):
    pass
WindowDisabler.__exit__ = _WindowDisabler___exit__
del _WindowDisabler___exit__

# Context-manager support for wx.BusyCursor; __exit__ itself does nothing.
def _BusyCursor___enter__(self):
    return self
BusyCursor.__enter__ = _BusyCursor___enter__
del _BusyCursor___enter__
def _BusyCursor___exit__(self, exc_type, exc_val, exc_tb):
    pass
BusyCursor.__exit__ = _BusyCursor___exit__
del _BusyCursor___exit__

EVT_END_PROCESS = wx.PyEventBinder( wxEVT_END_PROCESS )

# Deprecated classic-wxPython aliases.
PrintPreview.Ok = wx.deprecated(PrintPreview.IsOk, 'Use IsOk instead.')
PyPrintPreview = wx.deprecated(PrintPreview, 'Use PrintPreview instead.')
PyPreviewFrame = wx.deprecated(PreviewFrame, 'Use PreviewFrame instead.')
PyPreviewControlBar = wx.deprecated(PreviewControlBar, 'Use PreviewControlBar instead.')
PyPrintout = wx.deprecated(Printout, 'Use Printout instead.')

# Context-manager support for wx.BusyInfo; returning False lets any exception
# raised inside the "with" block propagate.
def _BusyInfo___enter__(self):
    return self
BusyInfo.__enter__ = _BusyInfo___enter__
del _BusyInfo___enter__
def _BusyInfo___exit__(self, exc_type, exc_val, exc_tb):
    return False
BusyInfo.__exit__ = _BusyInfo___exit__
del _BusyInfo___exit__

# Readable reprs for the generated list-wrapper classes.
def _FileHistoryMenuList___repr__(self):
    return "FileHistoryMenuList: " + repr(list(self))
FileHistoryMenuList.__repr__ = _FileHistoryMenuList___repr__
del _FileHistoryMenuList___repr__
def _CommandList___repr__(self):
    return "CommandList: " + repr(list(self))
CommandList.__repr__ = _CommandList___repr__
del _CommandList___repr__

EVT_FSWATCHER = wx.PyEventBinder(wxEVT_FSWATCHER)
def version():
    """
    Returns a string containing version and port info
    """
    # Map the compiled-in port identifier to a short human-readable name.
    if wx.Port == '__WXMSW__':
        port = 'msw'
    elif wx.Port == '__WXMAC__':
        port = 'osx-carbon' if 'wxOSX-carbon' in wx.PlatformInfo else 'osx-cocoa'
    elif wx.Port == '__WXGTK__':
        if 'gtk2' in wx.PlatformInfo:
            port = 'gtk2'
        elif 'gtk3' in wx.PlatformInfo:
            port = 'gtk3'
        else:
            port = 'gtk'
    else:
        port = '???'
    return "%s %s (phoenix) %s" % (wx.VERSION_STRING, port, wx.wxWidgets_version)
def CallAfter(callableObj, *args, **kw):
    """
    Call the specified function after the current and pending event
    handlers have been completed. This is also good for making GUI
    method calls from non-GUI threads. Any extra positional or
    keyword args are passed on to the callable when it is called.

    :param PyObject callableObj: the callable object
    :param args: arguments to be passed to the callable object
    :param kw: keywords to be passed to the callable object

    .. seealso::
        :ref:`wx.CallLater`
    """
    assert callable(callableObj), "callableObj is not callable"
    app = wx.GetApp()
    assert app is not None, 'No wx.App created yet'
    # Lazily register (once per app instance) a custom event type whose
    # handler simply invokes the payload carried on the event.
    if not hasattr(app, "_CallAfterId"):
        app._CallAfterId = wx.NewEventType()
        app.Connect(-1, -1, app._CallAfterId,
                    lambda event: event.callable(*event.args, **event.kw) )
    # Post an event carrying the callable and its arguments; it is invoked
    # later, during normal event dispatch.
    evt = wx.PyEvent()
    evt.SetEventType(app._CallAfterId)
    evt.callable = callableObj
    evt.args = args
    evt.kw = kw
    wx.PostEvent(app, evt)
class CallLater(object):
    """
    A convenience class for :class:`wx.Timer`, that calls the given callable
    object once after the given amount of milliseconds, passing any
    positional or keyword args. The return value of the callable is
    available after it has been run with the :meth:`~wx.CallLater.GetResult`
    method.

    If you don't need to get the return value or restart the timer
    then there is no need to hold a reference to this object. CallLater
    maintains references to its instances while they are running. When they
    finish, the internal reference is deleted and the GC is free to collect
    naturally.

    .. seealso::
        :func:`wx.CallAfter`
    """
    # Keeps every running instance alive (see docstring); entries are removed
    # again in Stop().
    __instances = {}

    def __init__(self, millis, callableObj, *args, **kwargs):
        """
        Constructs a new :class:`wx.CallLater` object.

        :param int millis: number of milliseconds to delay until calling the callable object
        :param PyObject callableObj: the callable object
        :param args: arguments to be passed to the callable object
        :param kw: keywords to be passed to the callable object
        """
        assert callable(callableObj), "callableObj is not callable"
        self.millis = millis
        self.callable = callableObj
        self.SetArgs(*args, **kwargs)
        self.runCount = 0      # number of times the callable has fired
        self.running = False   # True while a one-shot timer is pending
        self.hasRun = False
        self.result = None
        self.timer = None
        self.Start()

    def __del__(self):
        self.Stop()

    def Start(self, millis=None, *args, **kwargs):
        """
        (Re)start the timer

        :param int millis: number of milli seconds
        :param args: arguments to be passed to the callable object
        :param kw: keywords to be passed to the callable object
        """
        self.hasRun = False
        if millis is not None:
            self.millis = millis
        if args or kwargs:
            self.SetArgs(*args, **kwargs)
        # Cancel any pending timer before scheduling a new one-shot.
        self.Stop()
        CallLater.__instances[self] = "value irrelevant" # Maintain a reference to avoid GC
        self.timer = wx.PyTimer(self.Notify)
        self.timer.Start(self.millis, wx.TIMER_ONE_SHOT)
        self.running = True
    Restart = Start

    def Stop(self):
        """
        Stop and destroy the timer.
        """
        # Drop the keep-alive reference (if present) and kill the timer.
        if self in CallLater.__instances:
            del CallLater.__instances[self]
        if self.timer is not None:
            self.timer.Stop()
            self.timer = None

    def GetInterval(self):
        # Interval of the underlying timer, or 0 when no timer is active.
        if self.timer is not None:
            return self.timer.GetInterval()
        else:
            return 0

    def IsRunning(self):
        # True only while the underlying one-shot timer is still pending.
        return self.timer is not None and self.timer.IsRunning()

    def SetArgs(self, *args, **kwargs):
        """
        (Re)set the args passed to the callable object. This is
        useful in conjunction with :meth:`Start` if
        you want to schedule a new call to the same callable
        object but with different parameters.

        :param args: arguments to be passed to the callable object
        :param kw: keywords to be passed to the callable object
        """
        self.args = args
        self.kwargs = kwargs

    def HasRun(self):
        """
        Returns whether or not the callable has run.

        :rtype: bool
        """
        return self.hasRun

    def GetResult(self):
        """
        Returns the value of the callable.

        :rtype: a Python object
        :return: result from callable
        """
        return self.result

    def Notify(self):
        """
        The timer has expired so call the callable.
        """
        # NOTE(review): 'im_self' is a Python 2 bound-method attribute; under
        # Python 3 getattr() always returns the default True here.
        if self.callable and getattr(self.callable, 'im_self', True):
            self.runCount += 1
            self.running = False
            self.result = self.callable(*self.args, **self.kwargs)
        self.hasRun = True
        if not self.running:
            # if it wasn't restarted, then cleanup
            wx.CallAfter(self.Stop)

    Interval = property(GetInterval)
    Result = property(GetResult)
# Backwards-compatibility alias for the classic-wxPython name.
FutureCall = deprecated(CallLater, 'Use CallLater instead.')

def GetDefaultPyEncoding():
    """Always returns 'utf-8'; kept only for backwards compatibility."""
    return "utf-8"
GetDefaultPyEncoding = deprecated(GetDefaultPyEncoding, msg="wxPython now always uses utf-8")
| [
"[email protected]"
] | |
a48af5ad00e06e718218840b9a84fdd2d9b2aec9 | 412b5863c3f9c0d1979f333f843341b687f851b7 | /worksheets/storage.py | ee87fb7b55e3fef13e0eab94d416a413699868d3 | [] | no_license | westernguy2/personal | aaee32e77ba2cd158a908605daa1115c68963163 | 6a4d37eebadb8a75152eb362ca45b4657bc0b663 | refs/heads/master | 2021-09-22T12:09:39.557077 | 2021-09-13T07:29:35 | 2021-09-13T07:29:35 | 146,562,461 | 0 | 0 | null | 2020-05-18T07:27:54 | 2018-08-29T07:30:30 | HTML | UTF-8 | Python | false | false | 1,120 | py | from django.core.files.storage import FileSystemStorage
from django.conf import settings
import os
class OverwriteStorage(FileSystemStorage):
    """File storage that overwrites an existing file of the same name
    instead of letting Django generate an alternative upload name."""

    def get_available_name(self, name, max_length):
        """Returns a filename that's free on the target storage system, and
        available for new content to be written to.

        Found at http://djangosnippets.org/snippets/976/

        This file storage solves overwrite on upload problem. Another
        proposed solution was to override the save method on the model
        like so (from https://code.djangoproject.com/ticket/11663)::

            def save(self, *args, **kwargs):
                try:
                    this = MyModelName.objects.get(id=self.id)
                    if this.MyImageFieldName != self.MyImageFieldName:
                        this.MyImageFieldName.delete()
                except: pass
                super(MyModelName, self).save(*args, **kwargs)
        """
        # If the filename already exists, remove it as if it was a true file system
        if self.exists(name):
            os.remove(os.path.join(settings.MEDIA_ROOT, name))
        return name
"[email protected]"
] | |
a687ba0d5cb12fafa14c7e22a712c8c997ff5721 | 52b287db51ac7d47c04f6d5d0180ad9754dce7f7 | /GANs/Face_Detection/DC_GAN.py | 461dd0b38faae92ef3a09231b1f22ad3179ebb46 | [] | no_license | AshishRMenon/Implementations | 3e44be6b3a0af8e468e3f515d98eb076a912ee33 | 4470fd1cbfe471bc2f994c8190b5ddec492cbf1f | refs/heads/master | 2022-01-04T21:33:11.546711 | 2019-05-26T16:25:45 | 2019-05-26T16:25:45 | 172,696,966 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 10,774 | py | #!/usr/bin/env python
# coding: utf-8
# In[ ]:
from __future__ import print_function
import argparse
import os
import random
import torch
import torch.nn as nn
import torch.nn.parallel
import torch.backends.cudnn as cudnn
import torch.optim as optim
import torch.utils.data
import torchvision.datasets as dset
import torchvision.transforms as transforms
import torchvision.utils as vutils
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.animation as animation
from IPython.display import HTML
import pathlib
from skimage import io
# Fixed seed so runs are reproducible.
random.seed(999)

# Root directory for dataset
dataroot = "celeba"

# Number of workers for dataloader
workers = 2

# Batch size during training
batch_size = 128

# Spatial size of training images. All images will be resized to this
# size using a transformer.
image_size = 64

# Number of channels in the training images. For color images this is 3
nc = 3

# Size of z latent vector (i.e. size of generator input)
nz = 100

# Size of feature maps in generator
ngf = 64

# Size of feature maps in discriminator
ndf = 64

# Number of training epochs
num_epochs = 50

# Learning rate for optimizers
lr = 0.0002

# Beta1 hyperparam for Adam optimizers
beta1 = 0.5

# Number of GPUs available. Use 0 for CPU mode.
ngpu = 4

# Images are resized/centre-cropped to image_size and normalised to
# mean 0.5 / std 0.5 per channel, i.e. into [-1, 1].
dataset = dset.ImageFolder(root=dataroot,
                           transform=transforms.Compose([
                               transforms.Resize(image_size),
                               transforms.CenterCrop(image_size),
                               transforms.ToTensor(),
                               transforms.Normalize((0.5, 0.5, 0.5), (0.5, 0.5, 0.5)),
                           ]))
# Create the dataloader
dataloader = torch.utils.data.DataLoader(dataset, batch_size=batch_size,
                                         shuffle=True, num_workers=workers)

# Decide which device we want to run on
device = torch.device("cuda:0" if (torch.cuda.is_available() and ngpu > 0) else "cpu")

# Plot some training images
real_batch = next(iter(dataloader))
plt.figure(figsize=(8,8))
plt.axis("off")
plt.title("Training Images")
plt.imshow(np.transpose(vutils.make_grid(real_batch[0].to(device)[:64], padding=2, normalize=True).cpu(),(1,2,0)))
def weights_init(m):
    """DCGAN weight initialisation, applied via ``model.apply(weights_init)``:
    conv layers get weights ~ N(0, 0.02); batch-norm layers get
    weights ~ N(1, 0.02) and zero bias."""
    layer_name = type(m).__name__
    if 'Conv' in layer_name:
        nn.init.normal_(m.weight.data, 0.0, 0.02)
    elif 'BatchNorm' in layer_name:
        nn.init.normal_(m.weight.data, 1.0, 0.02)
        nn.init.constant_(m.bias.data, 0)
# Generator checkpoint path; fg.exists() below decides create-vs-resume.
g='/home/ashishmenon/labpc/GAN_KAN/DCGAN/gmodel.pth'
fg=pathlib.Path(g)
class Generator(nn.Module):
    """DCGAN generator: maps a latent vector of shape (N, nz, 1, 1) to a
    (N, nc, 64, 64) image in [-1, 1] via strided transposed convolutions."""
    def __init__(self, ngpu):
        super(Generator, self).__init__()
        self.ngpu = ngpu
        self.main = nn.Sequential(
            nn.ConvTranspose2d( nz, ngf * 8, 4, 1, 0, bias=False),
            # NOTE(review): the reference DCGAN generator applies
            # BatchNorm2d(ngf * 8) here before the ReLU; it is absent in this
            # implementation -- confirm whether that is intentional.
            nn.ReLU(True),
            # state size. (ngf*8) x 4 x 4
            nn.ConvTranspose2d(ngf * 8, ngf * 4, 4, 2, 1, bias=False),
            nn.BatchNorm2d(ngf * 4),
            nn.ReLU(True),
            # state size. (ngf*4) x 8 x 8
            nn.ConvTranspose2d( ngf * 4, ngf * 2, 4, 2, 1, bias=False),
            nn.BatchNorm2d(ngf * 2),
            nn.ReLU(True),
            # state size. (ngf*2) x 16 x 16
            nn.ConvTranspose2d( ngf * 2, ngf, 4, 2, 1, bias=False),
            nn.BatchNorm2d(ngf),
            nn.ReLU(True),
            # state size. (ngf) x 32 x 32
            nn.ConvTranspose2d( ngf, nc, 4, 2, 1, bias=False),
            nn.Tanh()
            # state size. (nc) x 64 x 64
        )

    def forward(self, input):
        return self.main(input)
if not fg.exists():
    # No saved checkpoint: start from a freshly initialised generator.
    netG = Generator(ngpu).to(device)

    # Handle multi-gpu if desired
    if (device.type == 'cuda') and (ngpu > 1):
        netG = nn.DataParallel(netG, list(range(ngpu)))

    # Apply the weights_init function to randomly initialize all weights.
    netG.apply(weights_init)
else:
    # Resume from checkpoint.  BUG FIX: previously the state_dict was loaded
    # into a throwaway Generator instance and then discarded, so netG always
    # started with fresh random weights; load into the instance we keep.
    state_dict = torch.load(g)
    print('-----------------------------------------------------------------')
    print(state_dict.keys())
    print('-----------------------------------------------------------------')
    netG = Generator(ngpu)
    netG.load_state_dict(state_dict)
    netG = netG.to(device)
    if (device.type == 'cuda') and (ngpu > 1):
        netG = nn.DataParallel(netG, list(range(ngpu)))

# Print the model
print(netG)
# Discriminator checkpoint path; fd.exists() below decides create-vs-resume.
d='/home/ashishmenon/labpc/GAN_KAN/DCGAN/dmodel.pth'
fd=pathlib.Path(d)
class Discriminator(nn.Module):
    """DCGAN discriminator: maps a (N, nc, 64, 64) image to a scalar
    sigmoid probability that the image is real."""
    def __init__(self, ngpu):
        super(Discriminator, self).__init__()
        self.ngpu = ngpu
        self.main = nn.Sequential(
            # input is (nc) x 64 x 64
            nn.Conv2d(nc, ndf, 4, 2, 1, bias=False),
            nn.LeakyReLU(0.2, inplace=True),
            # state size. (ndf) x 32 x 32
            nn.Conv2d(ndf, ndf * 2, 4, 2, 1, bias=False),
            nn.BatchNorm2d(ndf * 2),
            nn.LeakyReLU(0.2, inplace=True),
            # state size. (ndf*2) x 16 x 16
            nn.Conv2d(ndf * 2, ndf * 4, 4, 2, 1, bias=False),
            nn.BatchNorm2d(ndf * 4),
            nn.LeakyReLU(0.2, inplace=True),
            # state size. (ndf*4) x 8 x 8
            nn.Conv2d(ndf * 4, ndf * 8, 4, 2, 1, bias=False),
            nn.BatchNorm2d(ndf * 8),
            nn.LeakyReLU(0.2, inplace=True),
            # state size. (ndf*8) x 4 x 4
            nn.Conv2d(ndf * 8, 1, 4, 1, 0, bias=False),
            nn.Sigmoid()
        )

    def forward(self, input):
        return self.main(input)
if not fd.exists():
    # No saved checkpoint: start from a freshly initialised discriminator.
    netD = Discriminator(ngpu).to(device)
    if (device.type == 'cuda') and (ngpu > 1):
        netD = nn.DataParallel(netD, list(range(ngpu)))
    netD.apply(weights_init)
else:
    # Resume from checkpoint.  BUG FIX: as with the generator, the loaded
    # weights were previously applied to a temporary instance and discarded;
    # load them into the instance we actually keep.
    state_dict = torch.load(d)
    print('-----------------------------------------------------------------')
    print(state_dict.keys())
    print('-----------------------------------------------------------------')
    netD = Discriminator(ngpu)
    netD.load_state_dict(state_dict)
    netD = netD.to(device)
    if (device.type == 'cuda') and (ngpu > 1):
        netD = nn.DataParallel(netD, list(range(ngpu)))

print(netD)
# Binary cross-entropy over the discriminator's sigmoid output.
criterion = nn.BCELoss()

# Create batch of latent vectors that we will use to visualize
# the progression of the generator
fixed_noise = torch.randn(64, nz, 1, 1, device=device)

# Establish convention for real and fake labels during training
real_label = 1
fake_label = 0

# Setup Adam optimizers for both G and D
optimizerD = optim.Adam(netD.parameters(), lr=lr, betas=(beta1, 0.999))
optimizerG = optim.Adam(netG.parameters(), lr=lr, betas=(beta1, 0.999))

# Training bookkeeping.
img_list = []
G_losses = []
D_losses = []
iters = 0
print("Starting Training Loop...")
# For each epoch
for epoch in range(num_epochs):
    j = 0  # counter for image grids saved during this epoch
    # For each batch in the dataloader
    for i, data in enumerate(dataloader):

        ############################
        # (1) Update D network: maximize log(D(x)) + log(1 - D(G(z)))
        ###########################
        ## Train with all-real batch
        netD.zero_grad()
        # Format batch
        real_cpu = data[0].to(device)
        b_size = real_cpu.size(0)
        print(real_cpu.size())  # BUG FIX: was print(real_cpu.size), which printed the bound method
        # BCELoss needs float targets, so build the label tensor as float.
        label = torch.full((b_size,), real_label, dtype=torch.float, device=device)
        # Forward pass real batch through D
        output = netD(real_cpu).view(-1)
        # Calculate loss on all-real batch
        errD_real = criterion(output, label)
        # Calculate gradients for D in backward pass
        errD_real.backward()
        D_x = output.mean().item()

        ## Train with all-fake batch
        # Generate batch of latent vectors
        noise = torch.randn(b_size, nz, 1, 1, device=device)
        # Generate fake image batch with G
        fake = netG(noise)
        print(fake.shape)
        print(b_size)
        label.fill_(fake_label)
        # Classify all fake batch with D (detach so no gradients reach G here)
        output = netD(fake.detach()).view(-1)
        print(output.shape)
        # Calculate D's loss on the all-fake batch
        errD_fake = criterion(output, label)
        # Calculate the gradients for this batch
        errD_fake.backward()
        D_G_z1 = output.mean().item()
        # Add the gradients from the all-real and all-fake batches
        errD = errD_real + errD_fake
        # Update D
        optimizerD.step()

        ############################
        # (2) Update G network: maximize log(D(G(z)))
        ###########################
        noise = torch.randn(b_size, nz, 1, 1, device=device)
        fake = netG(noise)
        netG.zero_grad()
        label.fill_(real_label)  # fake labels are real for generator cost
        # Since we just updated D, perform another forward pass of all-fake batch through D
        output = netD(fake).view(-1)
        # Calculate G's loss based on this output
        errG = criterion(output, label)
        # Calculate gradients for G
        errG.backward()
        D_G_z2 = output.mean().item()
        # Update G
        optimizerG.step()

        # Sample from the fixed latent vectors to visualise G's progress.
        with torch.no_grad():
            fake = netG(fixed_noise).detach().cpu()

        # Output training stats every 50 batches.
        if i % 50 == 0:
            print('[%d/%d][%d/%d]\tLoss_D: %.4f\tLoss_G: %.4f\tD(x): %.4f\tD(G(z)): %.4f / %.4f'
                  % (epoch, num_epochs, i, len(dataloader),
                     errD.item(), errG.item(), D_x, D_G_z1, D_G_z2))

        G_losses.append(errG.item())
        D_losses.append(errD.item())

        # Save image grids only while the losses are within the hand-picked
        # bounds below (generated grid + corresponding real batch).
        if (errG.item() < 8 and errD.item() > 0.2):
            j += 1
            out_dir = './output/epoch{}'.format(epoch)
            # makedirs replaces the old duplicated exists()/mkdir() branches.
            os.makedirs(out_dir, exist_ok=True)
            io.imsave('{}/out{}.png'.format(out_dir, j),
                      np.transpose(vutils.make_grid(fake[:64], padding=2, normalize=True), (1, 2, 0)))
            io.imsave('{}/in{}.png'.format(out_dir, j),
                      np.transpose(vutils.make_grid(data[0].to(device)[:64], padding=5, normalize=True).cpu(), (1, 2, 0)))

        iters += 1
# Plot the per-iteration losses collected during training.
plt.figure(figsize=(10,5))
plt.title("Generator and Discriminator Loss During Training")
plt.plot(G_losses,label="G")
plt.plot(D_losses,label="D")
plt.xlabel("iterations")
plt.ylabel("Loss")
plt.legend()
plt.savefig('Outputcurve.png')
real_batch = next(iter(dataloader))

# BUG FIX: the original saved state_dicts of brand-new, untrained models
# (torch.save(Generator(ngpu).state_dict(), ...)), discarding all training.
# Save the trained networks instead, unwrapping DataParallel so the
# checkpoint keys match a plain Generator/Discriminator on reload.
netG_to_save = netG.module if isinstance(netG, nn.DataParallel) else netG
netD_to_save = netD.module if isinstance(netD, nn.DataParallel) else netD
torch.save(netG_to_save.state_dict(), 'gmodel.pth')
torch.save(netD_to_save.state_dict(), 'dmodel.pth')
| [
"[email protected]"
] | |
2a34fbe18afc155465fd02a040417bafdcb289a9 | fe67a0dda1740f82cf24cdf635466d45cdbbe511 | /Fitness.py | ea4527d7d3e09c789b3907bc940e107882a0abbe | [] | no_license | akm-sabbir/fractal_dimension_calculation | 62fa131b31ed74fb4d2336f087e7c29c12b67c70 | 4b8fac1d03f16e19cc5e9a9c506d09bfc9eaac15 | refs/heads/main | 2023-01-15T11:45:08.001551 | 2020-11-25T17:49:37 | 2020-11-25T17:49:37 | 316,011,476 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 3,792 | py | from collections import defaultdict
import math
import numpy as np
import scipy as sp
import random
from matplotlib import pyplot as plt
from global_configuration import global_configuration
from housdroff import cost_function
import pylab as pl
NFE = 0
from sklearn.linear_model import SGDRegressor
from sklearn.linear_model.logistic import _logistic_loss
##################################
def get_rows():
    """Placeholder hook; not implemented yet, always returns None."""
    return None
def iter_minibatches(X, y, y1, chunksize=10000):
    """Yield successive (X, y, y1) slices of at most *chunksize* rows.

    The number of chunks is driven by len(X); the label sequences are
    sliced with the same indices.
    """
    total = len(X)
    for start in range(0, total, chunksize):
        stop = start + chunksize
        yield X[start:stop], y[start:stop], y1[start:stop]
###########################################################
def random_generator(start, end):
    """Return a uniform random float in the half-open interval [start, end)."""
    span = end - start
    return start + random.random() * span
def TBO(w):
    """Render a Barnsley-fern-style IFS attractor whose last two affine maps
    are parameterised by *w*, then score it with the box-counting cost.

    :param w: sequence of 8 floats; w[0:4] and w[4:8] are the 2x2 matrix
        entries of the third and fourth affine transforms applied inline
        in the loop below.
    :return: value of ``cost_function`` over the generated point cloud.
    """
    iterations = global_configuration.iterations
    # Box sizes used by the box-counting cost function below.
    scales = np.logspace(0.01, 1, num=30, endpoint=False, base=2)
    # NOTE(review): A1..A4, t1..t4 and p1..p4 express the transforms in
    # matrix form but are unused -- the loop applies the coefficients inline
    # with hard-coded integer branch probabilities.  Also, A1 uses 0.17 while
    # the inline branch uses 0.16 -- confirm which value is intended.
    A1 = np.array([[0, 0], [0, 0.17]])
    A2 = np.array([[0.85, 0.04], [-0.04, 0.85]])
    A3 = np.array([[w[0], w[1]], [w[2], w[3]]])
    A4 = np.array([[w[4], w[5]], [w[6], w[7]]])
    t1 = np.array([[0], [0]])
    t2 = np.array([[0], [1.6]])
    t3 = np.array([[0], [1.6]])
    t4 = np.array([[0],[0.44]])
    p1 = 0.01
    p2 = 0.85
    p3 = 0.07
    p4 = 0.07
    x = [0]
    y = [0]
    y1 = [0]   # NOTE(review): y1 is never appended to after this point
    #v = np.array([[0],[0]])
    points = []
    current = 0
    for n in range(1, iterations):
        # Pick one of the four affine maps with probabilities 1/85/7/7 %.
        k = random.randint(1,100)
        #k = random_generator(0,1)
        if k == 1:#if (k <= p1):
            #v= np.dot(A1 , v) + t1
            x.append(0)
            y.append(0.16 * (y[current]))
        elif k >= 2 and k <= 86:#(k < p1+p2):
            #v= np.dot(A2 , v) + t2
            x.append(0.85 * (x[current]) + 0.04 * (y[current]))
            y.append(-0.04 * (x[current]) + 0.85 * (y[current]) + 1.6)
        elif k >= 87 and k <= 93:#( k < p1 + p2 + p3):
            #v= np.dot(A3 , v ) + t3
            x.append(w[0]* (x[current]) - w[1] * (y[current]))
            y.append(w[2] * (x[current]) + w[3] * (y[current]) + 1.6)
        elif k >= 94 and k <= 100:
            #v= np.dot(A4 , v) + t4
            x.append(w[4] * (x[current]) + w[5] * (y[current]))
            y.append(w[6] * (x[current]) + w[7] * (y[current]) + 0.44)
        else:
            pass
        # now, go back and define your (x,y) point as elements of the vector v
        #x.append(v[0][0])
        #y.append(v[1][0])
        points.append((x[current],y[current]))#(v[0][0],v[1][0]))
        current += 1
        #y1.append(np.clip(np.random.rand(), -0.05,0.05))
    #plt.plot(x, y, color='green', marker='.', linestyle='dashed',linewidth=2, markersize=12)
    #plt.show()
    #x = np.array(x)
    #y = np.array(y)
    #y1 = np.array(y1)
    #x = x - np.min(x)
    #y = y - np.min(y)
    #y1 = y1 - np.min(y1)
    #I use the factor 1.1 to ensure that, after dividing, all coordinates lie strictly below 1
    #scale = np.max([np.max(x),np.max(y),np.max(y1)])*1.1
    #x = x*(1./scale)
    #y = y*(1./scale)
    #for (X,Y) in zip(x,y):
    #    points.append((X,Y))
    #y1 = y1*(1./scale)
    # Score the generated attractor with the box-counting (Hausdorff) cost.
    cost = cost_function(pl.array(points),x,y, scales)
    #A = np.transpose(np.concatenate((x,y)))
    #lr = global_configuration.lr
    #for X, Y, Y1 in iter_minibatches(x, y, y1):
    #    model = lr.fit(X.reshape(-1,1 ), Y)#(np.hstack((X.reshape(-1,1 ), Y.reshape(-1,1 ))), Y1)
    #print("model coefficient" + str(global_configuration.lr_model.coef_))
    #m = _logistic_loss(global_configuration.lr_model.coef_, np.hstack((x.reshape(-1,1), y.reshape(-1,1 ))), y1, 1 / global_configuration.lr_model.C)
    #m = _logistic_loss(global_configuration.lr_model.coef_, x.reshape(-1,1), y1, 1 / global_configuration.lr_model.C)
    return cost
def Fitness(x):
    """Evaluate the fitness of parameter vector *x* via the TBO cost."""
    cost = TBO(x)
    return cost
#Fitness([0.20, -0.26, 0.23, 0.22, -0.15, 0.28, 0.26, 0.24])
| [
"[email protected]"
] | |
a2b6b34ff047d0ec1cdb6d18f51231f84e12a928 | f2a2f41641eb56a17009294ff100dc9b39cb774b | /current_session/python/418_redo.py | a14bf61ab00583408d47eab1fcd4075630030f9d | [] | no_license | YJL33/LeetCode | 0e837a419d11d44239d1a692140a1468f6a7d9bf | b4da922c4e8406c486760639b71e3ec50283ca43 | refs/heads/master | 2022-08-13T01:46:14.976758 | 2022-07-24T03:59:52 | 2022-07-24T04:11:32 | 52,939,733 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 840 | py | # naive approach
# fill row by row
from typing import List
class Solution:
def wordsTyping(self, A: List[str], rows: int, cols: int) -> int:
# craft the 'sentence' and count the index
sen = ' '.join(A) + ' '
L = len(sen)
i = 0
for _ in range(rows):
i += cols-1 # move to the end of row
if sen[i%L] == ' ':
i += 1
elif sen[(i+1)%L] == ' ':
i += 2
else:
while i >= 0 and sen[(i-1)%L] != ' ':
i -= 1
return i//L
print(Solution().wordsTyping(sentence = ["hello","world"], rows = 2, cols = 8))
print(Solution().wordsTyping(sentence = ["a", "bcd", "e"], rows = 3, cols = 6))
print(Solution().wordsTyping(sentence = ["i","had","apple","pie"], rows = 4, cols = 5)) | [
"[email protected]"
] | |
ab3cd60d0e056e60ea68e8afd71f7c9ef342215f | 08485eb2b7df0857fe72f33a4b0d6de6ac6ce5b4 | /toycomp/diagnostics.py | 2c919974cd49eae423b8995659fe25488e3e8739 | [] | no_license | sam-roth/toycomp | fd3321eda6903a045d844d160bd867ba9d4323a2 | 0d78e82fd71a62f4934d54080ba0b660bf70e565 | refs/heads/master | 2016-08-09T09:13:56.554620 | 2016-01-06T16:06:48 | 2016-01-06T16:06:48 | 49,043,037 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,965 | py | import collections
import enum
from toycomp import color
from toycomp.translation import *
class DiagnosticSeverity(enum.Enum):
error = 0
class Diagnostic:
def __init__(self, severity, node, message):
self.severity = DiagnosticSeverity(severity)
self.node = node
self.message = message
class DiagnosticConsumer:
def __init__(self):
self.message_count = collections.defaultdict(lambda: 0)
def handle_diagnostic(self, diag):
self.message_count[diag.severity] += 1
def finish(self):
pass
class DiagnosticPrinter(DiagnosticConsumer):
def __init__(self, stream):
super().__init__()
self.stream = stream
def handle_diagnostic(self, diag):
super().handle_diagnostic(diag)
if diag.node and diag.node.source_range:
file, line, col = diag.node.source_range.begin
pos_str = '{}:{}:{}: '.format(file.name, line + 1, col)
squiggly = diag.node.source_range.to_squiggly()
else:
pos_str = ''
squiggly = ''
header = '{}{} {}'.format(pos_str, color.color('magenta', diag.severity.name + ':'), diag.message)
print(header, file=self.stream)
if squiggly:
print(color.color('green', squiggly),
file=self.stream)
def finish(self):
super().finish()
if self.message_count[DiagnosticSeverity.error]:
count = self.message_count[DiagnosticSeverity.error]
print(ntr('{} error generated.', '{} errors generated.', count).format(count),
file=self.stream)
class DiagnosticsEngine:
def __init__(self, consumer):
self.consumer = consumer
def emit(self, diag):
self.consumer.handle_diagnostic(diag)
def error(self, node, message):
self.emit(Diagnostic(DiagnosticSeverity.error,
node,
message))
| [
"[email protected]"
] | |
724cadf242537ffb969fa1b21a1538c10854c032 | 3457c98a4adf068a6ce08e72e22d522737e0ab2d | /manage.py | 6a9914b4e10cab125b7b3f2b11ecfc40279d1571 | [] | no_license | UroojMajeed/eliteshoppy | 733352b78cf360a0715e8dc99ec415a349f7af46 | 634dfdb5e5171b9bb5c9d06e5f92994e95bad544 | refs/heads/main | 2023-06-27T03:12:46.587814 | 2021-07-28T10:13:55 | 2021-07-28T10:13:55 | 381,805,166 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 664 | py | #!/usr/bin/env python
"""Django's command-line utility for administrative tasks."""
import os
import sys
def main():
    """Run administrative tasks."""
    # Point Django at this project's settings module before importing it.
    os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'garments.settings')
    try:
        from django.core.management import execute_from_command_line
    except ImportError as exc:
        raise ImportError(
            "Couldn't import Django. Are you sure it's installed and "
            "available on your PYTHONPATH environment variable? Did you "
            "forget to activate a virtual environment?"
        ) from exc
    # Dispatch to the management command named on the command line.
    execute_from_command_line(sys.argv)


if __name__ == '__main__':
    main()
| [
"[email protected]"
] | |
e74bca37aac7b16ca7a36c9e2412953cf0df4973 | 5bf572a132c30ec1da24aaf1028887d7d5a72499 | /test_apps/python_app/example/flows/o1_testcases/additional_erase.py | ad44933a59ff3165447b87b0fd4e5893fd3f9779 | [
"MIT"
] | permissive | Origen-SDK/o2 | 6f2543dd618d5c9dc56eac5ffaceddd92c2078f0 | 2ffa01378bbeb164b5ec42c588fc08b9f817b13c | refs/heads/master | 2023-08-19T03:57:50.887871 | 2023-08-09T16:32:33 | 2023-08-09T16:32:33 | 223,623,642 | 2 | 0 | MIT | 2023-08-05T22:01:02 | 2019-11-23T17:00:13 | Rust | UTF-8 | Python | false | false | 242 | py | with Flow() as flow:
if flow.options.get("force"):
flow.func("erase_all", number=flow.options["number"])
else:
with flow.if_enable('additional_erase'):
flow.func("erase_all", number=flow.options["number"])
| [
"[email protected]"
] | |
adae6efeb5f3ddabe22da3a3d1f3ac6f2e2ebec1 | c3317e60f8bc802316898ed22dff8901912f561e | /app/render/node.py | dcf433f18edf530979edd6d4ed58856d96395fdc | [
"Apache-2.0"
] | permissive | lechesdarwin/pipol | c725bd6751fe60577a91baa4ca0cf4e874f7f9f6 | 2f7e2af5e3e21a7a95cd24b7a97d7a4a0a958e7c | refs/heads/master | 2022-12-10T08:28:14.902444 | 2020-03-05T05:52:56 | 2020-03-05T05:52:56 | 243,095,078 | 0 | 0 | Apache-2.0 | 2021-06-02T01:06:47 | 2020-02-25T20:24:52 | HTML | UTF-8 | Python | false | false | 1,673 | py | class Node:
def __init__(self,data):
self.left = None
self.rigth = None
self.data = data
def insert(self,data):
if data == self.data:
self.data = data
elif self.data:
if data < self.data:
if self.left is None:
self.left = Node(data)
else:
self.left.insert(data)
elif data > self.data:
if self.rigth is None:
self.rigth = Node(data)
else:
self.rigth.insert(data)
else:
self.data = data
def Print(self):
if self.left:
self.left.Print()
print(self.data)
if self.rigth:
self.rigth.Print()
def find(self,lk):
if lk < self.data:
if self.left is None:
return "{} Not Found".format(lk)
return self.left.find(lk)
elif lk > self.data:
if self.rigth is None:
return "{} Not Found".format(lk)
return self.rigth.find(lk)
else:
print(f"{lk} is Founxd")
def inOrder(root,link):
if root:
inOrder(root.left,link)
link.append(root.data)
inOrder(root.rigth,link)
def preOrder(root,link):
if root:
link.append(root.data)
preOrder(root.left,link)
preOrder(root.rigth,link)
def postOrder(root,link):
if root:
postOrder(root.left,link)
postOrder(root.rigth,link)
link.append(root.data)
tree = Node(3)
tree.insert(4)
tree.insert(2)
tree.insert(2)
tree.insert(2)
tree.insert(8)
| [
"darwabo@[email protected]"
] | darwabo@[email protected] |
fee98ca2cc9aae8d231f68c9da2c1b311c9b0aca | af734fd3b8d557f6dc0bf800be8194fb110c545c | /StartCamera.py | 83b95d7dc4b0060f2c0e71129f9f9a475dba659d | [] | no_license | ghafooretemad/VFRProject | 2f85e3796092b01acc5de46fb289440b2c6ead38 | 49cc38dc5432757731e7344bd061fb732ad93dd8 | refs/heads/master | 2021-01-19T13:24:20.199554 | 2017-09-18T08:51:38 | 2017-09-18T08:51:38 | 100,841,879 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,147 | py | import cv2
from PyQt5.QtWidgets import QWidget, QLabel, QVBoxLayout
from PyQt5.QtGui import QPixmap, QImage
from picamera import PiCamera
from picamera.array import PiRGBArray
from PyQt5 import QtCore, QtWidgets
import time
from CreateButton import *
class StartCamera(QWidget):
def __init__(self, selectedMode, empID, parent = None):
super(StartCamera, self).__init__(parent)
self.parent = parent
self.selectedMode = selectedMode
self.empID = empID
self.counter = 0
self.camera = PiCamera()
self.title = "Starting Camera"
self.layout = QVBoxLayout()
self.width = 300
self.height = 300
self.top = 20
self.left = 30
self.face_cascade = cv2.CascadeClassifier('../../../opencv-3.1.0/data/haarcascades/haarcascade_frontalface_default.xml')
self.setWindowTitle("Capture Face")
self.setupUi()
self.setLayout(self.layout)
def setupUi(self):
self.video_frame = QLabel()
self.layout.addWidget(self.video_frame)
self.setGeometry(self.top, self.left, self.width, self.height)
self.buttons = CreateButton()
self.path = "../../Icons/face.png"
if(self.selectedMode.get("normal", False) == True):
self.normalButton()
if(self.selectedMode.get("excited", False) == True):
self.excitedButton()
if(self.selectedMode.get("glasses", False) == True):
self.glassesButton()
if(self.selectedMode.get("laugh", False) == True):
self.laughButton()
self.closeButton ()
def nextFrameSlot(self):
try:
self.camera.resolution = (640, 480)
self.camera.framerate = 30
rawCapture = PiRGBArray(self.camera, size=(640, 480))
# allow the camera to warmup
time.sleep(0.1)
# capture frames from the camera
for frame in self.camera.capture_continuous(rawCapture, format="bgr", use_video_port=True):
# grab the raw NumPy array representing the image, then initialize the timestamp
self.image = frame.array
self.faces = self.face_cascade.detectMultiScale(self.image, 1.3, 5)
for (x, y, w, h) in self.faces:
self.x = x
self.y = y
self.w = w
self.h = h
cv2.rectangle(self.image, (x, y), (x+w, y+h), (255, 0, 0), 2)
img = QImage(self.image, self.image.shape[1], self.image.shape[0], QImage.Format_RGB888)
# show the frame
pixmap = QPixmap.fromImage(img)
self.video_frame.setPixmap(pixmap)
key = cv2.waitKey(1) & 0xFF
# clear the stream in preparation for the next frame
rawCapture.truncate(0)
if key == ord("q"):
break
except:
QtWidgets.QMessageBox.critical(self, "Starting Camera ", "There is some problem with starting camera!, please check the camera and try again!")
self.parent.close()
self.camera.close()
def start(self):
self.timer = QtCore.QTimer()
self.timer.timeout.connect(self.nextFrameSlot)
self.timer.start(100/30)
def captureNormalFace(self):
try:
self.title = "Normal Face"
sub_face = self.image[self.y:self.y+self.h, self.x:self.x+self.w]
sub_file_name = self.empID+ "n.jpg"
#print("face is : ", sub_face)
cv2.imwrite("Faces/"+sub_file_name, sub_face)
self.photoButton.setEnabled(False)
self.counter +=1
except:
QtWidgets.QMessageBox.warning(self, "Capture Face ", "Face not detected, please try again!")
def captureExcitedFace(self):
try:
self.title = "Excited Face"
sub_face = self.image[self.y:self.y+self.h, self.x:self.x+self.w]
sub_file_name = self.empID + "e.jpg"
#print("face is : ", sub_face)
cv2.imwrite("Faces/"+sub_file_name, sub_face)
self.excited.setEnabled(False)
self.counter +=1
except:
QtWidgets.QMessageBox.warning(self, "Capture Face ", "Face not detected, please try again!")
def captureGlassesFace(self):
try:
self.title = "Glasses Face"
sub_face = self.image[self.y:self.y+self.h, self.x:self.x+self.w]
sub_file_name = self.empID+ "g.jpg"
#print( " face is : ", sub_face)
cv2.imwrite("Faces/"+sub_file_name, sub_face)
self.glasses.setEnabled(False)
self.counter +=1
except:
QtWidgets.QMessageBox.warning(self, "Capture Face ", "Face not detected, please try again!")
def captureLaughFace(self):
try:
self.title = "Laugh Face"
sub_face = self.image[self.y:self.y+self.h, self.x:self.x+self.w]
sub_file_name = self.empID + "l.jpg"
cv2.imwrite("Faces/"+sub_file_name, sub_face)
#print( " face is : ", sub_face)
self.laugh.setEnabled(False)
self.counter +=1
except:
QtWidgets.QMessageBox.warning(self, "Capture Face ", "Face not detected, please try again!")
def normalButton(self):
self.photoButton = self.buttons.getButton("Normal Face", self.path )
self.layout.addWidget(self.photoButton)
self.photoButton.clicked.connect(self.captureNormalFace)
def excitedButton(self):
self.excited = self.buttons.getButton("Excited Face", self.path)
self.layout.addWidget(self.excited)
self.excited.clicked.connect(self.captureExcitedFace)
def glassesButton(self):
self.glasses = self.buttons.getButton("Glasses Face", self.path)
self.layout.addWidget(self.glasses)
self.glasses.clicked.connect(self.captureGlassesFace)
def laughButton(self):
self.laugh = self.buttons.getButton("Laugh Face", self.path)
self.layout.addWidget(self.laugh)
self.laugh.clicked.connect(self.captureLaughFace)
def closeButton(self):
path = "Images/close.png"
self.closeBtn = self.buttons.getButton("Close", path)
self.layout.addWidget(self.closeBtn)
self.closeBtn.clicked.connect(self.closeWindow)
def closeWindow(self):
self.parent.close()
self.cameraFlag = False
if(self.counter == len(self.selectedMode)):
QtWidgets.QMessageBox.information(self, "Employee Registration ", "Record SuccessFully Inserted!")
self.parent.close()
self.camera.close()
else:
QtWidgets.QMessageBox.warning(self, "Employee Registration ", "Record SuccessFully Inserted! But you didn't capture all faces that you select!")
self.parent.close()
self.camera.close()
| [
"[email protected]"
] | |
4fce075b86241af518bd0e8d21c543513a16494d | c6a101547c2b7f36fe83a725974a8a7f02cf176d | /data_structures/graphs/level_of_nodes.py | c453fa9562c5ab62b18e78bce74eb61658ce7534 | [
"MIT"
] | permissive | prabhupant/python-ds | 737cc35574de5c2ece0f0813cf00775324a8dbe7 | f7d6d78fedaf84b7527965bb1798b7a8da989474 | refs/heads/master | 2023-08-22T05:04:22.937675 | 2022-10-04T01:29:39 | 2022-10-04T01:29:39 | 199,366,418 | 2,325 | 704 | MIT | 2022-10-10T13:01:10 | 2019-07-29T02:48:57 | Python | UTF-8 | Python | false | false | 1,238 | py | """
Level is the distance of a node from a source node. This concept can be used to find
the distance between 2 nodes in an unweighted graph as well. A simple BFS traversal
between these 2 nodes will give the level and level will always be the shortest distance
between nodes.
"""
from collections import defaultdict
class Graph:
def __init__(self, vertices):
self.vertices = vertices
self.graph = defaultdict(list)
def add_edge(self, u, v):
self.graph[u].append(v)
self.graph[v].append(u)
def print_levels(self, s):
levels = [None] * self.vertices
levels[s] = 0
queue = []
queue.append(s)
while queue:
s = queue.pop(0)
for i in self.graph[s]:
if levels[i] == None:
levels[i] = levels[s] + 1
queue.append(i)
print('Node \t Level')
for node, level in enumerate(levels):
print(f'{node} \t {level}')
g = Graph(8)
g.add_edge(0, 1)
g.add_edge(0, 2)
g.add_edge(1, 3)
g.add_edge(1, 4)
g.add_edge(1, 5)
g.add_edge(2, 5)
g.add_edge(2, 6)
g.add_edge(6, 7)
g.print_levels(0) | [
"[email protected]"
] | |
06ad6cbe6cb952b15453ae8361cb3ba24b890d50 | c123dc08b530f98365508260b7e2eb536ab232c3 | /src/split.py | b11437a8032048de6f7912a6b298a801a9166326 | [] | no_license | rozentill/GeneChip | beba2e14cd77b2a47f95fed93ac8baa76c9babca | 00cd1f8541e36b2157653b2812d97bd80df15897 | refs/heads/master | 2020-04-06T07:07:31.406084 | 2016-09-03T16:37:35 | 2016-09-03T16:37:35 | 61,384,927 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 569 | py | #!/usr/env/bin
#-*-coding:utf8-*-
import numpy as np
import string
if __name__ == '__main__':
fy = open('../data/E-TABM-185.sdrf.txt','r')
fy.readline()
CellType = []
TotalClass = []
for i in range(0,5896):
label = fy.readline().split('\t')[1:2][0]
if label not in CellType:
CellType.append(label)
TotalClass.append([])
idx = CellType.index(label)
TotalClass[idx].append(i)
for label in CellType:
print label+":"+str(len(TotalClass[CellType.index(label)]))
print CellType
| [
"[email protected]"
] | |
4a83f2d8fbc69fd3dc0229c6326d58186b6bb5b4 | 0f0330ca4549d630e343b0d24e531d7693161017 | /budget/migrations/0099_client_last_active_date.py | 82e1db860591f2f0cad4849a2e4bd6bbc97251db | [] | no_license | EricHanLiu/sparkview | 3e98026c660b1583a7aa1f9c52f063566078df43 | 7ab6d7a089d281b45c3788116018e97a90b81b92 | refs/heads/master | 2022-11-29T02:29:03.132206 | 2020-08-10T21:55:49 | 2020-08-10T21:55:49 | 286,585,993 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 402 | py | # Generated by Django 2.1.1 on 2019-11-12 16:48
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add the nullable ``Client.last_active_date`` datetime field."""

    dependencies = [
        # Must apply after the migration that added Client.region.
        ('budget', '0098_client_region'),
    ]

    operations = [
        migrations.AddField(
            model_name='client',
            name='last_active_date',
            # Nullable with default None so existing rows need no backfill.
            field=models.DateTimeField(default=None, null=True),
        ),
    ]
| [
"[email protected]"
] | |
6ac903b5dad32cd2b6d25528192cef62106aeec5 | b489763137c86282377a5e0bc0af9abac60e1a11 | /boxer.py | 0d03896d744c8cf99a0fedbed247d46193a87a5d | [] | no_license | Yuliiaden/autotesting | 5f7abc516087ce9d3b4b3737648f042c49fe6095 | 484f217e945ff911e9692b507c29287afa2bf768 | refs/heads/master | 2020-04-22T04:30:45.364693 | 2019-02-18T12:28:26 | 2019-02-18T12:28:26 | 170,125,731 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 285 | py | class Boxer:
def __init__(self, age, weight, height):
self.age = age
self.weight = weight
self.height = height
# Roster of sample boxers (same stats as before; keyword order is irrelevant).
PitWilder = Boxer(height=180, weight=78, age=33)
RobeGrant = Boxer(height=185, weight=72, age=28)
TomyHurd = Boxer(height=182, weight=69, age=35)
| [
"[email protected]"
] | |
48f83a6883adac579d2345e972b523e9d16b388f | 83af5ca5ceccd88514d4b7d8eb51c5b57b599c70 | /airflow/dags/etl.py | 1f11b5c027890843a9c2a50833705a4ff65586bf | [] | no_license | gurjarprateek/data-pipelines-with-airflow | c48057a4e3d242d4cfcc4b369256ac589f68df22 | 689b0ce2e01988fe6acd771ffacde4db488c9ec7 | refs/heads/master | 2020-06-30T21:13:35.312924 | 2019-08-07T02:00:08 | 2019-08-07T02:00:08 | 200,953,260 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,236 | py | from datetime import datetime, timedelta
import os
from datetime import datetime, timedelta

from airflow import DAG
from airflow.operators.dummy_operator import DummyOperator
from airflow.operators import (StageToRedshiftOperator, LoadFactOperator,
                               LoadDimensionOperator, DataQualityOperator)
from helpers import SqlQueries

# AWS_KEY = os.environ.get('AWS_KEY')
# AWS_SECRET = os.environ.get('AWS_SECRET')

# Task-level defaults applied to every operator in this DAG.
# NOTE: 'catchup' is a DAG-level setting, not a task default -- putting it in
# default_args is silently ignored by Airflow, so it is passed to DAG() below.
default_args = {
    'owner': 'udacity',
    'start_date': datetime(2019, 8, 3),
    'depends_on_past': False,
    'retries': 0,
    'retry_delay': timedelta(minutes=5),
    'email_on_failure': False
}

dag = DAG('udac_example_dag',
          default_args=default_args,
          description='Load and transform data in Redshift with Airflow',
          schedule_interval='@daily',
          catchup=False
          )

start_operator = DummyOperator(task_id='Begin_execution', dag=dag)

# Copy the raw JSON log/song data from S3 into the Redshift staging tables.
stage_events_to_redshift = StageToRedshiftOperator(
    task_id='Stage_events',
    redshift_conn_id="redshift",
    aws_credentials_id="aws_credentials",
    table="staging_events",
    s3_bucket="udacity-dend/log_data",
    json="auto",
    dag=dag
)

stage_songs_to_redshift = StageToRedshiftOperator(
    task_id='Stage_songs',
    redshift_conn_id="redshift",
    aws_credentials_id="aws_credentials",
    compupdate="off",
    region="us-west-2",
    table="staging_songs",
    s3_bucket="udacity-dend/song_data",
    json="auto",
    dag=dag
)

# Populate the songplays fact table from the staged data.
load_songplays_table = LoadFactOperator(
    task_id='Load_songplays_fact_table',
    redshift_conn_id="redshift",
    table="public.songplays",
    sql_template=SqlQueries.songplay_table_insert,
    dag=dag
)

load_user_dimension_table = LoadDimensionOperator(
    task_id='Load_user_dim_table',
    redshift_conn_id="redshift",
    table="public.users",
    sql_template=SqlQueries.user_table_insert,
    dag=dag
)

load_song_dimension_table = LoadDimensionOperator(
    task_id='Load_song_dim_table',
    redshift_conn_id="redshift",
    table="public.songs",
    sql_template=SqlQueries.song_table_insert,
    dag=dag
)

load_artist_dimension_table = LoadDimensionOperator(
    task_id='Load_artist_dim_table',
    redshift_conn_id="redshift",
    table="public.artists",
    sql_template=SqlQueries.artist_table_insert,
    dag=dag
)

load_time_dimension_table = LoadDimensionOperator(
    task_id='Load_time_dim_table',
    redshift_conn_id="redshift",
    table="public.time",
    sql_template=SqlQueries.time_table_insert,
    dag=dag
)

run_quality_checks = DataQualityOperator(
    task_id='Run_data_quality_checks',
    redshift_conn_id="redshift",
    # Fixed: check the tables this DAG actually loads.  The original listed
    # "stage_events", "stage_songs" and "public.song_plays", none of which
    # exist (the staging tables are "staging_*" and the fact table is
    # "public.songplays").
    table_list=["staging_events", "staging_songs", "public.songplays",
                "public.artists", "public.users", "public.songs",
                "public.time"],
    dag=dag
)

end_operator = DummyOperator(task_id='Stop_execution', dag=dag)

# DAG shape: stage both sources, load the fact table, then the dimensions,
# then validate everything before finishing.
start_operator >> stage_events_to_redshift >> load_songplays_table
start_operator >> stage_songs_to_redshift >> load_songplays_table
load_songplays_table >> load_user_dimension_table >> run_quality_checks
load_songplays_table >> load_song_dimension_table >> run_quality_checks
load_songplays_table >> load_artist_dimension_table >> run_quality_checks
load_songplays_table >> load_time_dimension_table >> run_quality_checks
run_quality_checks >> end_operator
"[email protected]"
] | |
21514dedd2c25854fe859b7604bc56cd227a2b5a | acb8e84e3b9c987fcab341f799f41d5a5ec4d587 | /langs/7/qvy.py | 0d01828a7b63a2d82ffd13d1b87fcd3ed1a3475a | [] | no_license | G4te-Keep3r/HowdyHackers | 46bfad63eafe5ac515da363e1c75fa6f4b9bca32 | fb6d391aaecb60ab5c4650d4ae2ddd599fd85db2 | refs/heads/master | 2020-08-01T12:08:10.782018 | 2016-11-13T20:45:50 | 2016-11-13T20:45:50 | 73,624,224 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 486 | py | import sys
def printFunction(lineRemaining):
    """Print the tokens found between standalone double-quote delimiters.

    lineRemaining is the list of whitespace-split tokens that followed the
    'qVY' keyword.  Lines not delimited by quote tokens print nothing.
    (Python 2 print statements.)
    """
    if lineRemaining[0] == '"' and lineRemaining[-1] == '"':
        if len(lineRemaining) > 2:
            #data to print
            lineRemaining = lineRemaining[1:-1]
            print ' '.join(lineRemaining)
        else:
            # Nothing between the quote tokens: emit an empty line.
            print
def main(fileName):
    """Interpret *fileName*: every line must start with the 'qVY' keyword.

    The remaining tokens of each valid line are handed to printFunction;
    the first malformed line prints 'ERROR' and aborts the run.
    """
    with open(fileName) as f:
        for line in f:
            data = line.split()
            if data[0] == 'qVY':
                printFunction(data[1:])
            else:
                print 'ERROR'
                return


if __name__ == '__main__':
    # Usage: python qvy.py <script-file>
    main(sys.argv[1])
"[email protected]"
] | |
0d93a44969e9307fe1328a44e28a4f6177614a64 | c8e78af14250dccbb2f89214fb2bd00d3812bcdb | /DisasterResponse_code/HerokuTwillo/venv/lib/python2.7/site-packages/mercurial/unionrepo.py | b771b76338b8831099ff5eefb2ad1702a9d31612 | [
"MIT"
] | permissive | weiningb/DisasterResponse | 5d6d5e915e95da9fbab41ed051acca31196bac95 | 8068c0a5e2efb81b34e799434b52cd05cc90b0f1 | refs/heads/master | 2021-01-21T05:40:12.009792 | 2015-11-18T18:56:37 | 2015-11-18T18:56:37 | 46,703,733 | 1 | 0 | null | 2015-11-23T07:29:07 | 2015-11-23T07:29:07 | null | UTF-8 | Python | false | false | 8,690 | py | # unionrepo.py - repository class for viewing union of repository changesets
#
# Derived from bundlerepo.py
# Copyright 2006, 2007 Benoit Boissinot <[email protected]>
# Copyright 2013 Unity Technologies, Mads Kiilerich <[email protected]>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
"""Repository class for "in-memory pull" of one local repository to another,
allowing operations like diff and log with revsets.
"""
from node import nullid
from i18n import _
import os
import util, mdiff, cmdutil, scmutil
import localrepo, changelog, manifest, filelog, revlog
class unionrevlog(revlog.revlog):
    """Read-only revlog presenting the union of this revlog and a second one.

    Revisions numbered up to (and including) ``repotiprev`` belong to the
    first revlog; higher revision numbers are grafted entries whose content
    is fetched from ``revlog2`` by node id.
    """
    def __init__(self, opener, indexfile, revlog2, linkmapper):
        # How it works:
        # To retrieve a revision, we just need to know the node id so we can
        # look it up in revlog2.
        #
        # To differentiate a rev in the second revlog from a rev in the revlog,
        # we check revision against repotiprev.
        opener = scmutil.readonlyvfs(opener)  # the union view must not write
        revlog.revlog.__init__(self, opener, indexfile)
        self.revlog2 = revlog2

        n = len(self)
        self.repotiprev = n - 1
        self.bundlerevs = set() # used by 'bundle()' revset expression
        for rev2 in self.revlog2:
            rev = self.revlog2.index[rev2]
            # rev numbers - in revlog2, very different from self.rev
            _start, _csize, _rsize, _base, linkrev, p1rev, p2rev, node = rev

            if linkmapper is None: # link is to same revlog
                assert linkrev == rev2 # we never link back
                link = n
            else: # rev must be mapped from repo2 cl to unified cl by linkmapper
                link = linkmapper(linkrev)

            if node in self.nodemap:
                # this happens for the common revlog revisions
                self.bundlerevs.add(self.nodemap[node])
                continue

            # Graft the revision into our index; the data fields are None
            # because content for these revs is always read through revlog2.
            p1node = self.revlog2.node(p1rev)
            p2node = self.revlog2.node(p2rev)
            e = (None, None, None, None,
                 link, self.rev(p1node), self.rev(p2node), node)
            self.index.insert(-1, e)
            self.nodemap[node] = n
            self.bundlerevs.add(n)
            n += 1

    def _chunk(self, rev):
        # Grafted revisions (above repotiprev) are resolved by node in revlog2.
        if rev <= self.repotiprev:
            return revlog.revlog._chunk(self, rev)
        return self.revlog2._chunk(self.node(rev))

    def revdiff(self, rev1, rev2):
        """return or calculate a delta between two revisions"""
        if rev1 > self.repotiprev and rev2 > self.repotiprev:
            # Both revisions live in revlog2: let it compute the delta.
            return self.revlog2.revdiff(
                self.revlog2.rev(self.node(rev1)),
                self.revlog2.rev(self.node(rev2)))
        elif rev1 <= self.repotiprev and rev2 <= self.repotiprev:
            return self.baserevdiff(rev1, rev2)

        # The revisions straddle the two revlogs: diff the full texts.
        return mdiff.textdiff(self.revision(self.node(rev1)),
                              self.revision(self.node(rev2)))

    def revision(self, nodeorrev):
        """return an uncompressed revision of a given node or revision
        number.
        """
        if isinstance(nodeorrev, int):
            rev = nodeorrev
            node = self.node(rev)
        else:
            node = nodeorrev
            rev = self.rev(node)

        if node == nullid:
            return ""

        if rev > self.repotiprev:
            # Grafted revision: fetch from revlog2 and mirror its cache here.
            text = self.revlog2.revision(node)
            self._cache = (node, rev, text)
        else:
            text = self.baserevision(rev)
            # already cached
        return text

    def baserevision(self, nodeorrev):
        # Revlog subclasses may override 'revision' method to modify format of
        # content retrieved from revlog. To use unionrevlog with such class one
        # needs to override 'baserevision' and make more specific call here.
        return revlog.revlog.revision(self, nodeorrev)

    def baserevdiff(self, rev1, rev2):
        # Exists for the same purpose as baserevision.
        return revlog.revlog.revdiff(self, rev1, rev2)

    # The union view is read-only: every mutating operation is disabled.
    def addrevision(self, text, transaction, link, p1=None, p2=None, d=None):
        raise NotImplementedError
    def addgroup(self, revs, linkmapper, transaction):
        raise NotImplementedError
    def strip(self, rev, minlink):
        raise NotImplementedError
    def checksize(self):
        raise NotImplementedError
class unionchangelog(unionrevlog, changelog.changelog):
    """Union view over two changelogs.

    No linkmapper is needed: a changelog entry's linkrev points into the
    changelog itself.
    """
    def __init__(self, opener, opener2):
        changelog.changelog.__init__(self, opener)
        linkmapper = None
        changelog2 = changelog.changelog(opener2)
        unionrevlog.__init__(self, opener, self.indexfile, changelog2,
                             linkmapper)

    def baserevision(self, nodeorrev):
        # Although changelog doesn't override 'revision' method, some extensions
        # may replace this class with another that does. Same story with
        # manifest and filelog classes.
        return changelog.changelog.revision(self, nodeorrev)

    def baserevdiff(self, rev1, rev2):
        return changelog.changelog.revdiff(self, rev1, rev2)
class unionmanifest(unionrevlog, manifest.manifest):
    """Union view over two manifest revlogs.

    *linkmapper* maps a linkrev from the second repo's changelog to the
    corresponding revision in the unified changelog.
    """
    def __init__(self, opener, opener2, linkmapper):
        manifest.manifest.__init__(self, opener)
        manifest2 = manifest.manifest(opener2)
        unionrevlog.__init__(self, opener, self.indexfile, manifest2,
                             linkmapper)

    def baserevision(self, nodeorrev):
        # See the note on unionchangelog.baserevision.
        return manifest.manifest.revision(self, nodeorrev)

    def baserevdiff(self, rev1, rev2):
        return manifest.manifest.revdiff(self, rev1, rev2)
class unionfilelog(unionrevlog, filelog.filelog):
    """Union view over one tracked file's filelog in both repositories."""
    def __init__(self, opener, path, opener2, linkmapper, repo):
        filelog.filelog.__init__(self, opener, path)
        filelog2 = filelog.filelog(opener2, path)
        unionrevlog.__init__(self, opener, self.indexfile, filelog2,
                             linkmapper)
        # Back-reference to the owning unionrepository.
        self._repo = repo

    def baserevision(self, nodeorrev):
        # See the note on unionchangelog.baserevision.
        return filelog.filelog.revision(self, nodeorrev)

    def baserevdiff(self, rev1, rev2):
        return filelog.filelog.revdiff(self, rev1, rev2)

    def _file(self, f):
        # NOTE(review): the result of self._repo.file(f) is discarded here,
        # so this method always returns None -- a 'return' looks intended;
        # confirm against upstream Mercurial before changing.
        self._repo.file(f)
class unionpeer(localrepo.localpeer):
    """Peer for a union repository; the view is read-only, so never pushable."""
    def canpush(self):
        return False
class unionrepository(localrepo.localrepository):
    """Local repository presenting the union of two on-disk repositories.

    The first repository provides the base storage; the second one
    (``self.repo2``) is overlaid through the union* revlog classes above.
    """
    def __init__(self, ui, path, path2):
        localrepo.localrepository.__init__(self, ui, path)
        # Keep phase information local so the union view does not publish.
        self.ui.setconfig('phases', 'publish', False, 'unionrepo')

        self._url = 'union:%s+%s' % (util.expandpath(path),
                                     util.expandpath(path2))
        self.repo2 = localrepo.localrepository(ui, path2)

    @localrepo.unfilteredpropertycache
    def changelog(self):
        return unionchangelog(self.svfs, self.repo2.svfs)

    def _clrev(self, rev2):
        """map from repo2 changelog rev to temporary rev in self.changelog"""
        node = self.repo2.changelog.node(rev2)
        return self.changelog.rev(node)

    @localrepo.unfilteredpropertycache
    def manifest(self):
        return unionmanifest(self.svfs, self.repo2.svfs,
                             self._clrev)

    def url(self):
        return self._url

    def file(self, f):
        return unionfilelog(self.svfs, f, self.repo2.svfs,
                            self._clrev, self)

    def close(self):
        # Only repo2 needs an explicit close; the base repo is closed by
        # the normal localrepository machinery.
        self.repo2.close()

    def cancopy(self):
        return False

    def peer(self):
        return unionpeer(self)

    def getcwd(self):
        return os.getcwd() # always outside the repo
def instance(ui, path, create):
    """repository factory for 'union:...' URLs.

    Accepts either 'union:repo1+repo2' or 'union:repo2' (in which case the
    first repository defaults to the enclosing working-directory repo).
    Union repositories can never be created, only viewed.
    """
    if create:
        raise util.Abort(_('cannot create new union repository'))
    parentpath = ui.config("bundle", "mainreporoot", "")
    if not parentpath:
        # try to find the correct path to the working directory repo
        parentpath = cmdutil.findrepo(os.getcwd())
        if parentpath is None:
            parentpath = ''
    if parentpath:
        # Try to make the full path relative so we get a nice, short URL.
        # In particular, we don't want temp dir names in test outputs.
        cwd = os.getcwd()
        if parentpath == cwd:
            parentpath = ''
        else:
            # Append a separator so startswith() only matches whole components.
            cwd = os.path.join(cwd,'')
            if parentpath.startswith(cwd):
                parentpath = parentpath[len(cwd):]
    if path.startswith('union:'):
        s = path.split(":", 1)[1].split("+", 1)
        if len(s) == 1:
            # Single repo given: union it with the enclosing repo.
            repopath, repopath2 = parentpath, s[0]
        else:
            repopath, repopath2 = s
    else:
        repopath, repopath2 = parentpath, path
    return unionrepository(ui, repopath, repopath2)
| [
"[email protected]"
] | |
2b3d0f1caa1fd34f60c2e6919a20fdcc99d3a2eb | 1fd09af918f699710e79d56477b06a69ef68b830 | /node_modules/mongoose/node_modules/mongodb/node_modules/kerberos/build/config.gypi | 8cf1d525a2fe8f7c12bc781074fcf6f2fc3bf585 | [
"Apache-2.0",
"MIT"
] | permissive | embirog/ChatComplete | 08663e0e1daf2f0d863483e36a8114582c2e5a58 | 053b2b945cbf8c33a64cc214d811e667913fc547 | refs/heads/master | 2016-08-11T08:06:39.245371 | 2015-12-27T08:11:34 | 2015-12-27T08:11:34 | 48,615,516 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,781 | gypi | # Do not edit. File was generated by node-gyp's "configure" step
{
"target_defaults": {
"cflags": [],
"default_configuration": "Release",
"defines": [],
"include_dirs": [],
"libraries": []
},
"variables": {
"asan": 0,
"host_arch": "x64",
"icu_data_file": "icudt56l.dat",
"icu_data_in": "../../deps/icu/source/data/in/icudt56l.dat",
"icu_endianness": "l",
"icu_gyp_path": "tools/icu/icu-generic.gyp",
"icu_locales": "en,root",
"icu_path": "./deps/icu",
"icu_small": "true",
"icu_ver_major": "56",
"llvm_version": 0,
"node_byteorder": "little",
"node_install_npm": "true",
"node_prefix": "/usr/local",
"node_release_urlbase": "https://nodejs.org/download/release/",
"node_shared_http_parser": "false",
"node_shared_libuv": "false",
"node_shared_openssl": "false",
"node_shared_zlib": "false",
"node_tag": "",
"node_use_dtrace": "true",
"node_use_etw": "false",
"node_use_lttng": "false",
"node_use_openssl": "true",
"node_use_perfctr": "false",
"openssl_fips": "",
"openssl_no_asm": 0,
"python": "/usr/bin/python",
"target_arch": "x64",
"uv_parent_path": "/deps/uv/",
"uv_use_dtrace": "true",
"v8_enable_gdbjit": 0,
"v8_enable_i18n_support": 1,
"v8_no_strict_aliasing": 1,
"v8_optimized_debug": 0,
"v8_random_seed": 0,
"v8_use_snapshot": 1,
"want_separate_host_toolset": 0,
"nodedir": "/Users/edmilbirog/.node-gyp/4.2.3",
"copy_dev_lib": "true",
"standalone_static_library": 1,
"save_dev": "",
"browser": "",
"viewer": "man",
"rollback": "true",
"usage": "",
"globalignorefile": "/usr/local/etc/npmignore",
"init_author_url": "",
"shell": "/bin/bash",
"parseable": "",
"shrinkwrap": "true",
"init_license": "ISC",
"if_present": "",
"cache_max": "Infinity",
"init_author_email": "",
"sign_git_tag": "",
"cert": "",
"git_tag_version": "true",
"local_address": "",
"long": "",
"fetch_retries": "2",
"npat": "",
"registry": "https://registry.npmjs.org/",
"key": "",
"message": "%s",
"versions": "",
"globalconfig": "/usr/local/etc/npmrc",
"always_auth": "",
"spin": "true",
"cache_lock_retries": "10",
"cafile": "",
"heading": "npm",
"fetch_retry_mintimeout": "10000",
"proprietary_attribs": "true",
"access": "",
"json": "",
"description": "true",
"engine_strict": "",
"https_proxy": "",
"init_module": "/Users/edmilbirog/.npm-init.js",
"userconfig": "/Users/edmilbirog/.npmrc",
"node_version": "4.2.3",
"user": "501",
"editor": "vi",
"save": "",
"tag": "latest",
"global": "",
"optional": "true",
"bin_links": "true",
"force": "",
"searchopts": "",
"depth": "Infinity",
"rebuild_bundle": "true",
"searchsort": "name",
"unicode": "true",
"fetch_retry_maxtimeout": "60000",
"ca": "",
"save_prefix": "^",
"strict_ssl": "true",
"tag_version_prefix": "v",
"dev": "",
"fetch_retry_factor": "10",
"group": "20",
"save_exact": "",
"cache_lock_stale": "60000",
"version": "",
"cache_min": "10",
"cache": "/Users/edmilbirog/.npm",
"searchexclude": "",
"color": "true",
"save_optional": "",
"user_agent": "npm/2.14.7 node/v4.2.3 darwin x64",
"ignore_scripts": "",
"cache_lock_wait": "10000",
"production": "",
"save_bundle": "",
"init_version": "1.0.0",
"umask": "0022",
"git": "git",
"init_author_name": "",
"scope": "",
"onload_script": "",
"tmp": "/var/folders/lf/kfvbx8ts639bdwq3b01wv7pw0000gn/T",
"unsafe_perm": "true",
"link": "",
"prefix": "/usr/local"
}
}
| [
"[email protected]"
] | |
536983414d09b76a9c7d0c576ab743bde1a3706f | 9fb78be7f0720e389939e050f974ecaccc8fb0cf | /modules/system/blueprint.py | 57061075e99dfe96cb9e45c5da0754307089bb62 | [] | no_license | firstPeterParker/mlRig | db20f294ae1a31eb26da2b6bc9ed2be7c316a958 | 5d59c4dcf10e8b3fda5e78b7d4c4f8e4c9c03232 | refs/heads/master | 2021-01-01T03:48:39.502217 | 2016-06-03T19:58:42 | 2016-06-03T19:58:42 | 57,443,546 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 35,536 | py | '''
This is a Blueprint base class module
'''
import os
import maya.cmds as cmds
import system.utils as utils
reload(utils)
import system.group_selected as group_selected
reload(group_selected)
class Blueprint():
def __init__(self,module_name,user_specified_name,joint_info,hook_obj_in):
self.module_name = module_name
self.user_specified_name = user_specified_name
self.module_namespace = self.module_name+"__"+self.user_specified_name
self.container_name = self.module_namespace+":module_container"
self.joint_info = joint_info
self.hook_obj = None
if hook_obj_in != None:
partition_info = hook_obj_in.rpartition("_translation_control")
if partition_info[1] != ""and partition_info[2] == "":
self.hook_obj = hook_obj_in
self.can_be_mirrored = True
self.mirrored = False
# Methods intended for overriding by derived classes
def install_custom(self,joints):
print "install_custom() method is not implemented by derived class"
    def lock_phase1(self):
        """Gather data needed for locking; the base class supplies nothing.

        Derived modules override this to return their lock-phase data.
        """
        return None
def ui_custom(self):
temp = 1
def mirror_custom(self, original_module):
print "mirror_custom() method is not implemented by derived class"
# Base-class methods
    def install(self):
        """Build the blueprint module in the Maya scene.

        Creates the module namespace and group hierarchy, the (hidden)
        joint chain, one translation control per joint, the hook, and a
        stretchy IK segment between each joint pair, all wrapped in a
        locked asset container.
        """
        # Work from the root namespace so the module namespace is top level.
        cmds.namespace(setNamespace = ":")
        cmds.namespace(add=self.module_namespace)

        # Group hierarchy everything in this module lives under.
        self.joints_grp = cmds.group(empty=True, name=self.module_namespace+":joints_grp")
        self.hierarchy_grp = cmds.group(empty=True, name=self.module_namespace+":hierarchy_grp")
        self.ori_ctrl_grp = cmds.group(empty=True, name=self.module_namespace+":orientationControls_grp")
        self.module_grp = cmds.group([self.joints_grp, self.hierarchy_grp, self.ori_ctrl_grp], name=self.module_namespace+":module_grp")

        cmds.container(name=self.container_name, addNode=self.module_grp, ihb=True)

        cmds.select(clear=True)

        index = 0
        joints = []
        for joint in self.joint_info:
            joint_name = joint[0]
            joint_pos = joint[1]

            parent_joint = ""
            if index > 0:
                parent_joint = self.module_namespace+":"+self.joint_info[index-1][0]
                # With the parent selected, cmds.joint parents the new joint to it.
                cmds.select(parent_joint, replace=True)

            joint_name_full = cmds.joint(n=self.module_namespace+":"+joint_name, p=joint_pos)
            joints.append(joint_name_full)

            # Blueprint joints stay hidden; the translation controls are shown.
            cmds.setAttr(joint_name_full+".visibility", 0)

            utils.add_node_to_container(self.container_name, joint_name_full)
            # Publish rotate/rotateOrder so they remain editable after the
            # container is locked below.
            cmds.container(
                self.container_name,
                edit=True,
                publishAndBind=[joint_name_full+".rotate", joint_name+"_R"]
            )
            cmds.container(
                self.container_name,
                edit=True,
                publishAndBind=[joint_name_full+".rotateOrder", joint_name+"_rotateOrder"]
            )

            if index > 0:
                # Orient the parent joint so its X axis aims at this new child.
                cmds.joint(parent_joint, edit=True, orientJoint="xyz", sao="yup")

            index += 1

        if self.mirrored:
            # Translate the stored mirror settings into cmds.mirrorJoint flags.
            mirror_xy = False
            mirror_yz = False
            mirror_xz = False
            if self.mirror_plane == "XY":
                mirror_xy = True
            elif self.mirror_plane == "YZ":
                mirror_yz = True
            elif self.mirror_plane == "XZ":
                mirror_xz = True

            mirror_behavior = False
            if self.rotation_function == "behavior":
                mirror_behavior = True

            mirror_nodes = cmds.mirrorJoint(
                joints[0],
                mirrorXY=mirror_xy,
                mirrorYZ=mirror_yz,
                mirrorXZ=mirror_xz,
                mirrorBehavior=mirror_behavior
            )

            # The original chain is replaced by its mirrored copy.
            cmds.delete(joints)

            mirrored_joints = []
            for node in mirror_nodes:
                if cmds.objectType(node, isType="joint"):
                    mirrored_joints.append(node)
                else:
                    # mirrorJoint can also return non-joint nodes; discard them.
                    cmds.delete(node)

            index = 0
            for joint in mirrored_joints:
                joint_name = self.joint_info[index][0]
                # Renaming appears to restore the original namespaced joint
                # names, keeping the strings stored in 'joints' resolvable
                # below -- verify when touching this.
                new_joint_name = cmds.rename(joint, self.module_namespace+":"+joint_name)
                self.joint_info[index][1] = cmds.xform(new_joint_name, query=True, worldSpace=True, translation=True)
                index += 1

        cmds.parent(joints[0], self.joints_grp, absolute=True)

        # NOTE(review): init_module_trans/initialize_hook are defined further
        # down this class (outside this excerpt).
        self.init_module_trans(self.joint_info[0][1])

        trans_ctrl = []
        for joint in joints:
            trans_ctrl.append(self.create_trans_ctrl_at_joint(joint))

        # The root joint follows its translation control.
        root_joint_point_con = cmds.pointConstraint(
            trans_ctrl[0],
            joints[0],
            maintainOffset=False,
            name=joints[0]+"_pointConstraint"
        )
        utils.add_node_to_container(self.container_name, root_joint_point_con)

        self.initialize_hook(trans_ctrl[0])

        # Set up stretchy joint segment between each consecutive joint pair.
        for index in range(len(joints)-1):
            self.setup_stretchy_jnt_segment(joints[index], joints[index+1])

        self.install_custom(joints)

        utils.force_scene_update()
        # Lock the container so only the published attributes can be edited.
        cmds.lockNode(self.container_name, lock=True, lockUnpublished=True)
def create_trans_ctrl_at_joint(self, joint):
    """Import a translation-control object, attach it to *joint*, and publish
    its translate attribute up onto the module container.

    Returns the name of the newly created control transform.
    """
    # Bring the control geometry in from the tool's .ma library.
    control_file = os.environ["mlrig_tool"] + "/controlobjects/blueprint/translation_control.ma"
    cmds.file(control_file, i=True)

    # Give the imported container a joint-specific name and track it.
    container = cmds.rename("translation_control_container",
                            joint + "_translation_control_container")
    utils.add_node_to_container(self.container_name, container)

    # Prefix every imported member node with the joint name to avoid clashes.
    for member in cmds.container(container, q=True, nodeList=True):
        cmds.rename(member, joint + "_" + member, ignoreShape=True)

    control = joint + "_translation_control"

    # Parent under the module transform and snap onto the joint's position.
    cmds.parent(control, self.module_trans, absolute=True)
    position = cmds.xform(joint, q=True, worldSpace=True, translation=True)
    cmds.xform(control, worldSpace=True, absolute=True, translation=position)

    # Publish translate, first on the control's container, then on the
    # module container, under a namespace-free attribute name.
    attr_name = utils.strip_leading_namespace(joint)[1] + "_T"
    cmds.container(container, edit=True,
                   publishAndBind=[control + ".translate", attr_name])
    cmds.container(self.container_name, edit=True,
                   publishAndBind=[container + "." + attr_name, attr_name])

    return control
def get_trans_ctrl(self, joint_name):
    """Return the translation-control name derived from *joint_name*."""
    return "%s_translation_control" % joint_name
def setup_stretchy_jnt_segment(self, parent_joint, child_joint):
    """Build the stretchy IK segment linking *parent_joint* to *child_joint*,
    driven by the two joints' translation controls.
    """
    parent_control = self.get_trans_ctrl(parent_joint)
    child_control = self.get_trans_ctrl(child_joint)

    # The pole-vector locator rides along with the parent control, offset
    # slightly below it, and stays hidden.
    pv_locator = cmds.spaceLocator(n=parent_control + "_poleVectorLocator")[0]
    pv_group = cmds.group(pv_locator, n=pv_locator + "_parentConstraint_grp")
    cmds.parent(pv_group, self.module_grp, absolute=True)
    parent_constraint = cmds.parentConstraint(parent_control, pv_group, maintainOffset=False)[0]
    cmds.setAttr(pv_locator + ".visibility", 0)
    cmds.setAttr(pv_locator + ".ty", -0.5)

    ik_nodes = utils.basic_stretchy_ik(
        parent_joint, child_joint,
        container=self.container_name,
        lock_min_len=False,
        pole_vector_obj=pv_locator,
        scale_correct_atrr=None
    )
    ik_handle = ik_nodes["ik_handle"]
    root_loc = ik_nodes["root_loc"]
    end_loc = ik_nodes["end_loc"]

    # Mirrored modules built across the XZ plane need the IK chain twisted.
    if self.mirrored and self.mirror_plane == "XZ":
        cmds.setAttr(ik_handle + ".twist", 90)

    child_constraint = cmds.pointConstraint(
        child_control, end_loc,
        maintainOffset=False,
        n=end_loc + "_pointConstraint"
    )[0]

    utils.add_node_to_container(
        self.container_name,
        [pv_group, parent_constraint, child_constraint],
        ihb=True
    )

    # House the IK machinery under the joints group and keep it hidden.
    for ik_node in [ik_handle, root_loc, end_loc]:
        cmds.parent(ik_node, self.joints_grp, absolute=True)
        cmds.setAttr(ik_node + ".visibility", 0)

    self.create_hierarchy(parent_joint, child_joint)
def create_hierarchy(self, parent_joint, child_joint):
    """Create the visual hierarchy representation between two joints."""
    representation = self.create_stretchy_obj(
        "/controlobjects/blueprint/hierarchy_representation.ma",
        "hierarchy_representation_container",
        "hierarchy_representation",
        parent_joint,
        child_joint
    )
    # Index 2 of the returned tuple is the constrained group.
    cmds.parent(representation[2], self.hierarchy_grp, relative=True)
def create_stretchy_obj(self, obj_relative_filepath, obj_container_name, obj_name, parent_joint, child_joint):
    """Import a representation object and rig it to stretch between two joints.

    The object is parent-constrained to *parent_joint*, and its X scale is
    driven by *child_joint*'s translateX so it always spans the segment.
    Returns a tuple (container, object, constrained_group).
    """
    cmds.file(os.environ["mlrig_tool"] + obj_relative_filepath, i=True)

    # Namespace the imported container and all of its member nodes by joint.
    container = cmds.rename(obj_container_name, parent_joint + "_" + obj_container_name)
    for member in cmds.container(container, q=True, nodeList=True):
        cmds.rename(member, parent_joint + "_" + member, ignoreShape=True)

    stretch_obj = parent_joint + "_" + obj_name

    # Constraints act on a wrapper group rather than on the object itself.
    constrained_grp = cmds.group(empty=True, name=stretch_obj + "_parentConstraint_grp")
    cmds.parent(stretch_obj, constrained_grp, absolute=True)
    parent_constraint = cmds.parentConstraint(parent_joint, constrained_grp, maintainOffset=False)[0]

    # Stretch: the child joint's local X offset equals the segment length.
    cmds.connectAttr(child_joint + ".translateX", constrained_grp + ".scaleX")
    scale_constraint = cmds.scaleConstraint(self.module_trans, constrained_grp, skip=["x"], maintainOffset=0)[0]

    utils.add_node_to_container(container, [constrained_grp, parent_constraint, scale_constraint], ihb=True)
    utils.add_node_to_container(self.container_name, container)

    return (container, stretch_obj, constrained_grp)
def init_module_trans(self, root_pos):
    """Create the module's top-level transform control at *root_pos*.

    Imports the controlGroup control, renames it into this module's
    namespace, and publishes its translate/rotate/globalScale on the module
    container.  For mirrored modules the transform is flipped across the
    mirror plane and matched to the original module's scale.
    """
    ctrl_grp_file = os.environ["mlrig_tool"]+"/controlobjects/blueprint/controlGroup_control.ma"
    cmds.file(ctrl_grp_file, i=True)
    self.module_trans = cmds.rename("controlGroup_control", self.module_namespace+":module_transform")
    cmds.xform(self.module_trans, worldSpace=True, absolute=True, translation=root_pos)
    # mirroring method
    if self.mirrored:
        # Duplicate the original module's transform under a negatively
        # scaled group, so constraining to the duplicate gives us the
        # mirrored orientation; the temp nodes are deleted afterwards.
        duplicate_transform = cmds.duplicate(self.original_module+":module_transform", parentOnly=True, name="TEMP_TRANSFORM")[0]
        empty_group = cmds.group(empty=True)
        cmds.parent(duplicate_transform, empty_group, absolute=True)
        # Scale axis is the one perpendicular to the mirror plane.
        scale_attr = ".scaleX"
        if self.mirror_plane == "XZ":
            scale_attr = ".scaleY"
        elif self.mirror_plane == "XY":
            scale_attr = ".scaleZ"
        cmds.setAttr(empty_group+scale_attr, -1)
        parent_constraint = cmds.parentConstraint(duplicate_transform, self.module_trans, maintainOffset=False)
        cmds.delete(parent_constraint)
        cmds.delete(empty_group)
        # Read the original module's effective scale via a throwaway
        # scale-constrained locator, then apply it to the new transform.
        temp_locator = cmds.spaceLocator()[0]
        scale_constraint = cmds.scaleConstraint(self.original_module+":module_transform", temp_locator, maintainOffset=False)[0]
        scale = cmds.getAttr(temp_locator+".scaleX")
        cmds.delete([temp_locator, scale_constraint])
        cmds.xform(self.module_trans, objectSpace=True, scale=[scale, scale, scale])
    utils.add_node_to_container(self.container_name, self.module_trans, ihb=True)
    # Setup global scaling: scaleY drives all three axes and is aliased to
    # a single "globalScale" attribute.
    cmds.connectAttr(self.module_trans+".scaleY", self.module_trans+".scaleX")
    cmds.connectAttr(self.module_trans+".scaleY", self.module_trans+".scaleZ")
    cmds.aliasAttr("globalScale", self.module_trans+".scaleY")
    cmds.container(self.container_name, edit=True, publishAndBind=[self.module_trans+".translate", "moduleTransform_T"])
    cmds.container(self.container_name, edit=True, publishAndBind=[self.module_trans+".rotate", "moduleTransform_R"])
    cmds.container(self.container_name, edit=True, publishAndBind=[self.module_trans+".globalScale", "moduleTransform_globalScale"])
def delete_hierarchy(self, parent_joint):
    """Remove the hierarchy representation hanging off *parent_joint*."""
    cmds.delete(parent_joint + "_hierarchy_representation_container")
def create_ori_ctrl(self, parent_joint, child_joint):
    """Replace the hierarchy representation between two joints with an
    orientation control, publishing its rotateX on the module container.

    Returns the orientation control's name.
    """
    # The orientation control takes the place of the plain hierarchy arrow.
    self.delete_hierarchy(parent_joint)

    container, control, constrained_grp = self.create_stretchy_obj(
        "/controlobjects/blueprint/orientation_control.ma",
        "orientation_control_container",
        "orientation_control",
        parent_joint,
        child_joint
    )
    cmds.parent(constrained_grp, self.ori_ctrl_grp, relative=True)

    # Publish rotateX under a namespace-free attribute name.
    attr_name = utils.strip_all_namespaces(parent_joint)[1] + "_orientation"
    cmds.container(container, edit=True, publishAndBind=[control + ".rotateX", attr_name])
    cmds.container(self.container_name, edit=True, publishAndBind=[container + "." + attr_name, attr_name])

    return control
def get_joints(self):
    """Return the fully namespaced names of every joint in this module."""
    prefix = self.module_namespace + ":"
    return [prefix + info[0] for info in self.joint_info]
def get_ori_ctrl(self, joint_name):
    """Return the orientation-control name derived from *joint_name*."""
    return "%s_orientation_control" % joint_name
def ori_ctrl_joint_get_ori(self, joint, clean_parent):
    """Bake the orientation-control value of *joint* into joint orients.

    Duplicates *joint* under *clean_parent*, freezes its rotations, applies
    the orientation control's rotateX, freezes again, then reads back the
    resulting jointOrient values.

    Returns ((orientX, orientY, orientZ), duplicated_joint).
    """
    new_clean_parent = cmds.duplicate(joint, parentOnly=True)[0]
    # Re-parent only if the duplicate did not already land under clean_parent.
    # (Idiom fix: was "not clean_parent in ...".)
    if clean_parent not in cmds.listRelatives(new_clean_parent, parent=True):
        cmds.parent(new_clean_parent, clean_parent, absolute=True)
    # Freeze so any inherited rotation is pushed into the joint orient.
    cmds.makeIdentity(
        new_clean_parent,
        apply=True,
        rotate=True,
        scale=False,
        translate=False
    )
    # Apply the user-authored twist from the orientation control, then
    # freeze again so it too becomes part of the joint orient.
    ori_ctrl = self.get_ori_ctrl(joint)
    cmds.setAttr(new_clean_parent+".rotateX", cmds.getAttr(ori_ctrl+".rotateX"))
    cmds.makeIdentity(
        new_clean_parent,
        apply=True,
        rotate=True,
        scale=False,
        translate=False
    )
    orient_x = cmds.getAttr(new_clean_parent+".jointOrientX")
    orient_y = cmds.getAttr(new_clean_parent+".jointOrientY")
    orient_z = cmds.getAttr(new_clean_parent+".jointOrientZ")
    ori_values = (orient_x, orient_y, orient_z)
    return (ori_values, new_clean_parent)
def lock_phase2(self, module_info):
    """Rebuild this blueprint module as plain joint chains, ready for rigging.

    *module_info* layout:
      [0] joint world positions
      [1] orientation pair: (explicit_orientations, None) or
          (None, (orientJoint, secondaryAxisOrient) designations)
      [2] per-joint rotation orders
      [3] per-joint preferred angles, or None
      [4] hook object (not used here; consumed by lock_phase3)
      [5] whether the root joint is translatable

    Deletes the blueprint controls and replaces them with a blueprint joint
    chain plus a hidden creation-pose duplicate, wired through utility nodes
    that blend toward the creation pose via SETTINGS.creationPoseWeight,
    all collected into new blueprint/module containers.

    Fixes vs. previous revision: preferred angles are now read from the
    preferred-angle list (they were mistakenly read from the rotation-order
    list), and a stray Python-2-only debug print was removed.
    """
    joint_pos = module_info[0]
    num_joints = len(joint_pos)
    joint_ories = module_info[1]
    # Exactly one slot of the orientation pair is None; the None-first form
    # means "orient by axis designations" rather than explicit values.
    ori_with_axis = False
    if joint_ories[0] == None:
        ori_with_axis = True
        joint_ories = joint_ories[1]
    else:
        joint_ories = joint_ories[0]
    num_ori = len(joint_ories)
    joint_rotation_orders = module_info[2]
    num_rotation_orders = len(joint_rotation_orders)
    joint_pref_angle = module_info[3]
    num_pref_angle = 0
    if joint_pref_angle != None:
        num_pref_angle = len(joint_pref_angle)
    hook_obj = module_info[4]  # consumed later, by lock_phase3
    root_trans = module_info[5]

    # Delete our blueprint controls.
    cmds.lockNode(self.container_name, lock=False, lockUnpublished=False)
    cmds.delete(self.container_name)
    cmds.namespace(setNamespace=":")

    # Single-joint modules get a slightly larger display radius.
    joint_radius = 1
    if num_joints == 1:
        joint_radius = 1.5

    new_joints = []
    for i in range(num_joints):
        new_joint = ""
        cmds.select(clear=True)
        if ori_with_axis:
            new_joint = cmds.joint(
                n=self.module_namespace+":blueprint_"+self.joint_info[i][0],
                p=joint_pos[i],
                rotationOrder="xyz",
                radius=joint_radius
            )
            if i != 0:
                cmds.parent(new_joint, new_joints[i-1], absolute=True)
                offset_index = i - 1
                if offset_index < num_ori:
                    # Orient the PARENT joint now that its child exists.
                    cmds.joint(
                        new_joints[offset_index],
                        edit=True,
                        oj=joint_ories[offset_index][0],
                        sao=joint_ories[offset_index][1]
                    )
                    cmds.makeIdentity(new_joint, rotate=True, apply=True)
        else:
            if i != 0:
                cmds.select(new_joints[i-1])
            joint_ori = [0.0, 0.0, 0.0]
            if i < num_ori:
                joint_ori = [joint_ories[i][0], joint_ories[i][1], joint_ories[i][2]]
            new_joint = cmds.joint(
                n=self.module_namespace+":blueprint_"+self.joint_info[i][0],
                p=joint_pos[i],
                orientation=joint_ori,
                rotationOrder="xyz",
                radius=joint_radius
            )
        new_joints.append(new_joint)
        if i < num_rotation_orders:
            cmds.setAttr(new_joint+".rotateOrder", int(joint_rotation_orders[i]))
        if i < num_pref_angle:
            # BUG FIX: preferred angles come from joint_pref_angle, not
            # from the rotation-order list.
            cmds.setAttr(new_joint+".preferredAngleX", joint_pref_angle[i][0])
            cmds.setAttr(new_joint+".preferredAngleY", joint_pref_angle[i][1])
            cmds.setAttr(new_joint+".preferredAngleZ", joint_pref_angle[i][2])
        cmds.setAttr(new_joint+".segmentScaleCompensate", 0)

    blueprint_grp = cmds.group(empty=True, name=self.module_namespace+":blueprint_joints_grp")
    cmds.parent(new_joints[0], blueprint_grp, absolute=True)

    # Duplicate the chain to preserve the creation pose; duplicates are
    # renamed into a creationPose_ prefix and hidden.
    creation_pose_grp_nodes = cmds.duplicate(
        blueprint_grp,
        name=self.module_namespace+":creationPose_joints_grp",
        renameChildren=True
    )
    creation_pose_grp = creation_pose_grp_nodes[0]
    creation_pose_grp_nodes.pop(0)
    i = 0
    for node in creation_pose_grp_nodes:
        rename_node = cmds.rename(node, self.module_namespace+":creationPose_"+self.joint_info[i][0])
        cmds.setAttr(rename_node+".visibility", 0)
        i += 1

    cmds.select(blueprint_grp, replace=True)
    cmds.addAttr(at="bool", defaultValue=0, ln="controlModuleInstalled", k=False)

    hook_grp = cmds.group(empty=True, name=self.module_namespace+":HOOK_IN")
    for obj in [blueprint_grp, creation_pose_grp]:
        cmds.parent(obj, hook_grp, absolute=True)

    # SETTINGS locator carries the module-level state attributes.
    settings_locator = cmds.spaceLocator(n=self.module_namespace+":SETTINGS")[0]
    cmds.setAttr(settings_locator+".visibility", 0)
    cmds.select(settings_locator, replace=True)
    cmds.addAttr(at="enum", ln="activeModule", en="None:", k=False)
    cmds.addAttr(at="float", ln="creationPoseWeight", defaultValue=1, k=False)

    i = 0
    utility_nodes = []
    for joint in new_joints:
        if i < (num_joints-1) or num_joints == 1:
            # Rotations: a plusMinusAverage sums inputs into .rotate; slot
            # [0] is fed by a dummy multiply that later modules can drive.
            add_node = cmds.shadingNode("plusMinusAverage", n=joint+"_addRotations", asUtility=True)
            cmds.connectAttr(add_node+".output3D", joint+".rotate", force=True)
            utility_nodes.append(add_node)
            dummy_rotations_multiply = cmds.shadingNode(
                "multiplyDivide",
                n=joint+"_dummyRotationsMultiply",
                asUtility=True
            )
            cmds.connectAttr(dummy_rotations_multiply+".output", add_node+".input3D[0]", force=True)
            utility_nodes.append(dummy_rotations_multiply)
        if i > 0:
            # Non-root joints: translateX blends toward the creation-pose
            # value as creationPoseWeight approaches 1.
            original_tx = cmds.getAttr(joint+".tx")
            add_tx_node = cmds.shadingNode("plusMinusAverage", n=joint+"_addTx", asUtility=True)
            cmds.connectAttr(add_tx_node+".output1D", joint+".translateX", force=True)
            utility_nodes.append(add_tx_node)
            original_tx_multiply = cmds.shadingNode("multiplyDivide", n=joint+"_original_tx", asUtility=True)
            cmds.setAttr(original_tx_multiply+".input1X", original_tx, lock=True)
            cmds.connectAttr(settings_locator+".creationPoseWeight", original_tx_multiply+".input2X", force=True)
            cmds.connectAttr(original_tx_multiply+".outputX", add_tx_node+".input1D[0]", force=True)
            utility_nodes.append(original_tx_multiply)
        else:
            if root_trans:
                # Translatable root: all three translate channels blend.
                original_translates = cmds.getAttr(joint+".translate")[0]
                add_translate_node = cmds.shadingNode("plusMinusAverage", n=joint+"_addTranslate", asUtility=True)
                cmds.connectAttr(add_translate_node+".output3D", joint+".translate", force=True)
                utility_nodes.append(add_translate_node)
                original_translate_multiply = cmds.shadingNode("multiplyDivide", n=joint+"_original_Translate", asUtility=True)
                cmds.setAttr(
                    original_translate_multiply+".input1",
                    original_translates[0],
                    original_translates[1],
                    original_translates[2],
                    type="double3"
                )
                for attr in ["X", "Y", "Z"]:
                    cmds.connectAttr(settings_locator+".creationPoseWeight", original_translate_multiply+".input2"+attr)
                cmds.connectAttr(original_translate_multiply+".output", add_translate_node+".input3D[0]", force=True)
                utility_nodes.append(original_translate_multiply)
        # Scale: every joint's scale blends toward the creation pose.
        original_scale = cmds.getAttr(joint+".scale")[0]
        add_scale_node = cmds.shadingNode("plusMinusAverage", n=joint+"_addScale", asUtility=True)
        cmds.connectAttr(add_scale_node+".output3D", joint+".scale", force=True)
        utility_nodes.append(add_scale_node)
        original_scale_multiply = cmds.shadingNode("multiplyDivide", n=joint+"_original_scale", asUtility=True)
        cmds.setAttr(
            original_scale_multiply+".input1",
            original_scale[0],
            original_scale[1],
            original_scale[2],
            type="double3"
        )
        for attr in ["X", "Y", "Z"]:
            cmds.connectAttr(settings_locator+".creationPoseWeight", original_scale_multiply+".input2"+attr)
        cmds.connectAttr(original_scale_multiply+".output", add_scale_node+".input3D[0]", force=True)
        utility_nodes.append(original_scale_multiply)
        i += 1

    blueprint_nodes = utility_nodes
    blueprint_nodes.append(blueprint_grp)
    blueprint_nodes.append(creation_pose_grp)
    blueprint_container = cmds.container(n=self.module_namespace+":blueprint_container")
    utils.add_node_to_container(blueprint_container, blueprint_nodes, ihb=True)

    module_grp = cmds.group(empty=True, name=self.module_namespace+":module_grp")
    for obj in [hook_grp, settings_locator]:
        cmds.parent(obj, module_grp, absolute=True)

    module_container = cmds.container(n=self.module_namespace+":module_container")
    utils.add_node_to_container(
        module_container,
        [module_grp, settings_locator, blueprint_container, hook_grp],
        includeShapes=True
    )
    cmds.container(
        module_container,
        edit=True,
        publishAndBind=[settings_locator+".activeModule", "activeModule"]
    )
    cmds.container(
        module_container,
        edit=True,
        publishAndBind=[settings_locator+".creationPoseWeight", "creationPoseWeight"]
    )

    cmds.select(module_grp)
    cmds.addAttr(at="float", longName="hierarchicalScale")
    cmds.connectAttr(hook_grp+".scaleY", module_grp+".hierarchicalScale")
def ui(self, blueprint_ui_instance, parent_column_layout):
    """Store the blueprint UI context, then build module-specific controls."""
    # Keep references so ui_custom() (and later callbacks) can reach the UI.
    self.blueprint_ui_instance = blueprint_ui_instance
    self.parent_column_layout = parent_column_layout
    self.ui_custom()
def create_rotation_order_ui_control(self, joint):
    """Add an attrControlGrp letting the user edit *joint*'s rotate order,
    labelled with the joint's namespace-free name.
    """
    joint_name = utils.strip_all_namespaces(joint)[1]
    # The widget is parented by Maya's current UI layout; the returned
    # control name was previously stored in an unused local, now dropped.
    cmds.attrControlGrp(attribute=joint+".rotateOrder", label=joint_name)
def delete(self):
    """Delete this module from the scene.

    Any other blueprint modules hooked to one of this module's translation
    controls are first re-hooked to nothing, then the container and the
    module namespace are removed.  If the module lived inside a group that
    is left with no transform children, that group is ungrouped too.
    """
    cmds.lockNode(self.container_name, lock=False, lockUnpublished=False)
    valid_module_info = utils.find_all_module_names("/modules/blueprint")
    valid_modules = valid_module_info[0]
    valid_module_names = valid_module_info[1]
    # Find every other module instance connected to one of our controls.
    hooked_modules = set()
    for joint_info in self.joint_info:
        joint = joint_info[0]
        trans_control = self.get_trans_ctrl(self.module_namespace+":"+joint)
        connections = cmds.listConnections(trans_control)
        for connection in connections:
            module_instance = utils.strip_leading_namespace(connection)
            if module_instance != None:
                # Namespaces look like "moduleName__userName".
                split_string = module_instance[0].partition("__")
                if module_instance[0] != self.module_namespace and split_string[0] in valid_module_names:
                    index = valid_module_names.index(split_string[0])
                    hooked_modules.add( (valid_modules[index], split_string[2]) )
    # Unhook those modules before we disappear out from under them.
    for module in hooked_modules:
        mod = __import__("blueprint."+module[0], {}, {}, [module[0]])
        module_class = getattr(mod, mod.CLASS_NAME)
        module_inst = module_class(module[1], None)
        module_inst.rehook(None)
    # Remember our parent group before deleting, so we can clean it up.
    module_trans = self.module_namespace+":module_transform"
    module_trans_parent = cmds.listRelatives(module_trans, parent=True)
    cmds.delete(self.container_name)
    cmds.namespace(setNamespace=":")
    cmds.namespace(removeNamespace=self.module_namespace)
    if module_trans_parent != None:
        parent_group = module_trans_parent[0]
        children = cmds.listRelatives(parent_group, children=True)
        children = cmds.ls(children, transforms=True)
        # Ungroup the parent if this module was its last transform child.
        if len(children) == 0:
            cmds.select(parent_group, replace=True)
            group_selected.UngroupSelected()
def rename_module_instance(self, new_name):
    """Rename this module instance to *new_name*, moving its namespace.

    Returns True on success (or when the name is unchanged); False when the
    requested name is already taken by another blueprint instance.
    """
    if new_name == self.user_specified_name:
        return True
    if utils.does_blueprint_user_specified_name_exist(new_name):
        cmds.confirmDialog(
            title="Name Conflict",
            message="Name \""+new_name+"\" already exists, aborting rename",
            button=["Accept"],
            defaultButton="Accept"
        )
        return False
    else:
        new_namespace = self.module_name+"__"+new_name
        cmds.lockNode(self.container_name, lock=False, lockUnpublished=False)
        cmds.namespace(setNamespace=":")
        cmds.namespace(add=new_namespace)
        cmds.namespace(setNamespace=":")
        cmds.namespace(moveNamespace=[self.module_namespace, new_namespace])
        cmds.namespace(removeNamespace=self.module_namespace)
        self.module_namespace = new_namespace
        # BUG FIX: container names use a single ":" namespace separator.
        # The previous "::" built a name that never matches the real
        # container, so the final lockNode targeted a nonexistent node.
        self.container_name = self.module_namespace+":module_container"
        cmds.lockNode(self.container_name, lock=True, lockUnpublished=True)
        return True
def initialize_hook(self, root_trans_control):
    """Create the hook system connecting this module's root to a hook object.

    Builds a hidden two-joint stretchy-IK chain from the root translation
    control to self.hook_obj (defaulting to a hidden "unhookedTarget"
    locator near the root), plus a visual hook representation, all housed
    in a dedicated hook container.
    """
    # Fallback target: a hidden locator pinned just above the root control.
    unhooked_locator = cmds.spaceLocator(name=self.module_namespace+":unhookedTarget")[0]
    cmds.pointConstraint(root_trans_control, unhooked_locator, offset=[0, 0.001, 0])
    cmds.setAttr(unhooked_locator+".visibility", 0)
    if self.hook_obj == None:
        self.hook_obj = unhooked_locator
    root_pos = cmds.xform(root_trans_control, q=True, worldSpace=True, translation=True)
    target_pos = cmds.xform(self.hook_obj, q=True, worldSpace=True, translation=True)
    cmds.select(clear=True)
    # Hidden root/target joints spanning from the root control to the hook.
    root_joint_without_namespace = "hook_root_joint"
    root_joint = cmds.joint(name=self.module_namespace+":"+root_joint_without_namespace, p=root_pos)
    cmds.setAttr(root_joint+".visibility", 0)
    target_joint_without_namespace = "hook_target_joint"
    target_joint = cmds.joint(name=self.module_namespace+":"+target_joint_without_namespace, p=target_pos)
    cmds.setAttr(target_joint+".visibility", 0)
    cmds.joint(root_joint, edit=True, orientJoint="xyz", sao="yup")
    # Container method for the objects being created in the hook phase
    hook_grp = cmds.group([root_joint, unhooked_locator], name=self.module_namespace+":hook_grp", parent=self.module_grp)
    hook_container = cmds.container(name=self.module_namespace+":hook_container")
    utils.add_node_to_container(hook_container, hook_grp, ihb=True)
    utils.add_node_to_container(self.container_name, hook_container)
    for joint in [root_joint, target_joint]:
        joint_name = utils.strip_all_namespaces(joint)[1]
        cmds.container(hook_container, edit=True, publishAndBind=[joint+".rotate", joint_name+"_R"])
    # Ik functionality for the hook joints
    ik_nodes = utils.basic_stretchy_ik(root_joint, target_joint, hook_container, lock_min_len=False)
    ik_handle = ik_nodes["ik_handle"]
    root_loc = ik_nodes["root_loc"]
    end_loc = ik_nodes["end_loc"]
    pole_vector_loc = ik_nodes["pole_vector_obj"]
    # Root follows the root control; end follows the hook object.  The end
    # constraint gets a well-known name so rehook() can rewire it later.
    root_point_con = cmds.pointConstraint(
        root_trans_control,
        root_joint,
        maintainOffset=False,
        name=root_joint+"_pointConstraint"
    )[0]
    target_point_con = cmds.pointConstraint(
        self.hook_obj, end_loc,
        maintainOffset=False,
        name=self.module_namespace+":hook_pointConstraint"
    )[0]
    utils.add_node_to_container(hook_container, [root_point_con, target_point_con])
    # Tuck the IK machinery into the hook group, hidden.
    for node in [ik_handle, root_loc, end_loc, pole_vector_loc]:
        cmds.parent(node, hook_grp, absolute=True)
        cmds.setAttr(node+".visibility", 0)
    # Visual representation stretching between the two hook joints.
    object_nodes = self.create_stretchy_obj(
        "/controlobjects/blueprint/hook_representation.ma",
        "hook_representation_container",
        "hook_representation",
        root_joint,
        target_joint
    )
    constrained_grp = object_nodes[2]
    cmds.parent(constrained_grp, hook_grp, absolute=True)
    # Move the representation container out of the module container and
    # into the hook container.
    hook_representation_container = object_nodes[0]
    cmds.container(self.container_name, edit=True, removeNode=hook_representation_container)
    utils.add_node_to_container(hook_container, hook_representation_container)
def rehook(self, new_hook_obj):
    """Re-point this module's hook at *new_hook_obj* (None to unhook).

    Only another module's translation control is accepted as a target;
    anything else falls back to this module's hidden unhookedTarget
    locator.  The hook point constraint's target[0] connections are rewired
    directly rather than rebuilding the constraint.
    """
    old_hook_obj = self.find_hook_obj()
    self.hook_obj = self.module_namespace+":unhookedTarget"
    if new_hook_obj != None:
        # Accept only names that END with "_translation_control" and belong
        # to a different module than ourselves.
        if new_hook_obj.find("_translation_control") != -1:
            split_string = new_hook_obj.split("_translation_control")
            if split_string[1] == "":
                if utils.strip_leading_namespace(new_hook_obj)[0] != self.module_namespace:
                    self.hook_obj = new_hook_obj
    if self.hook_obj == old_hook_obj:
        return
    self.unconstrain_root_from_hook()
    cmds.lockNode(self.container_name, lock=False, lockUnpublished=False)
    # Rewire the existing point constraint's first target in place.
    hook_constraint = self.module_namespace+":hook_pointConstraint"
    cmds.connectAttr(self.hook_obj+".parentMatrix[0]", hook_constraint+".target[0].targetParentMatrix", force=True)
    cmds.connectAttr(self.hook_obj+".translate", hook_constraint+".target[0].targetTranslate", force=True)
    cmds.connectAttr(self.hook_obj+".rotatePivot", hook_constraint+".target[0].targetRotatePivot", force=True)
    cmds.connectAttr(self.hook_obj+".rotatePivotTranslate", hook_constraint+".target[0].targetRotateTranslate", force=True)
    cmds.lockNode(self.container_name, lock=True, lockUnpublished=True)
def find_hook_obj(self):
    """Return the object currently driving this module's hook constraint."""
    constraint = self.module_namespace + ":hook_pointConstraint"
    source_attr = cmds.connectionInfo(constraint + ".target[0].targetParentMatrix", sourceFromDestination=True)
    # Strip the trailing ".attribute" portion to recover the node name.
    return str(source_attr).rpartition(".")[0]
def find_hook_obj_for_lock(self):
    """Return the hook object to use during locking, detaching the live hook.

    Returns None when the module is unhooked; otherwise unhooks the module
    (rehook(None)) and returns the object it was hooked to.
    """
    hook_obj = self.find_hook_obj()
    if hook_obj == self.module_namespace + ":unhookedTarget":
        return None
    self.rehook(None)
    return hook_obj
def lock_phase3(self, hook_obj):
    """Constrain the locked module's HOOK_IN group to *hook_obj* and lock
    the module container.

    *hook_obj* is a translation-control name from another module (or None);
    it is remapped to that module's corresponding blueprint joint before
    the parent/scale constraints are made.
    """
    module_container = self.module_namespace+":module_container"
    if hook_obj != None:
        # "ns:joint_translation_control" -> "ns:blueprint_joint"
        hook_obj_module_node = utils.strip_leading_namespace(hook_obj)
        hook_obj_module = hook_obj_module_node[0]
        hook_obj_joint = hook_obj_module_node[1].split("_translation_control")[0]
        hook_obj = hook_obj_module+":blueprint_"+hook_obj_joint
        parent_con = cmds.parentConstraint(
            hook_obj,
            self.module_namespace+":HOOK_IN",
            maintainOffset=True,
            name=self.module_namespace+":hook_parent_constraint"
        )[0]
        scale_con = cmds.scaleConstraint(
            hook_obj,
            self.module_namespace+":HOOK_IN",
            maintainOffset=True,
            name=self.module_namespace+":hook_scale_constraint"
        )[0]
        # Cleanup: module_container was redundantly reassigned here with
        # the identical value computed above; the duplicate is removed.
        utils.add_node_to_container(module_container, [parent_con, scale_con])
    cmds.lockNode(module_container, lock=True, lockUnpublished=True)
def snap_root_to_hook(self):
    """Move the root translation control onto the current hook object."""
    root_control = self.get_trans_ctrl(self.module_namespace + ":" + self.joint_info[0][0])
    hook_obj = self.find_hook_obj()
    # Nothing to snap to while the module is unhooked.
    if hook_obj == self.module_namespace + ":unhookedTarget":
        return
    hook_position = cmds.xform(hook_obj, q=True, worldSpace=True, translation=True)
    cmds.xform(root_control, worldSpace=True, absolute=True, translation=hook_position)
def constrain_root_to_hook(self):
    """Point-constrain the root control to the hook object, then lock and
    hide the control."""
    root_control = self.get_trans_ctrl(self.module_namespace + ":" + self.joint_info[0][0])
    hook_obj = self.find_hook_obj()
    # No-op while the module is unhooked.
    if hook_obj == self.module_namespace + ":unhookedTarget":
        return
    cmds.lockNode(self.container_name, lock=False, lockUnpublished=False)
    cmds.pointConstraint(hook_obj, root_control, maintainOffset=False, name=root_control + "_hookConstraint")
    translate_attr = root_control + ".translate"
    vis_attr = root_control + ".visibility"
    cmds.setAttr(translate_attr, l=True)
    # Visibility must be unlocked before it can be set, then re-locked.
    cmds.setAttr(vis_attr, l=False)
    cmds.setAttr(vis_attr, 0)
    cmds.setAttr(vis_attr, l=True)
    cmds.select(clear=True)
    cmds.lockNode(self.container_name, lock=True, lockUnpublished=True)
def unconstrain_root_from_hook(self):
    """Remove the hook constraint from the root control and unhide it.

    If no hook constraint exists this is a no-op apart from the container
    lock toggling.
    """
    cmds.lockNode(self.container_name, lock=False, lockUnpublished=False)
    root_control = self.get_trans_ctrl(self.module_namespace+":"+self.joint_info[0][0])
    root_control_hook_constraint = root_control+"_hookConstraint"
    if cmds.objExists(root_control_hook_constraint):
        cmds.delete(root_control_hook_constraint)
        # Unlock translate and restore visibility (unlock, show, re-lock).
        cmds.setAttr(root_control+".translate", l=False)
        cmds.setAttr(root_control+".visibility", l=False)
        cmds.setAttr(root_control+".visibility", 1)
        cmds.setAttr(root_control+".visibility", l=True)
        # Hand the user the move tool on the freshly released control.
        cmds.select(root_control, replace=True)
        cmds.setToolTo("moveSuperContext")
    # NOTE(review): this trailing call repeats lock=False, whereas sibling
    # methods (rehook, constrain_root_to_hook) re-lock with lock=True at
    # this point.  Possibly intentional (the control is about to be moved
    # interactively) — confirm before changing.
    cmds.lockNode(self.container_name, lock=False, lockUnpublished=False)
def is_root_constrained(self):
    """Return True if the root control currently has a hook constraint."""
    root_control = self.get_trans_ctrl(self.module_namespace + ":" + self.joint_info[0][0])
    return cmds.objExists(root_control + "_hookConstraint")
def can_module_be_mirrored(self):
    """Return whether this module supports mirroring (self.can_be_mirrored)."""
    return self.can_be_mirrored
def mirror(self, original_module, mirror_plane, rotation_function, translation_function):
    """Install this module as a mirror of *original_module*.

    *mirror_plane* is "XY", "YZ" or "XZ"; *rotation_function* and
    *translation_function* select the mirroring behaviour (e.g.
    "behavior" rotations, "mirrored" translations).  Both modules are
    cross-linked via a "mirrorLinks" string attribute on their module_grp
    nodes.
    """
    # Flags consumed by install() / install_custom() to build mirrored.
    self.mirrored = True
    self.original_module = original_module
    self.mirror_plane = mirror_plane
    self.rotation_function = rotation_function
    self.install()
    cmds.lockNode(self.container_name, lock=False, lockUnpublished=False)
    # Copy each joint's rotate order from the original module.
    for joint_info in self.joint_info:
        joint_name = joint_info[0]
        original_joint = self.original_module+":"+joint_name
        new_joint = self.module_namespace+":"+joint_name
        original_rotation_order = cmds.getAttr(original_joint+".rotateOrder")
        cmds.setAttr(new_joint+".rotateOrder", original_rotation_order)
    # Mirror the translation controls (and their pole-vector locators,
    # which exist for every joint except the last) across the plane.
    index = 0
    for joint_info in self.joint_info:
        mirror_pole_vector_locator = False
        if index < len(self.joint_info) - 1:
            mirror_pole_vector_locator = True
        joint_name = joint_info[0]
        original_joint = self.original_module+":"+joint_name
        new_joint = self.module_namespace+":"+joint_name
        original_translation_control = self.get_trans_ctrl(original_joint)
        new_translation_control = self.get_trans_ctrl(new_joint)
        original_translation_control_position = cmds.xform(
            original_translation_control,
            query=True,
            worldSpace=True,
            translation=True
        )
        # Negate the coordinate perpendicular to the mirror plane.
        if self.mirror_plane == "YZ":
            original_translation_control_position[0] *= -1
        elif self.mirror_plane == "XZ":
            original_translation_control_position[1] *= -1
        elif self.mirror_plane == "XY":
            original_translation_control_position[2] *= -1
        cmds.xform(new_translation_control, worldSpace=True, absolute=True, translation=original_translation_control_position)
        if mirror_pole_vector_locator:
            original_pole_vector_locator = original_translation_control+"_poleVectorLocator"
            new_pole_vector_locator = new_translation_control+"_poleVectorLocator"
            original_pole_vector_locator_position = cmds.xform(original_pole_vector_locator, query=True, worldSpace=True, translation=True)
            if self.mirror_plane == "YZ":
                original_pole_vector_locator_position[0] *= -1
            elif self.mirror_plane == "XZ":
                original_pole_vector_locator_position[1] *= -1
            elif self.mirror_plane == "XY":
                original_pole_vector_locator_position[2] *= -1
            cmds.xform(new_pole_vector_locator, worldSpace=True, absolute=True, translation=original_pole_vector_locator_position)
        index += 1
    self.mirror_custom(original_module)
    # Record which axis (if any) translations are mirrored on: 0=none,
    # 1=x, 2=y, 3=z, matching the enum below.
    module_group = self.module_namespace+":module_grp"
    cmds.select(module_group, replace=True)
    enum_name = "none:x:y:z"
    cmds.addAttr(at="enum", enumName=enum_name, longName="mirrorInfo", k=False)
    enum_value = 0
    if translation_function == "mirrored":
        if mirror_plane == "YZ":
            enum_value = 1
        elif mirror_plane == "XZ":
            enum_value = 2
        elif mirror_plane == "XY":
            enum_value = 3
    cmds.setAttr(module_group+".mirrorInfo", enum_value)
    # Cross-link the two modules: each module_grp gets a "mirrorLinks"
    # string of the form "<otherNamespace>__<axis>".
    linked_attribute = "mirrorLinks"
    cmds.lockNode(original_module+":module_container", lock=False, lockUnpublished=False)
    for module_link in ((original_module, self.module_namespace), (self.module_namespace, original_module)):
        module_group = module_link[0]+":module_grp"
        attribute_value = module_link[1]+"__"
        if mirror_plane == "YZ":
            attribute_value += "X"
        elif mirror_plane == "XZ":
            attribute_value += "Y"
        elif mirror_plane == "XY":
            attribute_value += "Z"
        cmds.select(module_group)
        cmds.addAttr(dt="string", longName=linked_attribute, k=False)
        cmds.setAttr(module_group+"."+linked_attribute, attribute_value, type="string")
    for c in [original_module+":module_container", self.container_name]:
        cmds.lockNode(c, lock=True, lockUnpublished=True)
    cmds.select(clear=True)
| [
"[email protected]"
] | |
a8fed0d61b34ec28756c3ae9b408f4d60edfbf49 | 2cccf0eb3cb0a87018016ceac1ca15dba7fb0edb | /Phyton Challenges/String.py | 82031536863137c683c823303c7f0b2c56028f0e | [] | no_license | Sahilreyc/Python-Files | 8ad5251becbc8fb5255b2eced238398deb0aed82 | e880478a908e3fc1e14b51ff2dd97c3c96297b98 | refs/heads/master | 2022-02-26T04:35:10.156942 | 2018-11-30T03:08:39 | 2018-11-30T03:08:39 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 122 | py | ComputerAge=3
MyAge = int(input('What is your age?'))
print('I am younger than you, because I am 3' % (MyAge,ComputerAge)) | [
"[email protected]"
] | |
4a259f67bc49bd6cc78823e3e07aa29800f37025 | f65d0d1326546ab8203e569421f56ca222e77f8c | /TennisUz/migrations/0001_initial.py | a64b67947821555262af1a7b84be4becb65dd359 | [] | no_license | SamusVM/MyTennis | 001a2480a57c970a87aff47472a0af45361722d8 | fb482d54ff247c799580af20fde413cfd05aabcc | refs/heads/master | 2023-08-06T18:38:56.335494 | 2021-09-29T10:56:11 | 2021-09-29T10:56:11 | 260,180,147 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 14,739 | py | # Generated by Django 3.0.5 on 2020-04-13 11:57
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Backhand',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=20)),
],
),
migrations.CreateModel(
name='City',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=200)),
('post_code', models.IntegerField(default=0)),
],
),
migrations.CreateModel(
name='Court',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=200)),
('allows_doubles', models.BooleanField(default=True)),
('is_roof', models.BooleanField(default=False)),
('act', models.BooleanField(default=True)),
],
),
migrations.CreateModel(
name='Covering',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=200)),
],
),
migrations.CreateModel(
name='Game',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('is_winner', models.BooleanField(default=True)),
('winner', models.IntegerField(default=0)),
],
),
migrations.CreateModel(
name='Hand',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=20)),
],
),
migrations.CreateModel(
name='Match',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('is_official', models.BooleanField(default=True)),
('s1', models.IntegerField(default=0)),
('s2', models.IntegerField(default=0)),
('g1', models.IntegerField(default=0)),
('g2', models.IntegerField(default=0)),
('is_winner', models.BooleanField(default=True)),
('winner', models.IntegerField(default=0)),
('court', models.ForeignKey(blank='TRUE', null='True', on_delete=django.db.models.deletion.SET_NULL, to='TennisUz.Court')),
],
),
migrations.CreateModel(
name='Match_type',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=50)),
('n_player', models.IntegerField(default=2)),
],
),
migrations.CreateModel(
name='Person',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('first_name', models.CharField(max_length=50)),
('last_name', models.CharField(max_length=50)),
('d_birth', models.DateField()),
('city', models.ForeignKey(blank='TRUE', null='True', on_delete=django.db.models.deletion.SET_NULL, to='TennisUz.City')),
],
),
migrations.CreateModel(
name='Place_Court',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=50)),
],
),
migrations.CreateModel(
name='Player',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('is_profy', models.BooleanField(default=True)),
('d_start', models.DateField()),
('racket', models.CharField(max_length=50)),
('strings', models.CharField(max_length=50)),
('shoes', models.CharField(max_length=50)),
('balls', models.CharField(max_length=50)),
('atp_players', models.CharField(max_length=50)),
('backhand', models.ForeignKey(blank='TRUE', null='True', on_delete=django.db.models.deletion.SET_NULL, to='TennisUz.Backhand')),
('hand', models.ForeignKey(blank='TRUE', null='True', on_delete=django.db.models.deletion.SET_NULL, to='TennisUz.Hand')),
('person', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to='TennisUz.Person')),
],
),
migrations.CreateModel(
name='Question',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('question_text', models.CharField(max_length=200)),
('pub_date', models.DateTimeField(verbose_name='date published')),
],
),
migrations.CreateModel(
name='Region',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=200)),
('code', models.CharField(max_length=3)),
],
),
migrations.CreateModel(
name='Shot_Hand',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=50)),
],
),
migrations.CreateModel(
name='Shot_Type',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=50)),
],
),
migrations.CreateModel(
name='Torney_Group_Name',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=50)),
],
),
migrations.CreateModel(
name='Torney_Round',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=50)),
('n_playr', models.IntegerField(default=0)),
],
),
migrations.CreateModel(
name='Tourney',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=200)),
('rules', models.TextField()),
('court', models.ForeignKey(blank='TRUE', null='True', on_delete=django.db.models.deletion.SET_NULL, to='TennisUz.Court')),
],
),
migrations.CreateModel(
name='Winner_type',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=50)),
],
),
migrations.CreateModel(
name='Tourney_Group',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=50)),
('tourney', models.ForeignKey(blank='TRUE', null='True', on_delete=django.db.models.deletion.SET_NULL, to='TennisUz.Tourney')),
],
),
migrations.CreateModel(
name='Torney_Group_Player',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('player', models.ForeignKey(blank='TRUE', null='True', on_delete=django.db.models.deletion.SET_NULL, to='TennisUz.Player')),
('tourney_group', models.ForeignKey(blank='TRUE', null='True', on_delete=django.db.models.deletion.SET_NULL, to='TennisUz.Tourney_Group')),
],
),
migrations.CreateModel(
name='Stadium',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=200)),
('address', models.CharField(max_length=1000)),
('t_open', models.TimeField()),
('t_close', models.TimeField()),
('act', models.BooleanField(default=True)),
('city', models.ForeignKey(blank='TRUE', null='True', on_delete=django.db.models.deletion.SET_NULL, to='TennisUz.City')),
],
),
migrations.CreateModel(
name='Set',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('g1', models.IntegerField(default=0)),
('g2', models.IntegerField(default=0)),
('is_tiebreak', models.BooleanField(default=True)),
('tb1', models.IntegerField(default=0)),
('tb2', models.IntegerField(default=0)),
('is_winner', models.BooleanField(default=True)),
('winner', models.IntegerField(default=0)),
('match', models.ForeignKey(blank='TRUE', null='True', on_delete=django.db.models.deletion.SET_NULL, to='TennisUz.Match')),
],
),
migrations.CreateModel(
name='Player_Rank',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('dt', models.DateTimeField()),
('delta_rahk', models.IntegerField(default=0)),
('delta_rahk_doubles', models.IntegerField(default=0)),
('match', models.ForeignKey(blank='TRUE', null='True', on_delete=django.db.models.deletion.SET_NULL, to='TennisUz.Match')),
('player', models.ForeignKey(blank='TRUE', null='True', on_delete=django.db.models.deletion.SET_NULL, to='TennisUz.Player')),
],
),
migrations.AddField(
model_name='match',
name='game_type',
field=models.ForeignKey(blank='TRUE', null='True', on_delete=django.db.models.deletion.SET_NULL, to='TennisUz.Match_type'),
),
migrations.AddField(
model_name='match',
name='player1',
field=models.ForeignKey(blank='TRUE', null='True', on_delete=django.db.models.deletion.SET_NULL, related_name='p1', to='TennisUz.Player'),
),
migrations.AddField(
model_name='match',
name='player2',
field=models.ForeignKey(blank='TRUE', null='True', on_delete=django.db.models.deletion.SET_NULL, related_name='p2', to='TennisUz.Player'),
),
migrations.AddField(
model_name='match',
name='player3',
field=models.ForeignKey(blank='TRUE', null='True', on_delete=django.db.models.deletion.SET_NULL, related_name='p3', to='TennisUz.Player'),
),
migrations.AddField(
model_name='match',
name='player4',
field=models.ForeignKey(blank='TRUE', null='True', on_delete=django.db.models.deletion.SET_NULL, related_name='p4', to='TennisUz.Player'),
),
migrations.CreateModel(
name='Game_Log',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('dt', models.DateTimeField()),
('winner', models.IntegerField(default=0)),
('player', models.IntegerField(default=0)),
('game', models.ForeignKey(blank='TRUE', null='True', on_delete=django.db.models.deletion.SET_NULL, to='TennisUz.Game')),
('place_court', models.ForeignKey(blank='TRUE', null='True', on_delete=django.db.models.deletion.SET_NULL, to='TennisUz.Place_Court')),
('shot_hand', models.ForeignKey(blank='TRUE', null='True', on_delete=django.db.models.deletion.SET_NULL, to='TennisUz.Shot_Hand')),
('shot_type', models.ForeignKey(blank='TRUE', null='True', on_delete=django.db.models.deletion.SET_NULL, to='TennisUz.Shot_Type')),
('winner_type', models.ForeignKey(blank='TRUE', null='True', on_delete=django.db.models.deletion.SET_NULL, to='TennisUz.Winner_type')),
],
),
migrations.AddField(
model_name='game',
name='set',
field=models.ForeignKey(blank='TRUE', null='True', on_delete=django.db.models.deletion.SET_NULL, to='TennisUz.Set'),
),
migrations.AddField(
model_name='court',
name='covering',
field=models.ForeignKey(blank='TRUE', null='True', on_delete=django.db.models.deletion.SET_NULL, to='TennisUz.Covering'),
),
migrations.AddField(
model_name='court',
name='stadium',
field=models.ForeignKey(blank='TRUE', null='True', on_delete=django.db.models.deletion.SET_NULL, to='TennisUz.Stadium'),
),
migrations.AddField(
model_name='city',
name='region',
field=models.ForeignKey(blank='TRUE', null='True', on_delete=django.db.models.deletion.SET_NULL, to='TennisUz.Region'),
),
migrations.CreateModel(
name='Choice',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('choice_text', models.CharField(max_length=200)),
('votes', models.IntegerField(default=0)),
('question', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='TennisUz.Question')),
],
),
]
| [
"[email protected]"
] | |
c340a8d9c86608c4ae772af894cee4c8d13f798e | fefe34292f16706a8039ab6bc9929212178e1469 | /python-datos/punto_orange/punto_orange/pipelines.py | ddca01cec2386ae034b8d7fcc297519ddcf3acd6 | [] | no_license | pedrojsalinas/ecommerce | e9d34d8db58e3e4228c7d25816f72b94f4434cb5 | 26e08bba0d4d4a4c59cd9494908cc01348213c3c | refs/heads/master | 2022-02-27T19:37:33.863203 | 2019-09-16T22:37:58 | 2019-09-16T22:37:58 | 208,910,234 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 293 | py | # -*- coding: utf-8 -*-
# Define your item pipelines here
#
# Don't forget to add your pipeline to the ITEM_PIPELINES setting
# See: https://docs.scrapy.org/en/latest/topics/item-pipeline.html
class PuntoOrangePipeline(object):
def process_item(self, item, spider):
return item
| [
"[email protected]"
] | |
cae2a038c2305eb33e22abd1822edc53904fe169 | bfae612ab84f8c7ce3f3c5c8f4387b6aaab6c370 | /Prac02/strings,files&excep.py | 998b29af33acd24547db9b32a6b59346be8f931d | [] | no_license | jc451073/workshops | d291a835fe4f2f2db1ccdf42c963830acdd37882 | 6a6e0cbbfe13092f51ef8c60a482c52a1fbf7698 | refs/heads/master | 2020-06-11T02:14:20.067055 | 2017-02-02T22:11:58 | 2017-02-02T22:11:58 | 76,025,467 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 498 | py |
name = "Gibson L-5 CES"
year = 1922
cost = 16035.40
print("My guitar: " + name + ", first made in " + str(year))
print("My guitar: {}, first made in {}".format(name, year))
print("My guitar: {0}, first made in {1}".format(name, year))
print("My {0} was first made in {1} (that's right, {1}!)".format(name, year))
print("My {} would cost ${:,.2f}".format(name, cost))
numbers = [1, 19, 123, 456, -25]
for i in range(len(numbers)):
print("Number {0} is {1:>5}".format(i + 1, numbers[i]))
| [
"[email protected]"
] | |
04c127719225b08bad25609d1c39319bc8db8c50 | 69354214e126f8f2337ae8212e6f2105d8900cc1 | /proto.py | 0b9aeda1f15d44309bf0c3d06aef88f7e420e6bf | [] | no_license | g-x-w/snek | a734889b46977b874c258ec727d3c156363903d1 | c43f4abf58ff4448709080c4f63a4d3cac21cae2 | refs/heads/master | 2021-03-03T00:30:49.195123 | 2020-04-29T21:28:44 | 2020-04-29T21:28:44 | 245,918,557 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 10,777 | py | import math as m
import random as rd
import pygame as pg
import tkinter as tk
from tkinter import messagebox
class cube(object):
def __init__(self,start,dirnx=1,dirny=0, color=(0, 255, 0)):
self.pos = start
self.dirnx = 1
self.dirny = 0
self.color = color
def move(self, dirnx, dirny):
self.dirnx = dirnx
self.dirny = dirny
self.pos = (self.pos[0] + self.dirnx, self.pos[1] + self.dirny)
def draw(self, surface, eyes=False):
dis = width//rows
i = self.pos[0]
j = self.pos[1]
pg.draw.rect(surface, self.color, (i*dis+1, j*dis+1, dis-2, dis-2))
if eyes:
centre = dis//2
radius = 3
circleMiddle = (i*dis+centre-radius,j*dis+8)
circleMiddle2 = (i*dis + dis -radius*2, j*dis+8)
pg.draw.circle(surface, (0,0,0), circleMiddle, radius)
pg.draw.circle(surface, (0,0,0), circleMiddle2, radius)
class snake(object):
body = []
turns = {}
def __init__(self, color, pos):
self.color = color
self.head = cube(pos)
self.body.append(self.head)
self.dirnx = 0
self.dirny = 1
def move(self, snack_loc):
if AI:
if len(self.body) < 15:
decision = A_Star_Decider(snack_loc, self, width)
# print("DIRECTION:", decision, "\n")
if decision == 1: #LEFT
self.dirnx = -1
self.dirny = 0
self.turns[self.head.pos[:]] = [self.dirnx, self.dirny]
elif decision == 2: #RIGHT
self.dirnx = 1
self.dirny = 0
self.turns[self.head.pos[:]] = [self.dirnx, self.dirny]
elif decision == 3: #UP
self.dirnx = 0
self.dirny = -1
self.turns[self.head.pos[:]] = [self.dirnx, self.dirny]
elif decision == 4: #DOWN
self.dirnx = 0
self.dirny = 1
self.turns[self.head.pos[:]] = [self.dirnx, self.dirny]
else:
decision = A_Star_Decider((0,0), self, width)
# # # # # # hamilton()
else:
for event in pg.event.get():
if event.type == quit:
quit()
keys = pg.key.get_pressed()
for key in keys:
if keys[pg.K_LEFT] or keys[pg.K_a]:
self.dirnx = -1
self.dirny = 0
self.turns[self.head.pos[:]] = [self.dirnx, self.dirny]
elif keys[pg.K_RIGHT] or keys[pg.K_d]:
self.dirnx = 1
self.dirny = 0
self.turns[self.head.pos[:]] = [self.dirnx, self.dirny]
elif keys[pg.K_UP] or keys[pg.K_w]:
self.dirnx = 0
self.dirny = -1
self.turns[self.head.pos[:]] = [self.dirnx, self.dirny]
elif keys[pg.K_DOWN] or keys[pg.K_s]:
self.dirnx = 0
self.dirny = 1
self.turns[self.head.pos[:]] = [self.dirnx, self.dirny]
for i, c in enumerate(self.body):
p = c.pos[:]
if p in self.turns:
turn = self.turns[p]
c.move(turn[0],turn[1])
if i == len(self.body)-1:
self.turns.pop(p)
else:
if ((c.dirnx == -1 and c.pos[0] <= 0) or (c.dirnx == 1 and c.pos[0] >= rows-1) or
(c.dirny == 1 and c.pos[1] >= rows-1) or (c.dirny == -1 and c.pos[1] <= 0)):
terminate()
else: c.move(c.dirnx,c.dirny)
def reset(self, pos):
self.head = cube(pos)
self.body = []
self.body.append(self.head)
self.turns = {}
self.dirnx = 0
self.dirny = 1
def addCube(self):
tail = self.body[-1]
dx, dy = tail.dirnx, tail.dirny
if dx == 1 and dy == 0:
self.body.append(cube((tail.pos[0]-1,tail.pos[1])))
elif dx == -1 and dy == 0:
self.body.append(cube((tail.pos[0]+1,tail.pos[1])))
elif dx == 0 and dy == 1:
self.body.append(cube((tail.pos[0],tail.pos[1]-1)))
elif dx == 0 and dy == -1:
self.body.append(cube((tail.pos[0],tail.pos[1]+1)))
self.body[-1].dirnx = dx
self.body[-1].dirny = dy
def draw(self, surface):
for i, c in enumerate(self.body):
if i ==0:
c.draw(surface, True)
else:
c.draw(surface)
#####
global width, rows, s, speed, AI
width = 600
rows = 20
s = snake((0, 255, 0), (2, 2))
speed = 4000 # from 1 (painfully slow) to 1000 (impossibly fast)
AI = True
#####
def A_Star_Decider (snack_loc, s, width):
L_block = (s.body[0].pos[0]-1, s.body[0].pos[1])
R_block = (s.body[0].pos[0]+1, s.body[0].pos[1])
U_block = (s.body[0].pos[0], s.body[0].pos[1]-1)
D_block = (s.body[0].pos[0], s.body[0].pos[1]+1)
L = euc_dist(L_block, snack_loc)
R = euc_dist(R_block, snack_loc)
U = euc_dist(U_block, snack_loc)
D = euc_dist(D_block, snack_loc)
distances = [L, R, U, D]
options = [L_block, R_block, U_block, D_block]
# print("Options (LRUD):", options)
distances, options = zip(*sorted(zip(distances, options)))
distances = list(distances)
options = list(options)
illegal_body = list(map(lambda z: z.pos, s.body[1:]))
illegal_walls = []
for i in range(rows):
illegal_walls.append((i, -1)) #append -1 row
illegal_walls.append((i, rows)) #append bottom row
illegal_walls.append((-1, i)) #append -1 column
illegal_walls.append((rows, i)) #append rightmost column
allowable = []
for i in range(len(options)):
if options[i] in illegal_body or options[i] in illegal_walls:
pass
else:
allowable.append(i)
# # # # print ("HEAD:", s.body[0].pos)
# # # # print("Body:", illegal_body)
# # # # print("Distances:", distances)
# # # # print("Options:", options)
# # # # print("Allowable:", allowable)
try:
if options[allowable[0]] == L_block:
return 1
elif options[allowable[0]] == R_block:
return 2
elif options[allowable[0]] == U_block:
return 3
elif options[allowable[0]] == D_block:
return 4
else:
print("ALLOWABLE ERROR")
except Exception:
terminate()
# try:
# if options[allowable[0]] == L_block:
# if second_analysis(allowable, L_block, illegal_body, illegal_walls) == 1 or L_block == snack_loc:
# return 1
# else:
# pass
# elif options[allowable[0]] == R_block:
# if second_analysis(allowable, R_block, illegal_body, illegal_walls) == 1 or R_block == snack_loc:
# return 2
# else:
# pass
# elif options[allowable[0]] == U_block:
# if second_analysis(allowable, U_block, illegal_body, illegal_walls) == 1 or U_block == snack_loc:
# return 3
# else:
# pass
# elif options[allowable[0]] == D_block:
# if second_analysis(allowable, D_block, illegal_body, illegal_walls) == 1 or D_block == snack_loc:
# return 4
# else:
# pass
# else:
# print("ALLOWABLE ERROR")
# except Exception:
# terminate()
# # # # # # # # # # # # # # def hamilton():
def second_vision(square_tuple):
out = ((square_tuple[0]-1, square_tuple[1]),
(square_tuple[0]+1, square_tuple[1]),
(square_tuple[0], square_tuple[1]-1),
(square_tuple[0], square_tuple[1]+1)) #LRUD format
return out
def second_analysis(allowable, block, body, walls):
test = second_vision(block)
# print("TEST:", test)
case = 0
for i in range(len(test)):
if test[i] in body or test[i] in walls:
case += 1
else:
pass
if case > 2:
allowable.remove(allowable[0])
else:
return 1
def euc_dist (tup1, tup2):
out = ((tup2[0] - tup1[0])**2 + (tup2[1] - tup1[1])**2)**(0.5)
return out
def drawGrid(w, rows, surface):
sizeBtwn = w // rows
x = 0
y = 0
for l in range(rows):
x = x + sizeBtwn
y = y + sizeBtwn
pg.draw.line(surface, (255,255,255), (x,0),(x,w))
pg.draw.line(surface, (255,255,255), (0,y),(w,y))
def redrawWindow(surface):
surface.fill((0,0,0))
s.draw(surface)
snack.draw(surface)
drawGrid(width, rows, surface)
pg.display.update()
def goal(rows, item):
positions = item.body
while True:
x = rd.randrange(rows)
y = rd.randrange(rows)
if len(list(filter(lambda z: z.pos == (x,y), positions))) > 0:
continue
else:
break
return (x,y)
def message_box(subject, content):
root = tk.Tk()
root.attributes("-topmost", True)
root.withdraw()
messagebox.showinfo(subject, content)
try:
root.destroy()
except:
pass
def terminate ():
s.body[-1].color = (0, 0, 255)
redrawWindow(win)
print('{},'.format(len(s.body) -1))
message_box('Game Over!', 'You scored {} points! Press ENTER to close this window...'.format(len(s.body)-1))
s.reset((0,0))
quit()
def win_game():
print('{},'.format(len(s.body) -1))
message_box('You Won!', 'You scored the max {} points and won the game! Press any key to play again...'.format(len(s.body)-1))
s.reset((0,0))
quit()
def main():
global snack, win
snack = cube(goal(rows, s), color=(255,0,0))
win = pg.display.set_mode((width, width))
flag = True
clock = pg.time.Clock()
while flag:
pg.time.delay(1)
clock.tick(speed)
s.move(snack.pos)
if s.body[0].pos == snack.pos and len(s.body) != rows**2:
s.addCube()
snack = cube(goal(rows, s), color=(255, 0, 0))
elif s.body[0].pos == snack.pos and len(s.body) == rows**2:
win_game()
body = []
for x in range(len(s.body)):
body.append(s.body[x].pos)
body.remove(body[0])
if s.head.pos in body:
terminate()
redrawWindow(win)
main() | [
"[email protected]"
] | |
dd22bed5684048e0afd1f703c946c219eed66303 | c16c32efbc80953694903fde58c08fb53ff2dc1d | /backend/api/test.py | 674768891bb11acb30cd6303fdc8537e757a426b | [
"MIT"
] | permissive | EdwinKato/bucket-list | 9af5a7a4c6852c881dcc98d4dccf18b0a74baaff | 16b71cce59df2f28061b8f5d6b2cfb1f679381c4 | refs/heads/master | 2022-12-09T13:54:23.046711 | 2017-08-18T12:56:36 | 2017-08-18T12:56:36 | 95,651,248 | 0 | 1 | MIT | 2023-06-21T15:39:22 | 2017-06-28T09:15:04 | CSS | UTF-8 | Python | false | false | 984 | py | from flask_testing import TestCase
from api.api import *
from api import create_app, db
import json
class BaseTestCase(TestCase):
def create_app(self):
# pass in test configuration
return create_app("testing")
def setUp(self):
super(BaseTestCase, self).setUp()
db.create_all()
self.test_user = {
"email": "[email protected]",
"first_name": "Edwin",
"last_name": "Kato",
"password": "qwerty@123",
"username": "EdwinKato"
}
self.response = self.client.post('/api/v1/auth/register',
data=json.dumps(self.test_user),
content_type='application/json')
self.data = json.loads(self.response.data.decode())
self.token = self.data['token']
def tearDown(self):
super(BaseTestCase, self).tearDown()
db.session.remove()
db.drop_all()
| [
"[email protected]"
] | |
aa99b9b66a81850888799d1020e6f6f5579032f1 | 2ba0a6749c4890bc38aed7b00ec1cc52d8fec3e4 | /project/django_app/settings.py | df1b013d03811976c9c109e2206c71521c0646c7 | [] | no_license | Drag0nus/MVP_DRF | 0c0051be8a52ed0d45c72849b4093f47e96ef905 | d8519c8ca04bd919081f00f3f4a97f1e66f8782a | refs/heads/master | 2021-01-25T09:32:29.377318 | 2017-06-09T11:49:45 | 2017-06-09T11:49:45 | 93,852,769 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,571 | py | import os
from celery.schedules import crontab
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'vs^um1(fd2)m+)ol#i*q3uj&l9*nnhaf1r=)=is&79ne5)qx-1'
ALLOWED_HOSTS = ['*']
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'corsheaders',
'rest_framework',
'rest_framework_swagger',
'oauth2_provider',
'tornado_app',
'common',
'users',
'tasks',
'notifications'
]
MIDDLEWARE = [
'corsheaders.middleware.CorsMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'corsheaders.middleware.CorsPostCsrfMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'django_app.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'django_app.wsgi.application'
# Password validation
AUTH_USER_MODEL = 'users.User'
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# -----------------
# CORS
# -----------------
CORS_ORIGIN_ALLOW_ALL = True
CORS_ALLOW_CREDENTIALS = True
CORS_REPLACE_HTTPS_REFERER = True
CORS_URLS_REGEX = '^.*$'
CORS_ALLOW_METHODS = (
'GET',
'POST',
'PUT',
'PATCH',
'DELETE',
'OPTIONS'
)
CORS_ALLOW_HEADERS = (
'cookie',
'content-type',
'content-range',
'content-length',
'content-disposition',
'cache-control',
'connection',
'accept',
'accept-encoding',
'accept-language',
'host',
'origin',
'referer',
'authorization',
'users-agent',
'x-requested-with',
'x-csrftoken',
'x-requested-with',
)
CORS_EXPOSE_HEADERS = ()
CORS_ORIGIN_WHITELIST = ()
# -----------------
# Django Rest Framework
# -----------------
REST_FRAMEWORK = {
'DEFAULT_RENDERER_CLASSES': (
'rest_framework.renderers.JSONRenderer',
),
'DEFAULT_PARSER_CLASSES': (
'rest_framework.parsers.JSONParser',
'rest_framework.parsers.FormParser',
'rest_framework.parsers.MultiPartParser'
),
'DEFAULT_FILTER_BACKENDS': (
'rest_framework.filters.DjangoFilterBackend',
'rest_framework.filters.OrderingFilter',
),
'DEFAULT_AUTHENTICATION_CLASSES': (
'oauth2_provider.ext.rest_framework.OAuth2Authentication',
),
'DEFAULT_PERMISSION_CLASSES': (
'rest_framework.permissions.AllowAny',
),
'DEFAULT_PAGINATION_CLASS':
'common.drf.pagination.StandardResultsSetPagination',
'PAGE_SIZE': 100,
'MAX_PAGE_SIZE': 300,
# Authentication
'UNAUTHENTICATED_USER': 'django.contrib.auth.models.AnonymousUser',
'UNAUTHENTICATED_TOKEN': None,
}
SWAGGER_SETTINGS = {
'token_type': 'Bearer',
}
CELERYBEAT_SCHEDULE = {
'test_periodic_task': {
'task': 'notifications.tasks.slava_ukr_notification',
'schedule': crontab(),
},
}
# -----------------------
# Local setting overwrite
# -----------------------
try:
from local_settings import *
except ImportError:
pass
| [
"[email protected]"
] | |
5f54aa99d6459a81268dfafbac4905ac96fbb540 | cd9f762873491a73871f6770742bd46b5c7d2402 | /9.2 answers.py | 71ea422a31092fb12a3e8bd6118f405925887394 | [] | no_license | hubbishop/CSCI156Activity-9.2 | 79c105477c6473590b47d9f68953038f11769daf | be29cd0bec09a88ecb1fff135c76e1ad6fa47003 | refs/heads/master | 2020-12-07T02:18:50.130019 | 2014-10-25T22:25:30 | 2014-10-25T22:25:30 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 194 | py | __author__ = 'Dark-Knight'
def capitalcase():
with open("r.txt", "rt", encoding="utf-8")as f:
for line in f:
line.upper()
print(line.upper())
capitalcase()
| [
"[email protected]"
] | |
7515521465732c959b29a14fb11081c9a1b56d9d | a7a0c5ed748b2ae87b670cd6aea6a717084e969c | /modelhub.py | 08ae0fc071781cbb0bd63b883f11868d299fd95f | [] | no_license | kevinmgamboa/consciousness | ec69c16c1d8c6f280663e44d2fbc039fbc733df8 | 22317ff1be0be20be14b15645210839c9a1cc62c | refs/heads/main | 2023-08-03T11:59:12.880584 | 2021-10-01T10:23:30 | 2021-10-01T10:23:30 | 390,245,570 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 9,112 | py | """
This file contain models implemented for the project
----------------------------
"""
# -----------------------------------------------------------------------------
# Libraries Needed
# -----------------------------------------------------------------------------
import tensorflow as tf
from tensorflow import keras
from tensorflow.keras import layers
from helpers_and_functions import config
class simple_cnn():
def __init__(self, param):
# Initializing model
self.model = None
# parameter
self.parameters = param
# # Building model structure
# self.structure()
# # Compiling model
# self.compile()
def build_model_structure(self, in_shape):
# Adds batch size = 1
in_shape = (1,) + in_shape
flat_size = 10
num_filters = 5
kernel_size = 3
out_size = 1
self.model = tf.keras.Sequential([
layers.Conv2D(num_filters, kernel_size, padding='same', activation='relu',
kernel_initializer='he_normal', input_shape=in_shape),
layers.Flatten(),
layers.Dense(flat_size, activation='relu'),
layers.Dense(out_size)
])
def compile(self):
self.model.compile(optimizer=keras.optimizers.Adam(self.parameters['lr']),
loss=keras.losses.BinaryCrossentropy(from_logits=True),
metrics=keras.metrics.BinaryAccuracy(name='accuracy'))
class simple_cnn_2():
def __init__(self, param):
# Initializing model
self.model = None
self.feature_extractor = None
# parameter
self.parameters = param
# # Building model structure
# self.structure()
# # Compiling model
# self.compile()
def build_model_structure(self, in_shape):
# Adds batch size = 1
#in_shape = (1,) + in_shape
flat_size = 10
num_filters = 5
kernel_size = 3
out_size = 1
model = tf.keras.Sequential([
layers.Conv2D(num_filters, kernel_size, padding='same', activation='relu',
kernel_initializer='he_normal', input_shape=in_shape),
layers.Flatten(),
layers.Dense(flat_size, activation='relu', name='feature_extraction'),
layers.Dense(out_size, name='output')
])
self.model = keras.Model(
inputs=model.inputs,
#outputs=[layer.output for layer in model.layers[-2:]] # extracts the last two layers of the model
outputs=model.get_layer(name="output").output,
)
def compile(self):
self.model.compile(optimizer=keras.optimizers.Adam(self.parameters['lr']),
loss=keras.losses.BinaryCrossentropy(from_logits=True),
metrics=keras.metrics.BinaryAccuracy(name='accuracy'))
class simple_cnn_2_1():
def __init__(self, param):
# Initializing model
self.model = None
self.feature_extractor = None
# parameter
self.parameters = param
# # Building model structure
# self.structure()
# # Compiling model
# self.compile()
def build_model_structure(self, in_shape):
# Adds batch size = 1
in_shape = (1,) + in_shape
flat_size = 10
num_filters = 5
kernel_size = 3
out_size = 1
model = tf.keras.Sequential([
layers.Conv2D(num_filters, kernel_size, padding='same', activation='relu',
kernel_initializer='he_normal', input_shape=in_shape),
layers.Flatten(),
layers.Dense(flat_size, activation='relu', name='feature_extraction'),
layers.Dense(out_size, name='output')
])
self.model = keras.Model(
inputs=model.inputs,
outputs=[layer.output for layer in model.layers[-2:]] # extracts the last two layers of the model
#outputs=model.get_layer(name="feature_extraction").output,
)
def compile(self):
self.model.compile(optimizer=keras.optimizers.Adam(self.parameters['lr']),
loss=keras.losses.BinaryCrossentropy(from_logits=True),
metrics=keras.metrics.BinaryAccuracy(name='accuracy'))
# -----------------------------------------------------------------------------
# Multi-Branch Model
# -----------------------------------------------------------------------------
class multi_output_feature_model:
    """Single-input Keras model with two parallel CNN branches ('spec_*' and
    'hilbert_*' layer names) returned as two separate outputs."""

    def __init__(self, param, input_shape):
        self.model = None        # populated by build_model()
        self.parameters = param  # expects num_filters, kernel_size, dense_units, out_size
        # Prepend a batch dimension of 1 before building.
        self.build_model((1,) + input_shape)

    def spectrogram_input_branch(self, x_in):
        """Conv -> Flatten -> Dense -> Dense branch with 'spec_*' layer names."""
        cfg = self.parameters
        conv = layers.Conv2D(cfg['num_filters'], cfg['kernel_size'],
                             padding='same', activation='relu', name='spec_conv_1')(x_in)
        flat = layers.Flatten(name='spec_flatten')(conv)
        hidden = layers.Dense(cfg['dense_units'], activation='relu', name='spec_dense_1')(flat)
        return layers.Dense(cfg['out_size'], name='spec_dense_2')(hidden)

    def hilbert_transform_branch(self, x_in):
        """Conv -> Flatten -> Dense -> Dense branch with 'hilbert_*' layer names."""
        cfg = self.parameters
        conv = layers.Conv2D(cfg['num_filters'], cfg['kernel_size'],
                             padding='same', activation='relu', name='hilbert_conv_1')(x_in)
        flat = layers.Flatten(name='hilbert_flatten')(conv)
        hidden = layers.Dense(cfg['dense_units'], activation='relu', name='hilbert_dense_1')(flat)
        return layers.Dense(cfg['out_size'], name='hilbert_dense_2')(hidden)

    def build_model(self, input_shape):
        """Wire one shared input through both branches into a two-output model."""
        x_in = keras.Input(input_shape)
        branch_outputs = [self.spectrogram_input_branch(x_in),
                          self.hilbert_transform_branch(x_in)]
        self.model = keras.models.Model(inputs=x_in,
                                        outputs=branch_outputs,
                                        name='multi_input_feature_model')
# %%
# -----------------------------------------------------------------------------
# Multi-Branch Model (https://keras.io/guides/functional_api/)
# -----------------------------------------------------------------------------
class multi_input_feature_model:
    """Two-input model: a spectrogram CNN branch and a Hilbert-transform CNN
    branch whose outputs are concatenated into one final dense prediction."""

    def __init__(self, param, input_1, input_2):
        # model object, populated by build_model_structure()
        self.model = None
        # hyper-parameter dict: num_filters, kernel_size, dense_units, out_size, lr
        self.parameters = param
        # builds the model; (1,) prepends a batch dimension to each input shape
        self.build_model_structure((1,) + input_1, (1,) + input_2)

    def spectrogram_input_branch(self, x_in):
        """Conv -> Flatten -> Dense -> Dense sub-model for the spectrogram input."""
        x = layers.Conv2D(self.parameters['num_filters'], self.parameters['kernel_size'],
                          padding='same', activation='relu')(x_in)
        x = layers.Flatten()(x)
        x = layers.Dense(self.parameters['dense_units'], activation='relu')(x)
        x = layers.Dense(self.parameters['out_size'])(x)
        # wrap the branch as its own Model so .input/.output are exposed
        y = keras.models.Model(inputs=x_in, outputs=x)
        return y

    def hilbert_transform_branch(self, x_in):
        """Conv -> Flatten -> Dense -> Dense sub-model for the Hilbert-transform input."""
        x = layers.Conv2D(self.parameters['num_filters'], self.parameters['kernel_size'],
                          padding='same', activation='relu')(x_in)
        x = layers.Flatten()(x)
        x = layers.Dense(self.parameters['dense_units'], activation='relu')(x)
        x = layers.Dense(self.parameters['out_size'])(x)
        y = keras.models.Model(inputs=x_in, outputs=x)
        return y

    def build_model_structure(self, input_1, input_2):
        """Assemble the full two-input model from both branches."""
        # creates input for spectrogram and for hilbert transform
        spec_input = keras.Input(input_1, name='spectrogram_input')
        hilb_input = keras.Input(input_2, name='hilbert_input')
        # BUG FIX: the Hilbert input was previously routed through
        # spectrogram_input_branch a second time, leaving
        # hilbert_transform_branch defined but never used.
        spec_b = self.spectrogram_input_branch(spec_input)
        hilb_b = self.hilbert_transform_branch(hilb_input)
        # combining branch outputs via concatenation
        comb_out = layers.concatenate([spec_b.output, hilb_b.output])
        # final prediction layer over the concatenated features
        conscious_pred = layers.Dense(self.parameters['out_size'], name='final_out')(comb_out)
        self.model = keras.models.Model(inputs=[spec_b.input, hilb_b.input],
                                        outputs=conscious_pred,
                                        name='multi_input_feature_model')

    def compile(self):
        """Compile with Adam and logit binary cross-entropy.

        NOTE(review): `config.metrics` refers to a module-level `config`
        object defined elsewhere in this file/project — confirm it is in scope.
        """
        self.model.compile(optimizer=keras.optimizers.Adam(self.parameters['lr']),
                           loss=keras.losses.BinaryCrossentropy(from_logits=True),
                           metrics=config.metrics)
"[email protected]"
] | |
219ec292d34f28424ed5c462ad0c5fcf9bc6f2c5 | adf6af478dab32a287ba7648076100229c0a6068 | /dataPreparation/featureEngineering/kde-test.py | f5dc3ad921d4c7f06516192f8ce771175b6751ac | [] | no_license | mpaul7/MachineLearning | f26f574acd6f1a7cf56c2d87bf88d1a46649d44c | 43e41398d33fd06891cb6ece51a2d516cae8ffa5 | refs/heads/master | 2022-09-19T04:29:35.525098 | 2022-08-22T13:45:57 | 2022-08-22T13:45:57 | 186,191,951 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 3,097 | py | import os
from math import log2
import numpy as np
from statistics import mean, stdev
import pandas as pd
from matplotlib import pyplot
import seaborn as sns
from numpy import hstack
from numpy import asarray
from numpy import exp
from sklearn.neighbors import KernelDensity
from statsmodels.nonparametric.bandwidths import bw_silverman, bw_scott, select_bandwidth
from sklearn.model_selection import GridSearchCV
from datetime import datetime
import datetime
sns.set()  # apply seaborn's default plot styling globally (module-level side effect)
def listFiles(paths=None):
    """Recursively collect the paths of all .csv files under each directory.

    Args:
        paths: iterable of directory paths to walk; None is treated as empty
            (previously `listFiles()` crashed with TypeError iterating None).

    Returns:
        list[str]: full paths of every matching file, in os.walk order.
    """
    if paths is None:
        paths = []
    files = []
    for path in paths:
        for root, _dirs, names in os.walk(path):
            for name in names:
                # Substring match keeps legacy behavior: 'x.csv.bak' also matches.
                if '.csv' in name:
                    files.append(os.path.join(root, name))
    return files
def outlierDetectionRemoval(data=None):
    """Drop values farther than 3 sample standard deviations from the mean.

    Args:
        data: iterable of numbers (list, Series, ...); None is treated as empty.

    Returns:
        pandas.Series of the retained values, re-indexed from 0.
    """
    values = list(data) if data is not None else []
    if len(values) < 2:
        # statistics.stdev() raises for fewer than two points, and a single
        # point can never be an outlier of itself.
        return pd.Series(values)
    data_mean, data_std = mean(values), stdev(values)
    # 3-sigma fence around the mean.
    cut_off = data_std * 3
    lower, upper = data_mean - cut_off, data_mean + cut_off
    kept = [x for x in values if lower <= x <= upper]
    return pd.Series(kept)
def kde_test():
    """Compare Silverman, Scott, and cross-validated KDE bandwidths computed
    on the third column of the first feature CSV under a hard-coded folder,
    printing each estimate and the elapsed time."""
    start = datetime.datetime.now()
    print(start)
    feature_files = listFiles(paths=[r'C:\Users\Owner\mltat\data\mltat\TestFiles\feature_engineering\similarity_score\features'])
    label_files = listFiles(paths=[r'C:\Users\Owner\mltat\data\mltat\TestFiles\feature_engineering\similarity_score\labels'])
    frame_x = pd.read_csv(feature_files[0])
    frame_y = pd.read_csv(label_files[0])
    raw_x = frame_x[frame_x.columns[2]]
    raw_y = frame_y[frame_y.columns[2]]  # loaded but unused, as in the original
    # Outlier-filtered feature values as a numpy array.
    data = outlierDetectionRemoval(raw_x).to_numpy()
    # Rule-of-thumb bandwidth (Silverman).
    silverman_bandwidth = bw_silverman(data)
    print(f"Silverman bandwidth = {silverman_bandwidth}")
    print('Total time {}'.format((datetime.datetime.now()) - start))
    # select_bandwidth allows choosing a different kernel (result unused here).
    silverman_bandwidth_gauss = select_bandwidth(data, bw='silverman', kernel='gauss')
    # Rule-of-thumb bandwidth (Scott).
    scott_bandwidth = bw_scott(data)
    print(f"Scott bandwidth = {scott_bandwidth}")
    print('Total time {}'.format((datetime.datetime.now()) - start))
    # Cross-validated bandwidth: 5-fold grid search over 50 candidates.
    model = GridSearchCV(KernelDensity(), {'bandwidth': np.linspace(0.1, 15, 50)}, cv=5)
    model.fit(data[:, None])
    cv_bandwidth = model.best_params_
    print(f"Silverman bandwidth = {silverman_bandwidth}")
    print(f"Scott bandwidth = {scott_bandwidth}")
    print(f"CV bandwidth = {cv_bandwidth}")
    end = datetime.datetime.now()
    print('Total time {}'.format(end - start))
def test():
    """Sanity check: print a value clamped to an upper bound of 100000.

    FIX: the local was named `max`, shadowing the builtin of the same name.
    Output is unchanged (prints 100).
    """
    value = 100
    clamped = min([value, 100000])
    print(clamped)
# Script entry point: kde_test() is left disabled (slow; hard-coded Windows paths).
if __name__ == '__main__':
    # kde_test()
    test()
"[email protected]"
] | |
e2967cc25acebe0fcc329e17d86697df5ebd829c | c9e4d0f2479ad99b499aa5c2d8b11d27906cdead | /itlwm.sh | 05d1345002d12520ece4065d1985e4c47df40616 | [] | no_license | baddles/itlwm-wifi-info-injector | 4d7fda7ddd5115ee39daae3b25b1a3412d38e6a4 | f63cc7d6d083fd92d999bce143edf2dfdd9fe365 | refs/heads/master | 2023-01-12T10:31:37.244860 | 2020-11-11T03:27:46 | 2020-11-11T03:27:46 | 311,851,879 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 9,299 | sh | #!/usr/bin/env python3
# itlwm.sh
#
#
# Created by Baddles Nguyen on 7/8/20.
#
import argparse
from pathlib import Path
import getpass
import plistlib
import subprocess
import os
# Third-party dependency check: the `keyboard` package drives the interactive
# y/n prompts later in this script; abort with install instructions if missing.
try:
    import keyboard
    print("Keyboard as package found!") # Green.
except Exception as e:
    # Please remember to color format this!
    # NOTE(review): broad `except Exception` also hides non-import failures
    # (the bound name `e` is never used) — consider `except ImportError`.
    print("Cannot find keyboard package!\nPlease install it by typing this command in terminal:\n\"sudo pip3 install keyboard\" (without quotes)!") # Red.
    quit()
# To do:
# - Add command differentiation prefix (-l and --location).
# - Look for a switch between argparse and the other argument parser for argc & user-friendliness.
# - Switch from os.system for copying files to PathLib.
# - Get and compile itlwm from Source.
# - Color-formatting necessary print lines.
# Command-line interface: positional kext location, wifi name, wifi password.
parser = argparse.ArgumentParser(description='Change info.plist inside itlwm.kext to add a new wifi information and load the kext if user wanted to.')
parser.add_argument("location", help="Your itlwm.kext location.")  # (Or itlwm.xcodeproj) — see TODOs above.
parser.add_argument("ssid", help="Your wifi name.")
parser.add_argument("password", help="Your wifi password.")  # May be an empty string for open networks.

args = parser.parse_args()
# BUG FIX: `print(len(args))` raised TypeError because argparse.Namespace has
# no __len__; print the parsed namespace itself instead.
print(args)
p = Path(args.location)
scriptPath = os.path.dirname(os.path.realpath(__file__))
# Expand a leading '~' into the current macOS user's home directory.
x = ""
if args.location[0] == '~':
    x += "/Users/" + getpass.getuser() + args.location[1:len(args.location)]
else:
    x = args.location
kext = Path(x)  # Reserved for later use when we need itlwm.kext.
p = Path(x)
# Functions:
# check()
# Step 1: Check if Xcode is installed and at least version corresponding to 10.14 (Because maximum Xcode version for 10.13 cannot load project).
# If Xcode not found:
# 1.1: Warn that Xcode is not found or Xcode version needs to be > macOS 10.14 minimum equivalent version of Xcode.
# 1.2: quit()
# function ends.
def check():
# backup()
# Step 2: Check if already exists:
# 2.1: itlwm.kext in: ~/Library/Developer/Xcode/DerivedData/itlwm-?/Build/Products/Debug/itlwm.kext. If true: Ask if they want to get new or use that.
# 2.1.1: Use that -> Step 6.
# 2.1.2: Don't use that -> Ask if they want to backup info.plist to retain their wifi info:
# 2.1.2.1: if True: Backup: Copy that into script's location.
# 2.2: itlwm project folder in the same location as scripts folder. If true: Ask if they want to use that or get new from GitHub.
# 2.2.1:
# Use that: -> Step 4.
# Don't use that -> Inside the project, there's an info.plist that the user, if they don't use this script, can use that before they compile to add their wifi info. Therefore, we ask if you want to backup the info.plist inside there
# 2.2.2.1: If true: Check if there is another backup from 2.1.2.1:
# 2.2.2.1.1: If true: Check difference between 2 files:
# If there are no difference then use either.
# If there are differences: Asks which one would they want to preserve.
# A.
# B.
# Both (By merging):
# sameKeys: tuple<id: string, wifi<ssid: string, password: string>>
# differentKeys: <ssid: string, password: string>
# 1. Scan file 1 to get ssid:
# while (not end of the list):
# add wifi info into sameKeys.
# 2. Scan file 2 and rule out scenarios:
# while (not end of the list):
# if (id existed in sameKeys):
# if (ssid is the same):
# if (password is not the same):
# asks which one to keep? 1 OR 2.
# else: (ssid is not the same):
# add 2.wifi into differentKeys.
# add 1.wifi into differentKeys.
# remove 1 entirely from sameKeys.
# else: (id did not exist in sameKeys):
# add wifi info into sameKeys.
# function ends.
# get() # For Scenario 1.2 and 3.
# Step 3: git clone.
# function ends.
# compile()
# add()
# ask()
# load()
# There are a number of scenarios that can happen in user-end when interacting with the script:
# 1. No argument:
# 1.1. User wants to show help.
# 1.2. User wants to compile and load the kext in its original state (no edit to info.plist).
# 1.3. User wants to quit.
# 2. Location-only argument: User wants to load the kext from source.
# 3. ssid-only argument: It's actually a combination of 1.2 and 4.
# 4. Location + ssid argument: User wants to edit the info.plist to add the info of their new shiny wifi - Totally not in 1/4096 chance.
# 5. Invalid argument - Which is password-only.
# Scenario 1:
# while True:
# print("No argument was provided. This either means that you want to show help or compile from source without doing anything else.")
# print("1. Show help.")
# print("2. Compile kext from source (without adding any wifi name to the info.plist).")
# print("3. Quit script.")
# print("Your option: ")
# if keyboard.is_pressed('1'):
# show help
# quit()
# elif keyboard.is_pressed('2'):
#
# break
# elif keyboard.is_pressed('3'):
# print("Alrighty, quitting.")
# quit()
# else:
# print("Invalid input! Please try again.") # Red
# Scenario 2:
# os.system("kextload -v " + kext)
# Scenario 3:
# Scenario 4:
if p.exists() == False:
print(p)
print("Invalid path! Exiting script...")
quit()
print("Given path is valid path!")
# Now we parse the info.plist!
if args.location[len(args.location) -1] != '/':
x += "/Contents/info.plist"
plist = Path(x)
else:
x += "Contents/info.plist"
plist = Path(x)
if (p.exists() == False):
print("info.plist not found! Exiting script...")
quit()
returncode = subprocess.call(["/usr/bin/sudo", "/usr/bin/id"])
if returncode == 1:
print("Sudo did not get initialized! Quitting...")
quit()
# You have to copy the damn info.plist to another location (e.g. Script location) in order to even edit it. Typical permission thingy.
os.system("cp " + x + " " + scriptPath)
# Now, read the plist:
with open(str(scriptPath) + '/info.plist', 'r+b') as pl:
readPlist = plistlib.load(pl)
i = 1 # itlwm counts from 1 (but I really want it to count from 0).
found = False
# Iterates through the list of items to check if ssid exists. If so, change the password.
for item in readPlist["IOKitPersonalities"]["itlwm"]["WiFiConfig"].items():
if item[1]["ssid"] == args.ssid:
item[1]["password"] = args.password
found = True
break
if found == False:
print("ssid name not found in list. Iterating through list to prevent duplication of items.")
while True:
try:
s = "WiFi_" + str(i) # This is the default naming scheme. We're editing based on this.
print(readPlist["IOKitPersonalities"]["itlwm"]["WiFiConfig"][s])
i = i + 1
# To a certain point, it doesn't exists and an attempt to print it causes a KeyError. We catch that KeyError and add the info of the user-inputted Wifi name.
except KeyError as e: # This is the only thing that prevents the script to work.
# Parses the new info.
info = dict(
s = dict(
password = args.password,
ssid = args.ssid
)
)
plistlib.dump(readPlist["IOKitPersonalities"]["itlwm"]["WiFiConfig"], info)
# THIS STILL DOESN'T WORK PROPERLY!
break
# After edit, put it back to the place
os.system("cp -R "+ scriptPath + "/info.plist " + x)
# Remember to change the permission of the kext.
os.system("chown -R root:wheel " + kext)
# Delete the temp info.plist after copying.
os.system("rm -rf " + scriptPath + "/info.plist")
while True:
# Ask if user wants to load kext? y/n button input
print("Do you want to load kext? (y/n): ")
# If it's a yes then load, no then don't, any other key, invalid.
if keyboard.is_pressed('y'):
os.system("kextload -v " + kext)
break;
elif keyboard.is_pressed('n'):
break;
else:
print("Invalid input! Please try again.")
# And we are done with the script.
| [
"[email protected]"
] |
Subsets and Splits
No saved queries yet
Save your SQL queries to embed, download, and access them later. Queries will appear here once saved.