blob_id
stringlengths 40
40
| directory_id
stringlengths 40
40
| path
stringlengths 5
283
| content_id
stringlengths 40
40
| detected_licenses
sequencelengths 0
41
| license_type
stringclasses 2
values | repo_name
stringlengths 7
96
| snapshot_id
stringlengths 40
40
| revision_id
stringlengths 40
40
| branch_name
stringclasses 58
values | visit_date
timestamp[us] | revision_date
timestamp[us] | committer_date
timestamp[us] | github_id
int64 12.7k
662M
⌀ | star_events_count
int64 0
35.5k
| fork_events_count
int64 0
20.6k
| gha_license_id
stringclasses 11
values | gha_event_created_at
timestamp[us] | gha_created_at
timestamp[us] | gha_language
stringclasses 43
values | src_encoding
stringclasses 9
values | language
stringclasses 1
value | is_vendor
bool 2
classes | is_generated
bool 2
classes | length_bytes
int64 7
5.88M
| extension
stringclasses 30
values | content
stringlengths 7
5.88M
| authors
sequencelengths 1
1
| author
stringlengths 0
73
|
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
44df4dcf17e329ecfa0dc1322e01f7781d412da7 | c3ad52c73b7d918932a1cf31b2ade9f48d0dc4a2 | /lino_book/projects/team/wsgi.py | c3796cbfbaca65ca5b039f940d0062ac89d7c749 | [
"BSD-2-Clause"
] | permissive | khchine5/book | db48d800aac323fbb50ebc26167f0be02e3477b5 | b6272d33d49d12335d25cf0a2660f7996680b1d1 | refs/heads/master | 2020-12-28T19:34:13.592286 | 2018-08-22T07:23:34 | 2018-08-22T07:23:34 | 58,850,532 | 1 | 0 | BSD-2-Clause | 2018-08-19T12:27:51 | 2016-05-15T08:03:56 | Python | UTF-8 | Python | false | false | 416 | py | """
WSGI config for superlists project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.11/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
# Point Django at this project's demo settings before building the app.
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "lino_book.projects.team.settings.demo")
# Module-level WSGI callable that servers (gunicorn, uWSGI, mod_wsgi) import.
application = get_wsgi_application()
| [
"[email protected]"
] | |
297dbfe8602834f4797d5e5d7a7dd90597615a5a | 9b64f0f04707a3a18968fd8f8a3ace718cd597bc | /huaweicloud-sdk-rds/huaweicloudsdkrds/v3/model/set_binlog_clear_policy_response.py | 935af92de8fbb7f55e5faea1f885bcdd215c91cb | [
"Apache-2.0"
] | permissive | jaminGH/huaweicloud-sdk-python-v3 | eeecb3fb0f3396a475995df36d17095038615fba | 83ee0e4543c6b74eb0898079c3d8dd1c52c3e16b | refs/heads/master | 2023-06-18T11:49:13.958677 | 2021-07-16T07:57:47 | 2021-07-16T07:57:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,831 | py | # coding: utf-8
import re
import six
from huaweicloudsdkcore.sdk_response import SdkResponse
class SetBinlogClearPolicyResponse(SdkResponse):
    """Response model for the RDS "set binlog clear policy" API call.

    Attributes:
        openapi_types (dict): The key is attribute name
            and the value is attribute type.
        attribute_map (dict): The key is attribute name
            and the value is json key in definition.
    """

    # Attribute names whose values are masked as "****" in to_dict() output.
    sensitive_list = []

    openapi_types = {
        'resp': 'str'
    }

    attribute_map = {
        'resp': 'resp'
    }

    def __init__(self, resp=None):
        """SetBinlogClearPolicyResponse - a model defined in huaweicloud sdk

        :param resp: operation result string returned by the service
        """
        super(SetBinlogClearPolicyResponse, self).__init__()
        self._resp = None
        self.discriminator = None
        # Only route through the setter when a value was actually supplied.
        if resp is not None:
            self.resp = resp

    @property
    def resp(self):
        """Gets the resp of this SetBinlogClearPolicyResponse.

        Operation result.

        :return: The resp of this SetBinlogClearPolicyResponse.
        :rtype: str
        """
        return self._resp

    @resp.setter
    def resp(self, resp):
        """Sets the resp of this SetBinlogClearPolicyResponse.

        Operation result.

        :param resp: The resp of this SetBinlogClearPolicyResponse.
        :type: str
        """
        self._resp = resp

    def to_dict(self):
        """Returns the model properties as a dict"""
        result = {}
        for attr, _ in six.iteritems(self.openapi_types):
            value = getattr(self, attr)
            if isinstance(value, list):
                # Recursively serialize list elements that are models.
                result[attr] = list(map(
                    lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
                    value
                ))
            elif hasattr(value, "to_dict"):
                result[attr] = value.to_dict()
            elif isinstance(value, dict):
                # Recursively serialize dict values that are models.
                result[attr] = dict(map(
                    lambda item: (item[0], item[1].to_dict())
                    if hasattr(item[1], "to_dict") else item,
                    value.items()
                ))
            else:
                # Mask sensitive attributes instead of exposing the raw value.
                if attr in self.sensitive_list:
                    result[attr] = "****"
                else:
                    result[attr] = value
        return result

    def to_str(self):
        # NOTE(review): requires the third-party `simplejson` package at call
        # time; the stdlib `json` module would also serialize this dict.
        import simplejson as json
        return json.dumps(self.to_dict())

    def __repr__(self):
        """For `print` and `pprint`"""
        return self.to_str()

    def __eq__(self, other):
        """Returns true if both objects are equal"""
        if not isinstance(other, SetBinlogClearPolicyResponse):
            return False
        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        """Returns true if both objects are not equal"""
        return not self == other
| [
"[email protected]"
] | |
043efa442dd452c0eff7f619de7ea394af93981a | 7cf8cc1f944946f0378da2e6af4ba1c89466dfb4 | /dbconnector.py | 16f6a0d672ffa809e3434f3739f363da0023aca5 | [] | no_license | ashilz/pythonnew | 8abd164f757efaefa2216d663db2082c241cf4f5 | 5b57e0f1211a67671999bd3a1cae064318ab1e2f | refs/heads/master | 2022-12-10T21:22:02.597080 | 2020-09-16T06:01:53 | 2020-09-16T06:01:53 | 292,829,812 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 288 | py | import mysql.connector
# Open a connection to the local MySQL server.
# NOTE(review): credentials are hard-coded in source; move them to an
# environment variable or a config file before sharing/deploying this script.
db=mysql.connector.connect(
    host="localhost",
    user="root",
    password="Ashil333!",
    # MySQL 8+ defaults to caching_sha2_password; force the classic plugin.
    auth_plugin="mysql_native_password"
)
cursor=db.cursor()
# Ask the server for its version string as a simple connectivity check.
sql="SELECT VERSION()"
cursor.execute(sql)
data=cursor.fetchone()  # single row tuple, e.g. ('8.0.x',)
print("Database version: ",data)
db.close()  # release the connection
"[email protected]"
] | |
fdd4510c68c081929de07559683941fe8168de87 | c87b3d41f4a7afb75733c53c810588d1cc87d828 | /review_sentiment/wsgi.py | ca2f7a9b334696da0fe5899fac13a17c861a6925 | [] | no_license | susvicky/seminar | c5601423f60c36ad2b44ab05f312fc52a745f598 | 580ce4bfc34a63458f6ad6f8abb5935f1418591b | refs/heads/master | 2023-02-24T06:59:05.472513 | 2021-02-01T16:41:28 | 2021-02-01T16:41:28 | 334,999,174 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 409 | py | """
WSGI config for review_sentiment project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/3.0/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
# Point Django at this project's settings module before building the app.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'review_sentiment.settings')
# Module-level WSGI callable that servers (gunicorn, uWSGI, mod_wsgi) import.
application = get_wsgi_application()
| [
"[email protected]"
] | |
4f768229d4c87d06441f81a93ae972add2e23983 | 6d713f120794849f32bf66544d133ef50889c40a | /setup.py | bd62fe2b613dfcb1120179c0f69450609f01bd2c | [] | no_license | stothe2/maru | 5889092c91210b297b61283c10b5ecfadbd16bb4 | 749e017fbf3fd757b89ab31f0bf81f0ea36cfac6 | refs/heads/master | 2020-04-26T21:06:11.007032 | 2015-03-29T00:52:16 | 2015-03-29T00:52:16 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,307 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
""" distribute- and pip-enabled setup.py """
import logging
import os
import re
from setuptools.command.install import install as Install
# ----- overrides -----
# set these to anything but None to override the automatic defaults
packages = None
package_name = None
package_data = None
scripts = None
# ---------------------
# ----- control flags -----
# fallback to setuptools if distribute isn't found
setup_tools_fallback = True
# don't include subdir named 'tests' in package_data
skip_tests = False
# print some extra debugging info
debug = True
# -------------------------
if debug: logging.basicConfig(level=logging.DEBUG)
# distribute import and testing
# Prefer the bundled distribute bootstrap; otherwise fall back to whatever
# setuptools/distribute is installed system-wide (controlled by the
# `setup_tools_fallback` flag above).
try:
    import distribute_setup
    distribute_setup.use_setuptools()
    logging.debug("distribute_setup.py imported and used")
except ImportError:
    # fallback to setuptools?
    # distribute_setup.py was not in this directory
    if not (setup_tools_fallback):
        import setuptools
        # distribute masquerades as setuptools and sets `_distribute`;
        # plain setuptools is rejected when fallback is disallowed.
        if not (hasattr(setuptools,'_distribute') and \
            setuptools._distribute):
            raise ImportError("distribute was not found and fallback to setuptools was not allowed")
        else:
            logging.debug("distribute_setup.py not found, defaulted to system distribute")
    else:
        logging.debug("distribute_setup.py not found, defaulting to system setuptools")
        import setuptools
return [s for s in setuptools.findall('scripts/') if os.path.splitext(s)[1] != '.pyc']
def package_to_path(package):
"""
Convert a package (as found by setuptools.find_packages)
e.g. "foo.bar" to usable path
e.g. "foo/bar"
No idea if this works on windows
"""
return package.replace('.','/')
def find_subdirectories(package):
"""
Get the subdirectories within a package
This will include resources (non-submodules) and submodules
"""
try:
subdirectories = os.walk(package_to_path(package)).next()[1]
except StopIteration:
subdirectories = []
return subdirectories
def subdir_findall(dir, subdir):
"""
Find all files in a subdirectory and return paths relative to dir
This is similar to (and uses) setuptools.findall
However, the paths returned are in the form needed for package_data
"""
strip_n = len(dir.split('/'))
path = '/'.join((dir, subdir))
return ['/'.join(s.split('/')[strip_n:]) for s in setuptools.findall(path)]
def find_package_data(packages):
"""
For a list of packages, find the package_data
This function scans the subdirectories of a package and considers all
non-submodule subdirectories as resources, including them in
the package_data
Returns a dictionary suitable for setup(package_data=<result>)
"""
package_data = {}
for package in packages:
package_data[package] = []
for subdir in find_subdirectories(package):
if '.'.join((package, subdir)) in packages: # skip submodules
logging.debug("skipping submodule %s/%s" % (package, subdir))
continue
if skip_tests and (subdir == 'tests'): # skip tests
logging.debug("skipping tests %s/%s" % (package, subdir))
continue
package_data[package] += subdir_findall(package_to_path(package), subdir)
return package_data
# Builds the C extension (src/concat_core.so, via `make all`) before the
# normal install step runs.
class MyInstall(Install):
    def run(self):
        # NOTE(review): os.system with an interpolated shell string; the path
        # comes from __file__ so injection risk is low, but subprocess.run
        # with a list argument would be safer and clearer.
        cmd = 'cd %s/src && make all' % os.path.dirname(os.path.realpath(__file__))
        assert os.system(cmd) == 0, 'Failed to make concat_core.so'
        Install.run(self)
# ----------- Override defaults here ----------------
# Fill in any of the override knobs (top of file) that were left as None.
if packages is None: packages = setuptools.find_packages()
if len(packages) == 0: raise Exception("No valid packages found")
if package_name is None: package_name = packages[0]
if package_data is None: package_data = find_package_data(packages)
# kludge way of adding concat_core.so (built by MyInstall) to package_data
package_data['maru.io'] += ['concat_core.*']
if scripts is None: scripts = find_scripts()
setuptools.setup(
    name = package_name,
    version = '0.0.1',
    packages = packages,
    scripts = scripts,
    url = 'https://github.com/hahong/maru.git',
    author = 'Ha Hong',
    author_email = '[email protected]',
    description = 'Multi-electrode Array Recording Utilities',
    long_description = open('README.md').read(),
    classifiers = [
        'Development Status :: 3 - Alpha',
        'Intended Audience :: Education',
        'Intended Audience :: Science/Research',
        'Intended Audience :: Developers',
        'Environment :: Console',
        'License :: OSI Approved :: BSD License',
        'Operating System :: MacOS :: MacOS X',
        'Operating System :: POSIX',
        'Operating System :: Unix',
        'Programming Language :: Python',
        'Programming Language :: C',
        'Programming Language :: C++',
        'Topic :: Scientific/Engineering',
        'Topic :: Software Development',
    ],
    platforms = ['Linux', 'OS-X'],
    license = 'BSD',
    keywords = 'Multi-electrode Array MWorks BlackRock Plexon',
    package_data = package_data,
    include_package_data = True,
    install_requires = None,
    # Custom install command that compiles the C extension first.
    cmdclass={'install': MyInstall},
)
| [
"[email protected]"
] | |
f2d192a279a465cd0da09663607672fb9a7a6d8b | a045055cb41f7d53e1b103c3655a17dc4cd18d40 | /python-master/kubernetes/test/test_v1_daemon_endpoint.py | 6b209e0c35ca46df33c6e5352667445de9ff6abb | [] | no_license | 18271693176/copy | 22f863b180e65c049e902de0327f1af491736e5a | ff2511441a2df03817627ba8abc6b0e213878023 | refs/heads/master | 2020-04-01T20:20:28.048995 | 2018-11-05T02:21:53 | 2018-11-05T02:21:53 | 153,599,530 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 962 | py | # coding: utf-8
"""
Kubernetes
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
OpenAPI spec version: v1.10.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import os
import sys
import unittest
import kubernetes.client
from kubernetes.client.rest import ApiException
from kubernetes.client.models.v1_daemon_endpoint import V1DaemonEndpoint
class TestV1DaemonEndpoint(unittest.TestCase):
    """ V1DaemonEndpoint unit test stubs (auto-generated by swagger-codegen). """

    def setUp(self):
        # No shared fixtures needed for these stubs.
        pass

    def tearDown(self):
        # Nothing to clean up.
        pass

    def testV1DaemonEndpoint(self):
        """
        Test V1DaemonEndpoint
        """
        # FIXME: construct object with mandatory attributes with example values
        #model = kubernetes.client.models.v1_daemon_endpoint.V1DaemonEndpoint()
        pass

if __name__ == '__main__':
    unittest.main()
| [
"[email protected]"
] | |
77eb55506ed931c36654ce2d0a5a6c2482183496 | be44c827a826d9d73f1901b7e32ed25ffd63c325 | /challenge 1.py | c092f93e80537e51ae74673083b0d9b20a6e4d35 | [] | no_license | rajesh005/python | 51bf58279731d42032bbeb984be3cf03c77c9b1e | 7d13e5e72d7e20e889c5d4b345e48929c47a2991 | refs/heads/master | 2021-03-22T04:20:08.358091 | 2017-09-22T19:35:40 | 2017-09-22T19:35:40 | 104,265,824 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 231 | py | name = input("What is your name?")
age = int(input("what is your age {0}?".format(name)))
# Eligible ages are 19-30 inclusive (both bounds are strict).
# NOTE(review): if 18-year-olds should also qualify, this needs `age >= 18`.
if age > 18 and age < 31:
    print("huray, you can enjoy the holiday")
else:
    print("I am sorry, you are not allowed for this holiday")
| [
"[email protected]"
] | |
91ed3d06bf69cff60c4eb62e7eb7ebdb9657bb70 | 6a7953d4bcd4f63786c774dec742092d0ecac971 | /voicemap/metrics.py | d80738c480ac850e4250caea7f6e1d1bb0a7d44e | [] | no_license | Vanova/voicemap | f636f139fd8aac3fb74df60e0862f49256ad5b9f | 8d9a4d777a0611eba393d8733fa80dbab1f2c6d4 | refs/heads/master | 2020-05-25T06:37:25.530694 | 2019-09-13T07:30:01 | 2019-09-13T07:30:01 | 187,670,745 | 0 | 0 | null | 2019-05-20T15:48:09 | 2019-05-20T15:48:09 | null | UTF-8 | Python | false | false | 2,500 | py | import numpy as np
import sklearn.metrics as skm
def eer(y_true, y_pred):
    """
    EER (equal error rate) for a binary classifier, in percent.

    y_true: ndarray, [smps; 1], binary ground-truth labels
    y_pred: ndarray, [smps; 1], real valued scores of classifier
    # Output
    EER value (0-100)
    """
    fpr, tpr, thresholds = skm.roc_curve(y_true, y_pred, drop_intermediate=True)
    eps = 1E-6
    # Prepend the ROC origin so the scan below always has a predecessor.
    points = [(0, 0)] + list(zip(fpr, tpr))
    # Find the first ROC point at/above the EER diagonal fpr = 1 - tpr
    # (eps absorbs floating-point noise in the comparison).
    for i, point in enumerate(points):
        if point[0] + eps >= 1 - point[1]:
            break
    p1 = points[i - 1]
    p2 = points[i]
    # Interpolate between p1 and p2: intersect the ROC segment with the
    # line tpr = 1 - fpr; `res` is the fpr at that crossing.
    if abs(p2[0] - p1[0]) < eps:
        # Vertical segment: crossing happens at its (shared) fpr.
        res = p1[0]
    else:
        m = (p2[1] - p1[1]) / (p2[0] - p1[0])
        x = p1[1] - m * p1[0]
        res = (1 - x) / (1 + m)
    return 100. * res
def class_wise_eer(y_true, y_pred):
    """
    Calculate EER per class for a multi-class classifier.

    y_true: ndarray, [smps; n_class]
    y_pred: ndarray, [smps; n_class]
    # Output
    list of EER values, one per class (length n_class)
    """
    n_classes = y_true.shape[1]
    return [eer(y_true=y_true[:, cl], y_pred=y_pred[:, cl])
            for cl in range(n_classes)]
def micro_f1(y_true, y_pred, accuracy=True):
    """
    Micro-averaged F1 measure for a multi-class classifier, in percent.

    y_true: ndarray, [smps; n_class], binary ground truth
    y_pred: ndarray, [smps; n_class], thresholded (step-function) binary integers
    # Output
    F1 score (accuracy=True) or 100 - F1 (accuracy=False)
    """
    assert (len(y_true) == len(y_pred))
    not_true = np.logical_not(y_true)
    not_pred = np.logical_not(y_pred)
    # Pool counts over all samples and classes (micro averaging).
    true_pos = np.sum(np.logical_and(y_true, y_pred))
    false_pos = np.sum(np.logical_and(not_true, y_pred))
    false_neg = np.sum(np.logical_and(y_true, not_pred))
    score = 2.0 * true_pos / (2.0 * true_pos + false_pos + false_neg) * 100.
    return score if accuracy else 100. - score
def pooled_accuracy(y_true, y_pred):
    """
    Accuracy for a multi-class classifier, in percent;
    all scores are pooled in a single list.

    y_true: list, class ids
    y_pred: list, class ids
    # Output
    Accuracy (0-100); 0.0 for empty input (the original raised
    ZeroDivisionError when y_true was empty).
    """
    N = float(len(y_true))
    if N == 0:
        return 0.0
    return sum(int(x == y) for (x, y) in zip(y_true, y_pred)) / N * 100.
def step(a, threshold=0.5):
    """
    Heaviside step function:
    a < threshold -> 0, otherwise 1.

    a: ndarray, [smps; n_class]
    # Output
    binary ndarray [smps; n_class] with the same shape/dtype as `a`
    """
    # zeros_like already initializes everything to 0, so only the
    # at-or-above-threshold entries need to be set.
    result = np.zeros_like(a)
    result[a >= threshold] = 1
    return result
| [
"[email protected]"
] | |
fa09bfa08d15dd8cb19373e64b6add0653565c83 | 6a893c0e3cd9e73be6aa4401d14017ebcb802953 | /venv/bin/pip3 | 49f23255fd988bb13d3aff95dda0c76b661eeda1 | [] | no_license | Gobinde43/Ex1 | 2929034c38442481af8197581591e4f694db6e1f | fc164761aef6d3457fa80b2382f45831f14a2a07 | refs/heads/master | 2020-03-28T11:10:59.311248 | 2018-09-10T16:29:49 | 2018-09-10T16:29:49 | 148,185,890 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 399 | #!/Users/belen/PycharmProjects/Ex1/venv/bin/python
# EASY-INSTALL-ENTRY-SCRIPT: 'pip==10.0.1','console_scripts','pip3'
# Auto-generated console-script launcher: resolves the 'pip3' entry point
# from the pinned pip distribution and invokes it.
__requires__ = 'pip==10.0.1'
import re
import sys
from pkg_resources import load_entry_point
if __name__ == '__main__':
    # Normalize argv[0]: strip the '-script.py(w)'/'.exe' suffixes that
    # Windows launcher wrappers append to the script name.
    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
    sys.exit(
        load_entry_point('pip==10.0.1', 'console_scripts', 'pip3')()
    )
| [
"[email protected]"
] | ||
625783ed57b057e3e185b620b215e217d6f0ca6c | eabf86b6e381ab13d08c60003437946857fefcad | /PyShop/products/migrations/0002_offer.py | 9355bbe40081d5110238eb0b8ace19147fa6ce5f | [
"MIT"
] | permissive | ejrach/exercises-mosh | 4a1d471e513a1c0f7ff78b25c0bff5b0ba699f23 | baaa02dff58652a9910d654be9bdd3e76dece9b7 | refs/heads/master | 2023-03-20T12:10:13.836764 | 2019-10-01T14:39:35 | 2019-10-01T14:39:35 | 198,443,046 | 0 | 0 | MIT | 2023-03-03T07:12:07 | 2019-07-23T14:04:41 | JavaScript | UTF-8 | Python | false | false | 618 | py | # Generated by Django 2.1 on 2019-07-29 15:10
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated migration creating the Offer model.

    NOTE: migrations that have been applied should not be edited by hand.
    """

    # Must run after the initial products migration.
    dependencies = [
        ('products', '0001_initial'),
    ]

    operations = [
        migrations.CreateModel(
            name='Offer',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('code', models.CharField(max_length=10)),
                ('description', models.CharField(max_length=255)),
                ('discount', models.FloatField()),
            ],
        ),
    ]
| [
"[email protected]"
] | |
cc6c5f74b00f256727c6ae1e80e9db76bbab3144 | 0d905aab1b7c1eb2a8b0c04b94d3047adced56ab | /products/migrations/0001_initial.py | 3a1373268cc5132bad7d00b1b9dcd6c7ebd8eabe | [] | no_license | samishken/python-django-onlinestore | 3ff83f418029181c54916b6834e4ba515dfd2d13 | 61f1a2c461a08e5262c98ad4a794eaf2676704f5 | refs/heads/master | 2020-05-04T02:58:32.465186 | 2019-10-22T23:33:55 | 2019-10-22T23:33:55 | 178,938,086 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 649 | py | # Generated by Django 2.1 on 2019-04-01 18:16
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated initial migration creating the Product model.

    NOTE: migrations that have been applied should not be edited by hand.
    """

    initial = True

    dependencies = [
    ]

    operations = [
        migrations.CreateModel(
            name='Product',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=255)),
                ('price', models.FloatField()),
                ('stock', models.IntegerField()),
                # 2083 = historical maximum URL length accepted by IE.
                ('image_url', models.CharField(max_length=2083)),
            ],
        ),
    ]
| [
"[email protected]"
] | |
2c5579eaa66d8bcd568be4c8d7fb7c389c6c475e | 2f71bf4a00e33404d4ff2c4121412b2ca08f518c | /video_classes.py | 453e8761cfa97872909d243e7654b30e7945fcbb | [] | no_license | briskkk/video_info_obtain | 6c9e7103a621f9c8143ecf1cf853a4f1b999e988 | 1b85e8c199f425bac12c4a6c2edb5ab30125a5fb | refs/heads/master | 2020-04-09T10:57:12.547352 | 2018-11-29T14:13:28 | 2018-11-29T14:13:28 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 827 | py | # -*- coding:UTF-8 -*-
class Video():
    """Container for the metadata of a single video (Bilibili-style fields).

    NOTE(review): the `unicode` check in __init__ is a Python 2 idiom;
    on Python 3 passing a title raises NameError -- confirm target runtime.
    """
    def __init__(self,video_aid=None,video_title=None):
        if video_aid:
            self.aid = video_aid
        if video_title:
            # Normalize unicode titles to UTF-8 byte strings (Python 2).
            if isinstance(video_title,unicode):
                video_title = video_title.encode('utf8')
            self.title = video_title
    # Class-level defaults for every metadata field (None = not yet fetched).
    aid = None            # video id
    title = None
    view = None           # view count
    shoucang = None       # presumably favourites count (pinyin) -- confirm
    danmu = None          # presumably danmaku/bullet-comment count -- confirm
    date = None
    cover = None
    reply = None
    description = None
    share = None
    like = None
    dislike = None
    tag = None
    author_mid = None
    author_name = None
    page = None
    credit = None
    coin = None
    spid = None
    cid = None
    Iscopy = None
    subtitle = None
    duration = None
    episode = None
    arcurl = None  # web page address
    tid = None
    typename = None
    pubtime = None
"[email protected]"
] | |
79936fa84285e98af4596782a8a456f500bb3e74 | 95dba78b83dfb1e9bdfcba4964ea48fe725bff33 | /cadenas/cadenas.py | 54e4383c8e79c092b8afd6138d52439065796961 | [] | no_license | engelcituk/python-bases | 36d5fa5cdb73cf87f1ace429beb130519004d841 | 2303fc3482f91889f7d0a95f607a5cace18e3f75 | refs/heads/main | 2022-12-20T16:41:21.548026 | 2020-10-04T22:41:39 | 2020-10-04T22:41:39 | 300,962,375 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 622 | py | texto = "Este es una cadena" # los strings al igual que las tuplas, son inmutables
resultado = len(texto); # len() gives the length of the string
print(resultado)
primeraLetra = texto[0] # indexing works the same way as for lists or tuples
ultimaLetra = texto[-1] # -1 fetches the last element of the string
penultimaLetra = texto[-2] # -2 fetches the second-to-last element
# using an index that does not exist raises an IndexError
print(primeraLetra)
print(ultimaLetra)
print(penultimaLetra)
# we can also build substrings via slicing
subStringConSaltos = texto[1:7:2] # take elements start:stop:step
print(subStringConSaltos)
| [
"[email protected]"
] | |
1c8d914ba6adc344ea26ae605e8c92c396c17221 | dcc38f2d59a609523c2624f4c361e522d6b3360f | /UpWork_Projects/residential-reits/clipperrealty/gBucket.py | 9c855cb1f7bcb8870bd0ce95a7fac9a6d476022d | [
"MIT"
] | permissive | SurendraTamang/Web-Scrapping-1 | 096572319b0a5d6411c6a0b8812baaad8c9a772e | e82dab1685d6d5429950d08f71f58ee6f97653cd | refs/heads/master | 2023-04-04T05:30:38.270111 | 2021-04-13T18:15:40 | 2021-04-13T18:15:40 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,547 | py | import os
import pandas as pd
from google.cloud import storage
import gzip
import csv
from datetime import datetime
import smtplib
from email.message import EmailMessage
def send_email(body):
    """Send an error-alert email for the clipper realty bucket script.

    Credentials come from the EMAIL_USER / EMAIL_PASS environment variables;
    the message is delivered via Gmail over SMTPS (port 465).
    """
    EMAIL_USER = os.environ.get('EMAIL_USER')
    EMAIL_PASS = os.environ.get('EMAIL_PASS')
    msg = EmailMessage()
    msg['Subject'] = 'GOOGLE BUCKET SCRIPT ERROR ALERT: clipper realty'
    msg['From'] = EMAIL_USER
    msg['To'] = '[email protected], [email protected]'
    msg.set_content(body)
    # SMTP_SSL as a context manager guarantees the connection is closed.
    with smtplib.SMTP_SSL('smtp.gmail.com', 465) as smtp:
        smtp.login(EMAIL_USER, EMAIL_PASS)
        smtp.send_message(msg)
def get_both_rows(today, yesterday, which=None):
    """Select rows by merge-indicator category.

    Outer-merges the two DataFrames with an indicator column and returns
    the rows whose ``_merge`` value is 'both' (the default), i.e. rows
    present in both frames, or the given `which` category
    ('left_only' / 'right_only' / 'both'). The indicator column is kept.
    """
    merged = today.merge(yesterday, indicator=True, how='outer')
    wanted = 'both' if which is None else which
    return merged[merged['_merge'] == wanted]
def get_left_rows(today, yesterday, which=None):
    """Select rows by merge-indicator category, defaulting to 'left_only'.

    Outer-merges the two DataFrames with an indicator column and returns
    the rows that appear only in `today` (the default), or those matching
    the given `which` category. The indicator column is kept.
    """
    merged = today.merge(yesterday, indicator=True, how='outer')
    wanted = 'left_only' if which is None else which
    return merged[merged['_merge'] == wanted]
def generate_file_name():
    """Build a timestamped gzip CSV name, e.g. 'data_31-12-2020_23-59.csv.gz'."""
    stamp = datetime.now().strftime("%d-%m-%Y_%H-%M")
    return "data_{}.csv.gz".format(stamp)
def upload_to_bucket(fname):
    """Upload local file `fname` to the 'clipper_realty' GCS bucket.

    The blob key is the same as the local file name; credentials are read
    from ../rr_gcp_credentials.json (relative to the working directory).
    """
    storage_client = storage.Client.from_service_account_json("../rr_gcp_credentials.json")
    bucket = storage_client.get_bucket("clipper_realty")
    blob = bucket.blob(fname)
    blob.upload_from_filename(fname)
def main():
    """Diff today's scrape against yesterday's, upload the result, rotate files."""
    # Reading yesterday's & today's file to compare the data.
    # NOTE(review): bare except swallows every error type; narrowing to
    # (FileNotFoundError, pd.errors.EmptyDataError) would be safer.
    try:
        td = pd.read_csv('./today.csv')
        yd = pd.read_csv('./yesterday.csv')
    except:
        send_email(f'''Hi,\ngBucket.py encountered an error at {datetime.now().strftime("%d/%m/%Y %H:%M:%S")}.\nThe error is: "The clipper realty CSV file is empty. Script terminated without pushing data into Google Bucket".\nPlease see more information here: /home/p.byom26/residentialReits/rrScrapers/clipperrealty/gBucket.py\nContact [email protected] for help.\n\nSent From\nGCP Ubuntu VM''')
        os._exit(1)
    # Generating the gzip csv file name
    fname = generate_file_name()
    # Formatting the dataframe & writing into a gunzipped csv file:
    # unchanged rows keep their timestamp, new (left_only) rows get stamped now.
    df1 = get_both_rows(td, yd)
    df2 = get_left_rows(td, yd)
    df2.loc[df2['_merge'] == 'left_only', 'Timestamp'] = datetime.now().strftime("%d/%m/%Y %H:%M:%S")
    # NOTE(review): DataFrame.append is deprecated in modern pandas
    # (use pd.concat); left as-is to match the deployed pandas version.
    final_df = df1.append(df2, ignore_index=True)
    final_df.drop('_merge', inplace=True, axis=1)
    final_df.to_csv(fname, index=False, compression='gzip')
    # Uploading the file to Google Cloud Bucket
    try:
        upload_to_bucket(fname)
    except:
        send_email(f'''Hi,\ngBucket.py encountered an error at {datetime.now().strftime("%d/%m/%Y %H:%M:%S")}.\nThe error is: "upload_to_bucket function failed. Please check the cedentials file / bucket name".\nPlease see more information here: /home/p.byom26/residentialReits/rrScrapers/clipperrealty/gBucket.py\nContact [email protected] for help.\n\nSent From\nGCP Ubuntu VM''')
        os._exit(1)
    # Deleting the temporary files & promoting today's data to yesterday.csv
    # for the next run.
    os.remove('./today.csv')
    os.remove('./yesterday.csv')
    os.remove(fname)
    final_df.to_csv('./yesterday.csv', index=False)
main()
| [
"[email protected]"
] | |
fbf588d645769abcd13434ffe49d6404d58725c6 | ac4b9385b7ad2063ea51237fbd8d1b74baffd016 | /.history/utils/ocr/handle_image_20210209135127.py | 1135c639d20f290cadf012273d5562fc4ccd1ea3 | [] | no_license | preethanpa/ssoemprep | 76297ef21b1d4893f1ac2f307f60ec72fc3e7c6f | ce37127845253c768d01aeae85e5d0d1ade64516 | refs/heads/main | 2023-03-09T00:15:55.130818 | 2021-02-20T06:54:58 | 2021-02-20T06:54:58 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,735 | py | import os
import cv2
import re
import numpy as np
from PIL import Image
import pytesseract
from pytesseract import Output
from fpdf import FPDF
'''
IMAGE HANDLING METHODS

OpenCV-based preprocessing helpers used before OCR. Each takes and returns
a numpy image array.
'''
# get grayscale image
def get_grayscale(image):
    """Convert a BGR image to single-channel grayscale."""
    return cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
# median filtering (despeckling; despite the name, this APPLIES a blur)
def remove_blur(image):
    """Apply a 5x5 median filter."""
    return cv2.medianBlur(image,5)
# noise removal
def remove_noise(image):
    """Denoise a color image (non-local means)."""
    return cv2.fastNlMeansDenoisingColored(image, None, 10, 10, 7, 15)
#thresholding
def thresholding(image):
    """Binarize using Otsu's automatic threshold."""
    return cv2.threshold(image, 0, 255, cv2.THRESH_BINARY + cv2.THRESH_OTSU)[1]
#dilation
def dilate(image):
    """Dilate with a 5x5 kernel (thickens foreground strokes)."""
    kernel = np.ones((5,5),np.uint8)
    return cv2.dilate(image, kernel, iterations = 1)
#erosion
def erode(image):
    """Erode with a 5x5 kernel (thins foreground strokes)."""
    kernel = np.ones((5,5),np.uint8)
    return cv2.erode(image, kernel, iterations = 1)
def extract_pdf_from_image(fileName='', pdf_path='', action='', psm=3):
    '''
    Run Tesseract OCR on an image and save the result as a searchable PDF.

    fileName: path of the input image
    pdf_path: directory the PDF is written to (created if missing)
    action:   optional preprocessing: 1 = denoise, 2 = grayscale,
              3 = median blur; any other value = no preprocessing
    psm:      Tesseract page-segmentation mode (default 3)
    Returns the path of the written PDF file.
    '''
    #custom_config = r'-c tessedit_char_whitelist=123456789MALEPQRETHANabcdefghijklmnopqrstuvwxyz --psm 6'
    #custom_config = r'-l eng --psm 11'
    # NOTE(review): custom_config is built but not passed to pytesseract
    # below; kept to preserve the original behavior.
    custom_config = r'-l eng --psm ' + str(psm)
    pdfdir = pdf_path
    if not os.path.exists(pdfdir):
        os.makedirs(pdfdir)
    pdfFileName = os.path.basename(fileName).split('.')[0] + '.pdf'
    pdfFilePath = pdfdir + '/' + pdfFileName
    print(f'PDF File Path {pdfFilePath}')
    img = cv2.imread(fileName)
    # Default to the raw image so img1 is always defined -- the original
    # raised NameError whenever `action` was not 1, 2 or 3.
    img1 = img
    if (action == 1):
        img1 = remove_noise(img)
    if (action == 2):
        img1 = get_grayscale(img)
        #img1 = erode(img)
    if (action == 3):
        img1 = remove_blur(img)
    #text = pytesseract.image_to_string(img1, config=custom_config,lang='eng')
    text = pytesseract.image_to_pdf_or_hocr(img1, extension='pdf')
    with open(pdfFilePath, mode = 'w+b') as f:
        f.write(text)
    return pdfFilePath
def convert_text_to_pdf(text='', pdf_path='', filename=''):
    '''
    Convert text content to a simple PDF, one line per cell.

    text:     content to convert; written verbatim to a temp .txt file
              (the file is opened in binary mode, so bytes are expected)
    pdf_path: output directory for the PDF (created if missing)
    filename: base name (without extension) for both temp and output files
    '''
    tempdir = "/tmp"
    pdfdir = pdf_path
    textFileName = tempdir + '/' + filename + ".txt"
    pdfFileName = pdfdir + '/' + filename + ".pdf"
    if not os.path.exists(tempdir):
        os.makedirs(tempdir)
    if not os.path.exists(pdfdir):
        os.makedirs(pdfdir)
    # save FPDF() class into a variable pdf
    pdf = FPDF()
    # Add a page
    pdf.add_page()
    # set style and size of font that you want in the pdf
    pdf.set_font("Arial", size = 15)
    with open(textFileName, mode = 'w+b') as f:
        f.write(text)
    line = 1
    # `with` guarantees the handle is closed (the original leaked it by
    # opening without ever calling close()).
    with open(textFileName, "r") as f:
        for x in f:
            # Strip em-dash and curly quotes: FPDF's built-in latin-1
            # core fonts cannot encode them.
            x1 = re.sub(u"(\u2014|\u2018|\u2019|\u201c|\u201d)", "", x)
            pdf.cell(100, 10, txt=x1, ln=line, align='L')
            line = line + 1
    # save the pdf with name .pdf ('F' = write to local file)
    pdf.output(pdfFileName, 'F')
"{[email protected]}"
] | |
3c79eb5b77740128d1f1643ffc6b9fb5be3338ca | 27eb66e8d768f16a176766001ccaac79ded8a901 | /src/tools/tree_to_gas.py | 136e6e2306be4d0115985d6b77a9c793d4523b46 | [] | no_license | LukeEcomod/TreeBoxModel | d683df5caf7ac7f4186fd5a588bc7bbb6ecf2975 | 8cdbe86de9b08bc71841f831dfd8cd14eec8ed55 | refs/heads/main | 2023-08-17T23:31:10.692566 | 2023-05-23T07:07:07 | 2023-05-23T07:07:07 | 244,628,430 | 0 | 0 | null | 2023-08-14T22:25:59 | 2020-03-03T12:19:08 | Python | UTF-8 | Python | false | false | 3,763 | py | from typing import Dict, Tuple
import numpy as np
def convert_tree_to_gas_properties(model, gas_dims: Tuple, c_gas_soil=0.0) -> Dict:
    '''Convert tree properties in model.py to gas properties to be used in gas.py.

    gas_dims is (axial, radial) element counts. Returns a dict with keys
    'radius', 'height', 'velocity' (arrays of shape gas_dims) and
    'root_uptake' (root flux scaled by c_gas_soil).
    '''
    # r_mask marks each radial element as heartwood (0), xylem (1), phloem (2);
    # h_mask maps each axial gas element to a tree element index.
    r, r_mask = convert_tree_radii_to_gas(model, gas_dims)
    h, h_mask = convert_tree_height_to_gas(model, gas_dims)
    v = convert_tree_flux_to_velocity(model)  # (xylem, phloem) velocities
    n = gas_dims[0]*gas_dims[1]
    params = {}
    # Uniform element width/height broadcast over the whole gas grid.
    params['radius'] = np.repeat(r, repeats=n).reshape(gas_dims)
    params['height'] = np.repeat(h, repeats=n).reshape(gas_dims)
    params['velocity'] = np.zeros(gas_dims, dtype=np.float64)
    params['root_uptake'] = convert_root_fluxes_to_source_term(model, c_gas_soil)
    # set the velocity to correct values: heartwood carries no flow (0.0),
    # xylem columns take v[0], phloem columns take v[1], each row indexed
    # by the tree element it maps to via h_mask.
    for row, _ in enumerate(params['velocity']):
        params['velocity'][row, :] = np.array([0.0 if i == 0
                                               else v[0][h_mask[row], 0] if i == 1
                                               else v[1][h_mask[row], 0] for i in r_mask])
    return params
def convert_tree_flux_to_velocity(model):
    '''Convert axial upward flux to sap flow velocity to be used in gas.py.

    Returns a (xylem_velocity, phloem_velocity) tuple of column vectors
    with one row per tree element, in m/s.
    '''
    water_density = 1000  # kg m^-3
    _, axial_flux, _ = model.axial_fluxes()
    # Flip the sign convention (upward positive) and convert mass flux to m3/s.
    volume_flux = -1.0 * axial_flux / water_density
    n_elem = model.tree.num_elements
    xylem_velocity = volume_flux[:, 0].reshape(n_elem, 1) / model.tree.element_area([], 0)
    phloem_velocity = volume_flux[:, 1].reshape(n_elem, 1) / model.tree.element_area([], 1)
    return (xylem_velocity, phloem_velocity)
def convert_root_fluxes_to_source_term(model, c_gas_soil: float):
    ''' Scale root water fluxes into the source term used by
    sources_and_sinks_func in gas.py.'''
    water_density = 1000  # kg m^-3
    return model.root_fluxes() / water_density * c_gas_soil
def convert_tree_radii_to_gas(model, gas_dims: Tuple) -> Tuple:
    """Split the total tree radius into equally wide gas elements.

    Args:
        model: model instance exposing tree.element_radius
        gas_dims (Tuple): number of axial and radial elements in the gas,
            in this order.

    Returns:
        Tuple (element_width, tissue_mask) where tissue_mask labels each
        radial element as heartwood (0), sapwood/xylem (1) or phloem (2).
    """
    n_radial = gas_dims[1]
    radii = model.tree.element_radius[0, :]
    # Cumulative tissue boundaries as fractions of the total radius.
    boundaries = np.cumsum(radii / np.sum(radii))
    width = np.sum(radii) / n_radial
    tissue = []
    for i in range(n_radial):
        frac = i / n_radial
        if frac < boundaries[0]:
            tissue.append(0)
        elif frac < boundaries[1]:
            tissue.append(1)
        else:
            tissue.append(2)
    return (width, tissue)
def convert_tree_height_to_gas(model, gas_dims: Tuple) -> Tuple:
    """ Convert tree element heights to equally spaced heights such that

    .. math::
        n*h_{new} = \\sum_{i=1}^m h_i

    where

    * :math:`n`: gas_dims[0] (number of axial elements in gas).
    * :math:`m`: number of axial elements in the tree.
    * :math:`h_{new}`: new element height.
    * :math:`h_i`: element heights in the tree.

    Returns (h_new, mask) where mask is an (n, 1) int array mapping each
    axial gas element to the index of the tree element it falls within.

    Args:
        model: model instance exposing tree.element_height
        gas_dims (Tuple): number of axial and radial elements in the gas,
            in this order.
    """
    total_height = np.sum(model.tree.element_height)
    cum_fraction = np.cumsum(model.tree.element_height / total_height)
    n_axial = gas_dims[0]
    # Fractional position of each gas element; pin the last one to 1.0 so
    # it always lands inside the final tree element.
    gas_fraction = np.arange(n_axial) / n_axial
    gas_fraction[-1] = 1.0
    mask = np.zeros((n_axial, 1), dtype='int')
    for ind, upper in enumerate(cum_fraction):
        if ind == 0:
            selector = gas_fraction <= upper
        else:
            selector = (gas_fraction <= upper) & (gas_fraction > cum_fraction[ind - 1])
        mask[np.where(selector)] = ind
    return (total_height / n_axial, mask)
| [
"[email protected]"
] | |
bc6fcd4d1fdd870fd6a3d1cfae12828f6c8c1f05 | c9e1c95d79a61c2dc909e79a4e45bd779cf119ec | /hw02/main.py | b4304bd0d64d16e26a93dd191241136825af379b | [] | no_license | wq-LearnHub/IS | 1a136210b5d57a78aca7dea69ac7aba1b588c368 | 894ff7622dd425e8e1816c0e273a19e488340272 | refs/heads/master | 2022-01-27T22:14:38.326789 | 2018-12-01T11:23:57 | 2018-12-01T11:23:57 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 907 | py | import turtle
def move(x, y):
    """Lift the pen, jump to (x, y), then lower the pen again."""
    turtle.penup()
    turtle.goto(x, y)
    turtle.pendown()
def ls(x, y, z, side=46.264):
    """Draw a filled five-pointed star.

    Args:
        x, y: Coordinates where drawing starts (pen is moved there first).
        z: Initial heading in degrees.
        side: Edge length of the star. Defaults to the original small-star
            size so existing three-argument calls behave unchanged.
    """
    move(x, y)
    turtle.setheading(z)
    turtle.begin_fill()
    for _ in range(5):
        turtle.forward(side)
        turtle.right(144)  # 144-degree turns trace a five-pointed star
    turtle.end_fill()
# Canvas setup: window sized relative to the screen, red pen and fill.
# The drawing below appears to be a flag: a red 660x440 field with one
# large and four small yellow stars in the upper-left area.
turtle.setup(width=0.9,height=0.9)
turtle.color('red','red')
turtle.speed(5)
# Filled red rectangle (the flag field), drawn from the top-left corner.
move(-330,220)
turtle.begin_fill()
turtle.forward(660)
turtle.right(90)
turtle.forward(440)
turtle.right(90)
turtle.forward(660)
turtle.right(90)
turtle.forward(440)
turtle.right(90)
turtle.end_fill()
# Large yellow star.
turtle.color('yellow','yellow')
move(-220,176)
turtle.right(72)
turtle.begin_fill()
for i in range(5):
    turtle.forward(138.793)
    turtle.right(144)
turtle.end_fill()
turtle.setheading(0)
# Four small stars arranged around the large one; each call supplies its
# own position and heading.
ls(-128.865,164.681,53)
ls(-87.779,128.888,30)
ls(-87.154,72.044,5)
ls(-127.185,35.74,336)
turtle.hideturtle()
turtle.done()
| [
"[email protected]"
] | |
4b8e155e4c2aaf46296152fc46d589b1cf6822ba | 40f4417f983fdd9176d003577dfd06934801e26d | /bananas/management/commands/syncpermissions.py | fd5b1d4b8c8ea2bacf52e37ed3ccce7e0167b3da | [
"MIT"
] | permissive | lydell/django-bananas | 8709adafabbbc8511a4738cb82a55e14617b35e3 | 42a72a7c5af15bb3ae4fe014bcbc443644044c07 | refs/heads/master | 2023-06-04T18:33:56.747112 | 2017-09-28T14:26:52 | 2017-09-28T14:26:52 | 105,158,794 | 0 | 0 | null | 2017-09-28T14:29:47 | 2017-09-28T14:29:47 | null | UTF-8 | Python | false | false | 1,482 | py | from django.contrib.auth.models import Permission
from django.core.management.base import BaseCommand, CommandError
class Command(BaseCommand):
    """Management command that registers content types and permissions for
    every bananas ``AdminView``-based view found in the admin registry.

    Fix: removed a leftover ``import pdb; pdb.set_trace()`` that halted the
    command on every newly discovered admin view.
    """
    help = "Create admin permissions"

    def handle(self, *args, **options):
        # This command takes no positional arguments.
        if args:
            raise CommandError("Command doesn't accept any arguments")
        return self.handle_noargs(**options)

    def handle_noargs(self, *args, **options):
        # Imports are local so Django apps are fully loaded before use.
        from bananas import admin
        from django.contrib import admin as django_admin
        from django.contrib.contenttypes.models import ContentType

        django_admin.autodiscover()

        for model, _ in admin.site._registry.items():
            # Only models whose View attribute is a bananas AdminView count.
            if issubclass(getattr(model, 'View', object), admin.AdminView):
                meta = model._meta
                # Ensure a ContentType row exists for this admin view.
                ct, created = ContentType.objects.get_or_create(
                    app_label=meta.app_label,
                    model=meta.object_name.lower(),
                )
                if created:
                    print('Found new admin view: {} [{}]'.format(
                        ct.name, ct.app_label
                    ))
                # Create any missing permissions declared on the model Meta.
                for codename, name in model._meta.permissions:
                    p, created = Permission.objects.get_or_create(
                        codename=codename, name=name, content_type=ct
                    )
                    if created:
                        print('Created permission: {}'.format(name))
| [
"[email protected]"
] | |
1e2ac21c895dbf7785efeca235de30dbd913ec1e | e45efe468372014c9b9e6c62a6bdd70611dc47a9 | /database/mapmaker/migrations/0005_auto_20150626_2120.py | ec6cda4235a77905d959b97f4175a3f8e8678a12 | [] | no_license | nepakala/hostgator-django | 90e626f8f3a2f339a284c862d5190bc059c5ab72 | 98d8d8f74e1a2732ab5dc8f75d58eb076757d591 | refs/heads/master | 2021-01-12T07:48:01.262560 | 2015-10-21T15:43:26 | 2015-10-21T15:43:26 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 405 | py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    """Auto-generated migration: alter ``City.company`` to a nullable
    TextField with an empty-bytes default."""
    dependencies = [
        ('mapmaker', '0004_city_company'),
    ]
    operations = [
        migrations.AlterField(
            model_name='city',
            name='company',
            field=models.TextField(default=b'', null=True),
        ),
    ]
| [
"[email protected]"
] | |
1bbe3d98d272b21dcdc471326b887f7e3836e070 | ddbaed1e0707b6bb4f388e11d4038c340c2ede16 | /EstruturaDeRepeticao/Ex_11.py | 31469419965fdcdcc618796024e76eb61073dcca | [] | no_license | JoaoZati/ListaDeExerciciosPythonPro | bb3ca47c8488029f1be5984f3256c15ec1123e9a | 0fda738590813a6526c688213898504e3e58a165 | refs/heads/main | 2023-07-13T13:36:17.334002 | 2021-08-21T20:26:21 | 2021-08-21T20:26:21 | 395,388,834 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 519 | py | #%% 11-Altere para mostrar soma
"""
Altere o programa anterior para mostrar no final a soma dos números.
"""
while True:
try:
num_1 = int(input('Digite um numero inteiro: '))
num_2 = int(input('Digite um numero inteiro: '))
except ValueError:
print('Os Numeros tem que ser inteiros')
else:
break
minimo = min(num_1, num_2)
maximo = max(num_1, num_2)
soma = 0
for i in range(minimo + 1, maximo):
soma += i
print(f'Os numeros entre {num_1} e {num_2} é: {soma}')
| [
"[email protected]"
] | |
12e987c5a4b573b3ed6fa91f2c42fc5933d70d60 | db6f57d0da04eb0baa840c6b0486545eafa784b8 | /py_backwards/transformers/__init__.py | d0cb70eb51b6ac028f624a98b5f89f8bfe90ec7f | [
"MIT"
] | permissive | JacksonKearl/py-backwards | 49ddaaf3891be5ff7bd978d3070bd253d6b49aad | 6ad0b864eebecbd23c319c2c569159931e45b811 | refs/heads/master | 2021-01-20T04:36:29.362742 | 2017-04-28T12:37:02 | 2017-04-28T12:37:02 | 89,704,552 | 0 | 0 | null | 2017-04-28T12:35:27 | 2017-04-28T12:35:27 | null | UTF-8 | Python | false | false | 1,874 | py | from traceback import format_exc
from typing import List, Type
from typed_ast import ast3 as ast
from typed_astunparse import unparse, dump
from autopep8 import fix_code
from ..types import CompilationTarget
from .dict_unpacking import DictUnpackingTransformer
from .formatted_values import FormattedValuesTransformer
from .functions_annotations import FunctionsAnnotationsTransformer
from .starred_unpacking import StarredUnpackingTransformer
from .variables_annotations import VariablesAnnotationsTransformer
from .yield_from import YieldFromTransformer
from .return_from_generator import ReturnFromGeneratorTransformer
from .python2_future import Python2FutureTransformer
from .super_without_arguments import SuperWithoutArgumentsTransformer
from .class_without_bases import ClassWithoutBasesTransformer
from .base import BaseTransformer
# Ordered list of AST transformers; transform() applies each one whose
# declared minimum target is >= the requested compilation target.
transformers = [DictUnpackingTransformer,
                StarredUnpackingTransformer,
                FormattedValuesTransformer,
                FunctionsAnnotationsTransformer,
                VariablesAnnotationsTransformer,
                YieldFromTransformer,
                ReturnFromGeneratorTransformer,
                Python2FutureTransformer,
                SuperWithoutArgumentsTransformer,
                ClassWithoutBasesTransformer]  # type: List[Type[BaseTransformer]]
def transform(path: str, code: str, target: CompilationTarget) -> str:
    """Apply every applicable transformer for the passed target, in order.

    Fixes: the unparse step previously sat outside the loop and ``code`` was
    never updated between transformers, so only the last applicable
    transformer's changes survived; each transformer's output now feeds the
    next one.  The bare ``except:`` was also narrowed to ``Exception``.

    Args:
        path: Source path, used for parse error locations and error reports.
        code: Source code to transform.
        target: Compilation target version.

    Returns:
        The transformed source, run through autopep8.

    Raises:
        TransformationError: if a transformed tree cannot be unparsed.
    """
    from ..exceptions import TransformationError
    for transformer in transformers:
        if transformer.target >= target:
            tree = ast.parse(code, path)
            transformer().visit(tree)
            try:
                code = unparse(tree)
            except Exception:
                raise TransformationError(path, transformer,
                                          dump(tree), format_exc())
    return fix_code(code)
| [
"[email protected]"
] | |
f0ab5a59b3a151e150796fcdf89cf12dc3b7b9fa | 145bf6912b4ec6602fe5ca2486db5dc6121cb9e4 | /console.py | 10001a71c5869a1631302219814394ff82706049 | [
"Apache-2.0"
] | permissive | CW2X/idewave-core | 03a8da474c9fb60fb0dcf1900f12f9d5b9a4b157 | a48ee0ce3ff8f05e6baf9c34c01f21ae51f555fe | refs/heads/master | 2020-09-12T01:47:05.996471 | 2019-09-03T06:15:42 | 2019-09-03T06:15:42 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 9,379 | py | import argparse
from DB.CreateDB import create_db, create_tables
from DB.DropDB import drop_db
from DB.Fixtures.Loader.load_world_data import load_world_data
from Account.AccountManager import AccountManager
from World.Object.Item.ItemManager import ItemManager
from World.Object.Unit.Spell.SpellManager import SpellManager
from World.Object.Unit.Player.Skill.SkillManager import SkillManager
from World.Region.RegionManager import RegionManager
from World.Object.Unit.UnitManager import UnitManager
from Utils.Debug.Logger import Logger
def process():
    """Parse sys.argv and dispatch one console command.

    Each command group (db/account/item/spell/default_spell/skill/
    default_skill/region) gets its own sub-parser.  ``parse_known_args()``
    is used so that the group name and the subcommand stay in the list of
    unrecognized tokens: ``args[0]`` holds the parsed option values,
    ``args[1][0]`` is the command group and ``args[1].pop()`` the
    subcommand.
    """
    parser = argparse.ArgumentParser(prog='cmd')
    commands = parser.add_subparsers(help='Available console commands')
    # database: create/drop/recreate the DBs and load world fixtures
    db_parser = commands.add_parser('db')
    # args[0] = parsed options, args[1] = leftover tokens (group + subcommand).
    args = db_parser.parse_known_args()
    parser_name = args[1][0]
    subcommand = args[1].pop()
    if parser_name == 'db':
        if subcommand == 'create':
            create_db()
            Logger.success('All dbs was created')
        elif subcommand == 'create_tables':
            create_tables()
            Logger.success('All required tables was created')
        elif subcommand == 'drop':
            drop_db()
            Logger.warning('All db was dropped')
        elif subcommand == 'recreate':
            drop_db()
            create_db()
            create_tables()
            Logger.notify('DB was successfully re-created')
        elif subcommand == 'load_data':
            load_world_data()
        elif subcommand == 'recreate_with_load':
            drop_db()
            create_db()
            create_tables()
            Logger.notify('DB was successfully re-created')
            load_world_data()
    # accounts: player account management
    account_parser = commands.add_parser('account')
    account_parser.add_argument('-n', '--name')
    account_parser.add_argument('-p', '--password')
    args = account_parser.parse_known_args()
    parser_name = args[1][0]
    subcommand = args[1].pop()
    if parser_name == 'account':
        if subcommand == 'create':
            with AccountManager() as account_mgr:
                account_mgr.create(name=args[0].name, password=args[0].password)
                Logger.success('Account "{}" created successfully!'.format(args[0].name))
    # items: item template creation
    item_parser = commands.add_parser('item')
    item_parser.add_argument('-d', '--display_id')
    item_parser.add_argument('-i', '--item_type')
    item_parser.add_argument('-e', '--entry')
    args = item_parser.parse_known_args()
    parser_name = args[1][0]
    subcommand = args[1].pop()
    if parser_name == 'item':
        if subcommand == 'create':
            with ItemManager() as item_mgr:
                item_mgr.create(
                    display_id=args[0].display_id,
                    item_type=args[0].item_type,
                    entry=int(args[0].entry)
                ).save()
                Logger.success('Item "{}" created successfully!'.format(args[0].entry))
    # spells: spell template creation
    spell_parser = commands.add_parser('spell')
    spell_parser.add_argument('-e', '--entry')
    spell_parser.add_argument('-n', '--name')
    spell_parser.add_argument('-c', '--cost')
    spell_parser.add_argument('-s', '--school')
    spell_parser.add_argument('-r', '--range')
    args = spell_parser.parse_known_args()
    parser_name = args[1][0]
    subcommand = args[1].pop()
    if parser_name == 'spell':
        if subcommand == 'create':
            SpellManager().create(
                entry=args[0].entry,
                name=args[0].name,
                cost=args[0].cost,
                school=args[0].school,
                range=args[0].range
            ).save()
            Logger.test('Spell "{}" ({}) created successfully!'.format(args[0].name, args[0].entry))
    # default spells: starting spells bound to a race/class pair
    default_spell_parser = commands.add_parser('default_spell')
    default_spell_parser.add_argument('-e', '--entry')
    default_spell_parser.add_argument('-r', '--race')
    default_spell_parser.add_argument('-c', '--char_class')
    args = default_spell_parser.parse_known_args()
    parser_name = args[1][0]
    subcommand = args[1].pop()
    if parser_name == 'default_spell':
        if subcommand == 'create':
            SpellManager().create_default_spell(
                entry=args[0].entry,
                race=args[0].race,
                char_class=args[0].char_class
            ).save()
            Logger.test(
                'Default spell "{}" ({}:{}) created successfully!'.format(
                    args[0].entry,
                    args[0].race,
                    args[0].char_class
                )
            )
    # skills (older commented-out draft kept below for reference)
    # skill_parser = commands.add_parser('skill')
    # skill_parser.add_argument('-e', '--entry')
    # skill_parser.add_argument('-n', '--name')
    #
    # args = skill_parser.parse_known_args()
    # parser_name = args[1][0]
    # subcommand = args[1].pop()
    #
    # if parser_name == 'skill':
    #     if subcommand == 'create':
    #         SkillManager().create(
    #             entry=args[0].entry,
    #             name=args[0].name
    #         ).save()
    #
    #         Logger.test('Skill "{}" ({}) created successfully!'.format(args[0].name, args[0].entry))
    # skills: skill template creation
    skill_parser = commands.add_parser('skill')
    skill_parser.add_argument('-e', '--entry')
    skill_parser.add_argument('-n', '--name')
    skill_parser.add_argument('--min')
    skill_parser.add_argument('--max')
    args = skill_parser.parse_known_args()
    parser_name = args[1][0]
    subcommand = args[1].pop()
    if parser_name == 'skill':
        if subcommand == 'create':
            SkillManager().create(
                entry=args[0].entry,
                name=args[0].name,
                min=args[0].min,
                max=args[0].max
            ).save()
            Logger.success('Skill "{}" ({}) created successfully!'.format(args[0].name, args[0].entry))
    # default skills: starting skills bound to a race/class pair
    default_skill_parser = commands.add_parser('default_skill')
    default_skill_parser.add_argument('-e', '--entry')
    default_skill_parser.add_argument('-r', '--race')
    default_skill_parser.add_argument('-c', '--char_class')
    args = default_skill_parser.parse_known_args()
    parser_name = args[1][0]
    subcommand = args[1].pop()
    if parser_name == 'default_skill':
        if subcommand == 'create':
            SkillManager().create_default_skill(
                entry=args[0].entry,
                race=args[0].race,
                char_class=args[0].char_class
            ).save()
            Logger.success(
                'Default skill "{}" ({}:{}) created successfully!'.format(
                    args[0].entry,
                    args[0].race,
                    args[0].char_class
                )
            )
    # regions: region creation, default spawn locations and region units
    region_parser = commands.add_parser('region')
    region_parser.add_argument('-i', '--identifier')
    region_parser.add_argument('--y1')
    region_parser.add_argument('--y2')
    region_parser.add_argument('--x1')
    region_parser.add_argument('--x2')
    region_parser.add_argument('-c', '--continent_id')
    # # arguments for default region
    region_parser.add_argument('-r', '--race')
    region_parser.add_argument('-m', '--map_id')
    # # arguments for region unit # #
    region_parser.add_argument('-u', '--unit_entry')
    # # arguments for both default region and region unit
    region_parser.add_argument('-x')
    region_parser.add_argument('-y')
    region_parser.add_argument('-z')
    args = region_parser.parse_known_args()
    parser_name = args[1][0]
    subcommand = args[1].pop()
    if parser_name == 'region':
        if subcommand == 'create':
            with RegionManager() as region_mgr:
                region_mgr.create(
                    identifier=args[0].identifier,
                    y1=args[0].y1,
                    y2=args[0].y2,
                    x1=args[0].x1,
                    x2=args[0].x2,
                    continent_id=args[0].continent_id,
                ).save()
                Logger.notify('Region "{}" created successfully!'.format(args[0].identifier))
        elif subcommand == 'add_default_location':
            with RegionManager() as region_mgr:
                region_mgr.create_default_location(
                    identifier=args[0].identifier,
                    x=args[0].x,
                    y=args[0].y,
                    z=args[0].z,
                    race=args[0].race,
                    map_id=args[0].map_id
                )
                Logger.success('Default location ({}) for race "{}" successfully added'.format(
                    args[0].identifier, args[0].race
                ))
        elif subcommand == 'add_unit':
            with UnitManager() as unit_mgr:
                unit_mgr.new(
                    entry=args[0].unit_entry,
                    identifier=args[0].identifier,
                    x=args[0].x,
                    y=args[0].y,
                    z=args[0].z
                ).set_stats().save()
                Logger.notify(
                    'Unit "{}" IN ({} - {} - {}) created successfully!'.format(
                        args[0].unit_entry,
                        args[0].x,
                        args[0].y,
                        args[0].z
                    )
                )
if __name__ == "__main__":
    # Run the CLI dispatcher only when executed as a script, so importing
    # this module no longer parses sys.argv as a side effect.
    process()
| [
"[email protected]"
] | |
8cd47b841ad575b55bb9be73bdc8df9e6dbcf60e | d5500d243e0067413390ae7402be55a10f98bbbf | /tests/unit/test_general.py | a6abbdf24807cb6ed52e07acce0c91ca7bb66dca | [
"MIT"
] | permissive | lumi-io/boards | cbc5a844033ad67f39e45c681fddba091a7220d9 | d56aafc70e0eede8e3027159bfb755023c4ecbb3 | refs/heads/main | 2023-04-25T20:33:43.668297 | 2021-05-28T19:05:22 | 2021-05-28T19:05:22 | 306,117,913 | 5 | 0 | MIT | 2021-06-08T15:31:51 | 2020-10-21T18:47:29 | Python | UTF-8 | Python | false | false | 166 | py | def test_homepage_with_fixture(test_client):
"""Testing to see if server is running."""
response = test_client.get('/')
assert response.status_code == 200 | [
"[email protected]"
] | |
5e95d64ec34ad4194d5831f1bcdd85b880724440 | 9271b14425ceaaf0741b758229cb7a8ab9ad8a85 | /reset.py | c5a59c3612d42a69d043d0bdfe13ebd991130c97 | [] | no_license | jeg100/Sebastian | 78a6273d8fea3372fb0a6ae0489c35674dbd9636 | 6f9f4ac2eac8bfa153f8846d6829cd341f570334 | refs/heads/master | 2021-03-16T13:35:51.767993 | 2021-01-22T18:18:50 | 2021-01-22T18:18:50 | 246,912,030 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 472 | py | ### Author: John Grezmak
###
### Reset the Sebastian robot using reset function with mode=2
import time
from med_Sebastian_info import *
from Sebastian_library_Maestro import Sebastian
# Create an instance of the Sebastian robot class with the chosen reset
# configuration.
reset_config = reset5_pos_info # Choice of reset configuration
robot = Sebastian(center_pos_info, pins_info, reset_config, robot_dims)
# Query the servo controller (robot.mu -- presumably the Maestro unit,
# given the Sebastian_library_Maestro import; confirm) for errors before
# attempting the reset.
error = robot.mu.get_error()
if error:
    print(error)
else:
    print('Robot initialized')
# Perform the reset using mode 2 (see Sebastian.reset for mode semantics).
robot.reset(mode=2)
| [
"[email protected]"
] | |
025216dbecfd085dfb8c137b57bfa829d502fccc | 7fe5fea5abfe7e6a88730ebb5ee8beb4dd3498c2 | /GammaStack/sampletask/urls.py | db67ec686f1f7620941b32197613a9db94d74cf2 | [] | no_license | RITESH-Kapse/GammaStack-Assignment | 5c984db711dc8faae0be217b8efee2e041c5a1c1 | ca647c643900b8030cd6b866e6cf1200ccd21ca1 | refs/heads/master | 2023-05-02T20:51:36.399628 | 2021-06-01T20:49:12 | 2021-06-01T20:49:12 | 372,957,448 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 111 | py | from django.urls import path
from . import views
# URL routes for the sampletask app: the root path maps to views.hello.
urlpatterns = [
    path('', views.hello, name="hello"),
]
| [
"[email protected]"
] | |
1af4d6fffa03acec2ae8ac89440f41245004a03e | 46cb7d5fc2585bffce006f6d81cef9edbe961e87 | /2020-08-19/treasure_hunting.py | e72d5585373e201c56703f3ef552022178820d49 | [
"MIT"
] | permissive | garciaha/DE_daily_challenges | b17dbe1697552330fe237168855530e247d89d29 | 90805a1d0275647fafecc80ed2cf3548d1b47bff | refs/heads/main | 2023-06-16T23:46:37.576470 | 2021-07-16T00:00:14 | 2021-07-16T00:00:14 | 382,933,335 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,816 | py | """Helping Alex with Treasure
Alex and Cindy, two students who recently spent some time on treasure hunting.
Apart from scrap metal, they found a number of boxes full of old coins. Boxes
are of different value and now are lined up in a row. Cindy proposes an idea to
divide the treasure into two parts. She thinks that a fair way is that she and
Alex take turns, and each of them chooses one box from either left or right side
of the line. Cindy is a very generous person and lets Alex go first.
Alex wants to check whether this idea is actually good for him. He asks you to
write a program to calculate the total value that he will get compared to how
much Cindy will get if he chooses a box first. You can be sure that they both
are very smart, and always select the next box in such a way that it leads to the
best overall individual solution for them. This means they may not always choose
the highest value box of the two currently available in order to ensure they
get a higher value box later.
Notes
The function should return alex_value - cindy_value
"""
def treasure_hunting(boxes):
    """Return Alex's total value minus Cindy's, assuming optimal play.

    Players alternately take one box from either end of the row and Alex
    moves first.  Solved with the classic interval DP: dp[i][j] is the best
    achievable margin (current player's total minus the opponent's) on the
    sub-row boxes[i..j].

    Args:
        boxes: Values of the boxes laid out in a row.

    Returns:
        alex_value - cindy_value under optimal play (0 for an empty row).
    """
    n = len(boxes)
    if n == 0:
        return 0
    # dp[i][j]: margin for the player to move when boxes[i..j] remain.
    dp = [[0] * n for _ in range(n)]
    for i in range(n):
        dp[i][i] = boxes[i]
    for length in range(2, n + 1):
        for i in range(n - length + 1):
            j = i + length - 1
            # Either take the left box (opponent then plays i+1..j) or the
            # right box (opponent then plays i..j-1); the opponent's optimal
            # margin is subtracted from the taken value.
            dp[i][j] = max(boxes[i] - dp[i + 1][j],
                           boxes[j] - dp[i][j - 1])
    return dp[0][n - 1]
if __name__ == "__main__":
assert treasure_hunting([7, 2]) == 5
# Alex will choose the 7, and then Cindy gets the 2.
# So the result is 7 - 2 = 5.
assert treasure_hunting([2, 7, 3]) == -2
# It doesn't matter whether Alex chooses the 2 or the 3. Cindy will
# choose the 7 and Alex will get the remaining box. (2+3) - 7 = -2.
assert treasure_hunting([1000, 1000, 1000, 1000, 1000]) == 1000
# Since Alex chooses first, he will get 3 boxes and Cindy will get only 2.
# They all have the same value so (1000+1000+1000) - (1000+1000) = 1000.
assert treasure_hunting(
[823, 912, 345, 100000, 867, 222, 991, 3, 40000]) == -58111
print("All cases passed!")
| [
"[email protected]"
] | |
5e7e06f4d1c2518189980b9f44976d4630237971 | 262e1c150b075ecf25b80301a768e74b8b986940 | /external-scripts/airthings-wave/find_wave.py | 9fc10f35b3222c723d62a5d422e1c44f62a9b05e | [] | no_license | yfaway/openhab-rules | 30bbd2bfe696fd65f2747b8153bc592903de350c | c64c9e109173277b6b4b2473adaac9d2da623cdb | refs/heads/master | 2023-07-29T01:21:29.980812 | 2023-07-15T19:52:36 | 2023-07-15T19:52:36 | 133,594,317 | 10 | 1 | null | null | null | null | UTF-8 | Python | false | false | 2,537 | py | # MIT License
#
# Copyright (c) 2018 Airthings AS
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
#
# https://airthings.com
from bluepy.btle import Scanner, DefaultDelegate
import time
import struct
class DecodeErrorException(Exception):
    """Signals that a BLE advertisement payload could not be decoded."""

    def __init__(self, value):
        self.value = value

    def __str__(self):
        return "%r" % (self.value,)
class ScanDelegate(DefaultDelegate):
    """Minimal delegate for bluepy's Scanner; inherits the default
    discovery callbacks unchanged."""
    def __init__(self):
        DefaultDelegate.__init__(self)
# Continuously scan for BLE advertisements and print any Airthings Wave
# devices found.  (Python 2 script: note the print statement below.)
scanner = Scanner().withDelegate(ScanDelegate())
try:
    while 1:
        # Each round scans for 2 seconds.
        devices = scanner.scan(2.0)
        for dev in devices:
            ManuData = ""
            ManuDataHex = []
            # Pick the "Manufacturer" advertisement field, if present.
            for (adtype, desc, value) in dev.getScanData():
                serial_no = ""
                if (desc == "Manufacturer"):
                    ManuData = value
            if (ManuData == ""):
                continue
            # Convert the hex string into a list of byte values (two hex
            # characters per byte).
            for i, j in zip (ManuData[::2], ManuData[1::2]):
                ManuDataHex.append(int(i+j, 16))
            #Start decoding the raw Manufacturer data
            # Bytes 0-1 == 0x34, 0x03 -- presumably the little-endian
            # manufacturer ID 0x0334 (Airthings); confirm against the BLE
            # company identifier registry.
            if ((ManuDataHex[0] == 0x34) and (ManuDataHex[1] == 0x03)):
                # Serial number is little-endian in bytes 2-5.
                serial_no = str(256*256*256*ManuDataHex[5] + 256*256*ManuDataHex[4] + 256*ManuDataHex[3] + ManuDataHex[2])
                print "%s (%s), RSSI=%d dB, SN=%s" % (dev.addr, dev.addrType, dev.rssi, serial_no)
            else:
                continue
except DecodeErrorException:
    pass
| [
"[email protected]"
] | |
789a81cae226d145195fe982245809cdba32eee0 | 022b8014f17ae9c46da9de67ad0b7d27d9a242ed | /tools/g15.py | ae407bf77fa1a472696b5977469b06292178b1fd | [] | no_license | endlessnessbruh/tf2-config | b6a62b47b43901f1ceb348b794a4737b79c17d4a | d254248f2607a7331f4cd66408a8122e9ca65fad | refs/heads/master | 2023-03-15T19:26:26.908298 | 2013-12-31T04:00:48 | 2013-12-31T04:00:48 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,754 | py | #!/usr/bin/env python
from console.console import Console
from mumble.point import Vector3D
from overlay.overlay import OSD
from PyQt4 import QtCore, QtGui
from PyQt4.QtCore import *
from PyQt4.QtGui import *
from PyQt4.QtOpenGL import *
import re
import pprint
debug = False
class GrowingList(list):
    """List that automatically pads itself with None when an index past the
    current end is assigned."""

    def __setitem__(self, index, value):
        shortfall = index + 1 - len(self)
        if shortfall > 0:
            # Pad with None up to (and including) the requested index.
            self.extend([None] * shortfall)
        list.__setitem__(self, index, value)
def unpack(type, val):
    """Coerce a dumped value string to a Python object for its declared type.

    NOTE: the parameter name ``type`` (kept for signature compatibility)
    shadows the builtin of the same name.
    """
    if type == "bool":
        return val == "true"
    if type in ("short", "integer"):
        return int(val)
    if type == "float":
        return float(val)
    if type == "vector":
        return Vector3D(*[float(component) for component in val.split(" ")])
    return str(val)
def parse(line):
    """Parse one dumped "name type (value)" line into the module-level
    ``vars`` dict (assigned in __main__; note it shadows the builtin).

    Lines that do not match the expected pattern are silently ignored.
    A possible refinement would be to split indexed names like
    ``m_iHealth[3]`` into per-name arrays, but names such as m_iPing and
    m_iHealth occur both as scalars and as arrays in the dump.
    """
    match = re.search(r"(.*) (bool|short|integer|float|vector|string) \((.*)\)", line)
    if match is None:
        return
    vars[match.group(1)] = unpack(match.group(2), match.group(3))
class LCD(OSD):
    """On-screen overlay that polls the game console for player telemetry
    and draws a live scoreboard."""
    def logic(self):
        # Ask the game to dump player state, parse every returned line into
        # the module-level `vars` dict, then schedule a repaint.
        c.send("g15_dumpplayer")
        lines = c.read(1/self.rate_update)
        for line in lines:
            parse(line)
        self.repaint()
    def render(self, qp):
        # Debug mode: dump the entire telemetry dict as wrapped text columns.
        if debug:
            qp.setPen(QColor.fromRgbF(1, 1, 1, 0.5))
            qp.setFont(QFont("monospaced", 8))
            initial = [50, 100]
            current = [50, 100]
            try:
                for line in pprint.pformat(vars).split("\n"):
                    qp.drawText(current[0], current[1], line)
                    current[1] += 15
                    if current[1] >= 1000:
                        current[1] = initial[1]
                        if current[0] == initial[0]: current[0] += 200
                        current[0] += 200
            except AttributeError: pass
        # Per-team column origins, keyed by m_iTeam values (see the comment
        # on the team lookup below).
        xs = {1:100, 3:400, 2:960}
        ys = {1:200, 3:200, 2:200}
        # Iterate the player slots; keys in `vars` are indexed strings.
        for i in range(33):
            i = str(i)
            try:
                connected = vars["m_bConnected["+i+"]"]
                if not connected: continue
                alive = vars["m_bAlive["+i+"]"]
                deaths = vars["m_iDeaths["+i+"]"]
                if alive:
                    health = vars["m_iHealth["+i+"]"]
                else:
                    health = 0
                ping = vars["m_iPing["+i+"]"]
                score = vars["m_iScore["+i+"]"]
                team = vars["m_iTeam["+i+"]"] # 0 = no, 1 = spec, 2/3 = red/blu
                name = vars["m_szName["+i+"]"] # Names. '' = none, 'unconnected' = empty slot
                # "name (health) score-per-death" -- integer division if run
                # under Python 2 (PyQt4-era script); confirm.
                string = "%s (%s) %.2f" % (name, health, score / max(deaths, 1))
                font = QFont("monospaced", 11)
                fm = QFontMetrics(font)
                qp.setFont(font)
                height = fm.height() + fm.descent()
                # Semi-transparent black backing box, then the text twice:
                # once in the current pen, once in the health-coded color.
                qp.fillRect(xs[team], ys[team] - fm.height(), fm.width(string), height, QColor.fromRgbF(0,0,0,.5))
                qp.drawText(xs[team], ys[team], string)
                if health == 0:
                    qp.setPen(QColor.fromRgbF(1, 0, 0, .75))
                elif health <= 150:
                    qp.setPen(QColor.fromRgbF(1, .5, 0, .75))
                else:
                    qp.setPen(QColor.fromRgbF(1, 1, 1, .75))
                qp.drawText(xs[team], ys[team], string)
                ys[team] += height
            except KeyError: pass  # slot has no telemetry yet -- skip it
    # Reference list of interesting m_* dump variables.  This is a bare
    # string expression (not the class docstring), so it is a no-op at
    # runtime; kept verbatim as documentation.
    """
    m_Activity Overall activity - idling?
    m_Local.m_bDrawViewmodel
    m_Local.m_bDucked
    m_Local.m_bDucking
    m_Local.m_bInDuckJump
    m_Local.flDuckJumpTime
    m_Local.m_flDucktime
    m_Local.m_flFallVelocity
    m_Local.m_flJumpTime
    m_Shared.m_bFeignDeathReady
    m_Shared.m_bJumping
    m_Shared.m_bLastDisguisedAsOwnTeam
    m_Shared.m_bRageDraining
    m_Shared.m_flChargeMeter
    m_Shared.m_flCloakMeter
    m_Shared.m_flDisguiseCompleteTime
    m_Shared.m_flDuckTimer
    m_Shared.m_flEnergyDrinkMeter
    m_Shared.m_flHypeMeter
    m_Shared.m_flInvisChangeCompleteTime
    m_Shared.m_flNextRageEarnTime
    m_Shared.m_flRageMeter
    m_Shared.m_iAirDash
    m_Shared.m_nAirDucked
    m_Shared.m_nDesiredDisguiseClass
    m_Shared.m_nDesiredDisguiseTeam
    m_Shared.m_nDisguiseClass
    m_Shared.m_nDisguiseTeam
    m_bAltFiresUnderwater
    m_bBeingRepurposedForTaunt
    m_bCritFire
    m_bCurrentAttackIsCrit
    m_bDisguiseWeapon
    m_bFiresUnderwater
    m_bFiringWholeClip
    m_bInReload
    m_bReadyToBackstab
    m_bReloadsSingly
    m_fFireDuration
    m_fOnTarget
    m_flChargeBeginTime
    m_flChargedDamage
    m_flDetonateTime
    m_flEffectBarRegenTime
    m_iAmmo[0-31] your reserve ammo, 1 2 3
    m_iClip1 Current clip
    m_iClip2 Current clip (for single clip weapons)
    m_iComboCount
    m_iDeaths
    m_iFOVStart current FOV
    m_iHealth my health
    m_iPing my ping
    m_iPrimaryAmmoCount
    m_iPrimaryAmmoType
    m_iReloadMode
    m_iRoundsWon
    m_iScore my score?
    m_iSecondaryAmmoCount
    m_iSecondaryAmmoType
    m_iState
    m_iTeamNum
    m_iViewModelIndex Current weapon
    m_iWorldModelIndex Current weapon
    m_nViewModelIndex Old? Current weapon?
    pl.deadflag Me dead
    """
if __name__ == "__main__":
c = Console()
vars = {}
g15 = LCD()
g15.rate_update = 1
g15.start()
| [
"[email protected]"
] | |
b20ca43deed3e0f9a76f5b0683505ae22844ec20 | a70d43926850eec6790da93fbe2fffdf5a29965f | /tests/autopilot/uitests/main/test_main.py | ce12ff1163e8542d3b98df1ce0404e5a5d6e31d9 | [] | no_license | zan-xhipe/spaceship | 672098fee505d5802ba3e8d7de9723694fa02d35 | 984d6e197f48d7fa5c46d7ab8fbeffd6c2c0989e | refs/heads/master | 2016-09-06T12:42:59.727384 | 2013-07-25T18:56:36 | 2013-07-25T18:56:36 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 700 | py | # -*- Mode: Python; coding: utf-8; indent-tabs-mode: nil; tab-width: 4 -*-
"""Tests for the Hello World"""
from autopilot.matchers import Eventually
from textwrap import dedent
from testtools.matchers import Is, Not, Equals
from testtools import skip
import os
from uitests import UbuntuTouchAppTestCase
class MainTests(UbuntuTouchAppTestCase):
    """Generic tests for the Hello World"""
    # QML file under test, resolved relative to this test module
    # (four directory levels up, "spaceship.qml").
    test_qml_file = "%s/%s.qml" % (os.path.dirname(os.path.realpath(__file__)),"../../../../spaceship")
    def test_0_can_select_mainView(self):
        """Must be able to select the mainview."""
        mainView = self.get_mainview()
        # Eventually(...) retries the match until the app exposes the view.
        self.assertThat(mainView.visible,Eventually(Equals(True)))
| [
"[email protected]"
] | |
04ed9edffce7cc3ed47ba731938608c4a4d98992 | e6233098a835fd3041b189888358731203aefd43 | /burstACS.py | ac28eb9097f896415328ca572bc42e4033d4cb8d | [
"Unlicense"
] | permissive | akononovicius/python-stats | 0edf05a4bf5260d9f3a3cbbb6c27d7ba0e6332bc | 4627057719277f75d769aa25c2484ee7241e2568 | refs/heads/main | 2021-11-12T10:47:42.316008 | 2021-10-25T17:41:26 | 2021-10-25T17:41:26 | 144,122,601 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,347 | py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
##
## Burst statistics analysis as described in [Gontis et al., ACS, 2012].
##
import numpy as np
#
# Prepend and append series with fake data so that first and last bursts
# do not become lost
#
def __PrepSeries(s,thresh,delta=1):
    """Pad both ends of the series with two synthetic samples so that a
    burst touching either boundary is closed off and not lost.

    The pair added on each side straddles the threshold, ordered so the
    series crosses the threshold just outside the original data.
    """
    series = s.copy()
    if series[0] < thresh:
        head = [thresh - delta, thresh + delta]
    else:
        head = [thresh + delta, thresh - delta]
    if series[-1] < thresh:
        tail = [thresh + delta, thresh - delta]
    else:
        tail = [thresh - delta, thresh + delta]
    return np.concatenate((head, series, tail))
#
# Various stats extraction functions
#
def __ExtractBurstMax(s,bst,bd,tr):
    """Peak excess above threshold `tr` within each burst, given burst
    start indices `bst` and durations `bd`."""
    return np.array([np.max(s[bst[i]:bst[i] + bd[i]] - tr)
                     for i in range(len(bst))])
def __ExtractBurstSize(s,bst,bd,tr,dt):
    """Integrated area above threshold `tr` for each burst, scaled by the
    sample period `dt`."""
    return np.array([np.sum(s[bst[i]:bst[i] + bd[i]] - tr) * dt
                     for i in range(len(bst))])
def __ExtractIBurstMin(s,bst,ibd,tr):
    """Deepest dip below threshold `tr` in each inter-burst gap preceding a
    burst start; the first and last entries are skipped (boundary gaps)."""
    return np.array([np.max(tr - s[bst[i] - ibd[i]:bst[i]])
                     for i in range(1, len(bst) - 1)])
def __ExtractIBurstSize(s,bst,ibd,tr,dt):
    """Integrated area below threshold `tr` over each inter-burst gap,
    scaled by the sample period `dt`; boundary gaps are skipped."""
    return np.array([np.sum(tr - s[bst[i] - ibd[i]:bst[i]]) * dt
                     for i in range(1, len(bst) - 1)])
#
# The main public extraction function
#
def ExtractBurstData(ser,thresh,samplePeriod=1,returnBurst=True,
                     returnInterBurst=False,extractOther=False,
                     prepSeries=False):
    """Extract burst and inter-burst statistics from a time series.

    An "event" is a sample with value >= thresh; a burst is a consecutive
    run of events.  The returned tuple contents depend on the flags, in this
    order: burst durations, [burst peaks, burst sizes,] inter-burst
    durations, [inter-burst minima, inter-burst sizes].  Durations are
    scaled by samplePeriod.  Raises ValueError if both return* flags are
    False (nothing would be returned).
    """
    if((not returnBurst) and (not returnInterBurst)):
        raise ValueError("The function will not return anything")
    rez=()
    series=ser.copy().astype(float)
    if(prepSeries):
        # Pad both ends so bursts touching the window edges are not lost.
        series=__PrepSeries(series,thresh=thresh,delta=0.1*thresh)
    # Indices of samples at or above the threshold ("events").
    eventTimes=np.where(series>=thresh)[0]
    interEventPeriods=np.diff(eventTimes)
    # Positions where consecutive events are more than one sample apart:
    # these gaps delimit the bursts.
    iearr=np.where(interEventPeriods>1)[0]
    # First event after each gap marks a burst start; the last gap is
    # dropped, presumably because the following burst may be truncated by
    # the end of the observation window.
    burstStartTimes=eventTimes[iearr[:-1]+1]
    del eventTimes
    interBurstDuration=None
    if(returnInterBurst):
        interBurstDuration=interEventPeriods[iearr[:-1]]-1
    del interEventPeriods
    burstDuration=None
    if(returnBurst):
        burstDuration=np.diff(iearr)
    del iearr
    if(returnBurst):
        rez=rez+(burstDuration*samplePeriod,)
        if(extractOther):
            burstMax=__ExtractBurstMax(series,burstStartTimes,burstDuration,
                                       thresh)
            burstSize=__ExtractBurstSize(series,burstStartTimes,burstDuration,
                                         thresh,samplePeriod)
            rez=rez+(burstMax,)
            rez=rez+(burstSize,)
    if(returnInterBurst):
        rez=rez+(interBurstDuration*samplePeriod,)
        if(extractOther):
            interBurstMin=__ExtractIBurstMin(series,burstStartTimes,
                                             interBurstDuration,thresh)
            interBurstSize=__ExtractIBurstSize(series,burstStartTimes,
                                               interBurstDuration,thresh,
                                               samplePeriod)
            rez=rez+(interBurstMin,)
            rez=rez+(interBurstSize,)
    return rez
| [
"[email protected]"
] | |
6d3f78774df64f55e3ec9f83c05c4d9766829aa0 | 48e124e97cc776feb0ad6d17b9ef1dfa24e2e474 | /sdk/python/pulumi_azure_native/network/v20200401/get_policy.py | 962e6ebed92836613177fffb74f9a4f6d43965c1 | [
"BSD-3-Clause",
"Apache-2.0"
] | permissive | bpkgoud/pulumi-azure-native | 0817502630062efbc35134410c4a784b61a4736d | a3215fe1b87fba69294f248017b1591767c2b96c | refs/heads/master | 2023-08-29T22:39:49.984212 | 2021-11-15T12:43:41 | 2021-11-15T12:43:41 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,728 | py | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from ... import _utilities
from . import outputs
__all__ = [
'GetPolicyResult',
'AwaitableGetPolicyResult',
'get_policy',
'get_policy_output',
]
@pulumi.output_type
class GetPolicyResult:
    """
    Defines web application firewall policy.
    """
    # NOTE: this class is auto-generated by the Pulumi SDK Generator (see the
    # file header).  Every constructor argument is defensively type-checked and
    # stored through pulumi.set(); @pulumi.output_type then exposes the matching
    # @pulumi.getter properties below as the public, read-only API.
    def __init__(__self__, custom_rules=None, etag=None, frontend_endpoint_links=None, id=None, location=None, managed_rules=None, name=None, policy_settings=None, provisioning_state=None, resource_state=None, routing_rule_links=None, tags=None, type=None):
        if custom_rules and not isinstance(custom_rules, dict):
            raise TypeError("Expected argument 'custom_rules' to be a dict")
        pulumi.set(__self__, "custom_rules", custom_rules)
        if etag and not isinstance(etag, str):
            raise TypeError("Expected argument 'etag' to be a str")
        pulumi.set(__self__, "etag", etag)
        if frontend_endpoint_links and not isinstance(frontend_endpoint_links, list):
            raise TypeError("Expected argument 'frontend_endpoint_links' to be a list")
        pulumi.set(__self__, "frontend_endpoint_links", frontend_endpoint_links)
        if id and not isinstance(id, str):
            raise TypeError("Expected argument 'id' to be a str")
        pulumi.set(__self__, "id", id)
        if location and not isinstance(location, str):
            raise TypeError("Expected argument 'location' to be a str")
        pulumi.set(__self__, "location", location)
        if managed_rules and not isinstance(managed_rules, dict):
            raise TypeError("Expected argument 'managed_rules' to be a dict")
        pulumi.set(__self__, "managed_rules", managed_rules)
        if name and not isinstance(name, str):
            raise TypeError("Expected argument 'name' to be a str")
        pulumi.set(__self__, "name", name)
        if policy_settings and not isinstance(policy_settings, dict):
            raise TypeError("Expected argument 'policy_settings' to be a dict")
        pulumi.set(__self__, "policy_settings", policy_settings)
        if provisioning_state and not isinstance(provisioning_state, str):
            raise TypeError("Expected argument 'provisioning_state' to be a str")
        pulumi.set(__self__, "provisioning_state", provisioning_state)
        if resource_state and not isinstance(resource_state, str):
            raise TypeError("Expected argument 'resource_state' to be a str")
        pulumi.set(__self__, "resource_state", resource_state)
        if routing_rule_links and not isinstance(routing_rule_links, list):
            raise TypeError("Expected argument 'routing_rule_links' to be a list")
        pulumi.set(__self__, "routing_rule_links", routing_rule_links)
        if tags and not isinstance(tags, dict):
            raise TypeError("Expected argument 'tags' to be a dict")
        pulumi.set(__self__, "tags", tags)
        if type and not isinstance(type, str):
            raise TypeError("Expected argument 'type' to be a str")
        pulumi.set(__self__, "type", type)
    @property
    @pulumi.getter(name="customRules")
    def custom_rules(self) -> Optional['outputs.CustomRuleListResponse']:
        """
        Describes custom rules inside the policy.
        """
        return pulumi.get(self, "custom_rules")
    @property
    @pulumi.getter
    def etag(self) -> Optional[str]:
        """
        Gets a unique read-only string that changes whenever the resource is updated.
        """
        return pulumi.get(self, "etag")
    @property
    @pulumi.getter(name="frontendEndpointLinks")
    def frontend_endpoint_links(self) -> Sequence['outputs.FrontendEndpointLinkResponse']:
        """
        Describes Frontend Endpoints associated with this Web Application Firewall policy.
        """
        return pulumi.get(self, "frontend_endpoint_links")
    @property
    @pulumi.getter
    def id(self) -> str:
        """
        Resource ID.
        """
        return pulumi.get(self, "id")
    @property
    @pulumi.getter
    def location(self) -> Optional[str]:
        """
        Resource location.
        """
        return pulumi.get(self, "location")
    @property
    @pulumi.getter(name="managedRules")
    def managed_rules(self) -> Optional['outputs.ManagedRuleSetListResponse']:
        """
        Describes managed rules inside the policy.
        """
        return pulumi.get(self, "managed_rules")
    @property
    @pulumi.getter
    def name(self) -> str:
        """
        Resource name.
        """
        return pulumi.get(self, "name")
    @property
    @pulumi.getter(name="policySettings")
    def policy_settings(self) -> Optional['outputs.FrontDoorPolicySettingsResponse']:
        """
        Describes settings for the policy.
        """
        return pulumi.get(self, "policy_settings")
    @property
    @pulumi.getter(name="provisioningState")
    def provisioning_state(self) -> str:
        """
        Provisioning state of the policy.
        """
        return pulumi.get(self, "provisioning_state")
    @property
    @pulumi.getter(name="resourceState")
    def resource_state(self) -> str:
        # No docstring in the generated source; presumably the policy's current
        # resource status -- confirm against the Azure REST API specification.
        return pulumi.get(self, "resource_state")
    @property
    @pulumi.getter(name="routingRuleLinks")
    def routing_rule_links(self) -> Sequence['outputs.RoutingRuleLinkResponse']:
        """
        Describes Routing Rules associated with this Web Application Firewall policy.
        """
        return pulumi.get(self, "routing_rule_links")
    @property
    @pulumi.getter
    def tags(self) -> Optional[Mapping[str, str]]:
        """
        Resource tags.
        """
        return pulumi.get(self, "tags")
    @property
    @pulumi.getter
    def type(self) -> str:
        """
        Resource type.
        """
        return pulumi.get(self, "type")
class AwaitableGetPolicyResult(GetPolicyResult):
    """GetPolicyResult variant that can also be used with ``await``."""
    # pylint: disable=using-constant-test
    def __await__(self):
        # The unreachable ``yield`` turns this method into a generator, which
        # is what the ``await`` protocol requires; the value is then returned
        # immediately because the result is already available synchronously.
        if False:
            yield self
        return GetPolicyResult(
            custom_rules=self.custom_rules,
            etag=self.etag,
            frontend_endpoint_links=self.frontend_endpoint_links,
            id=self.id,
            location=self.location,
            managed_rules=self.managed_rules,
            name=self.name,
            policy_settings=self.policy_settings,
            provisioning_state=self.provisioning_state,
            resource_state=self.resource_state,
            routing_rule_links=self.routing_rule_links,
            tags=self.tags,
            type=self.type)
def get_policy(policy_name: Optional[str] = None,
               resource_group_name: Optional[str] = None,
               opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetPolicyResult:
    """
    Defines web application firewall policy.

    :param str policy_name: The name of the Web Application Firewall Policy.
    :param str resource_group_name: Name of the Resource group within the Azure subscription.
    """
    __args__ = dict()
    __args__['policyName'] = policy_name
    __args__['resourceGroupName'] = resource_group_name
    if opts is None:
        opts = pulumi.InvokeOptions()
    if opts.version is None:
        # Default to this SDK's own version when the caller did not pin one.
        opts.version = _utilities.get_version()
    # Synchronous provider invoke; the Awaitable wrapper lets callers also
    # ``await`` the returned result.
    __ret__ = pulumi.runtime.invoke('azure-native:network/v20200401:getPolicy', __args__, opts=opts, typ=GetPolicyResult).value
    return AwaitableGetPolicyResult(
        custom_rules=__ret__.custom_rules,
        etag=__ret__.etag,
        frontend_endpoint_links=__ret__.frontend_endpoint_links,
        id=__ret__.id,
        location=__ret__.location,
        managed_rules=__ret__.managed_rules,
        name=__ret__.name,
        policy_settings=__ret__.policy_settings,
        provisioning_state=__ret__.provisioning_state,
        resource_state=__ret__.resource_state,
        routing_rule_links=__ret__.routing_rule_links,
        tags=__ret__.tags,
        type=__ret__.type)
@_utilities.lift_output_func(get_policy)
def get_policy_output(policy_name: Optional[pulumi.Input[str]] = None,
                      resource_group_name: Optional[pulumi.Input[str]] = None,
                      opts: Optional[pulumi.InvokeOptions] = None) -> pulumi.Output[GetPolicyResult]:
    """
    Defines web application firewall policy.

    :param str policy_name: The name of the Web Application Firewall Policy.
    :param str resource_group_name: Name of the Resource group within the Azure subscription.
    """
    # Body intentionally empty: lift_output_func wraps get_policy so this
    # variant accepts pulumi Inputs and returns an Output of the result.
    ...
| [
"[email protected]"
] | |
e19e952f8114ff1d46b331e100eea2c2f95e901d | 71f2b1a20caa53b1ec735341a54310978b17c886 | /FastSpeech.py | 9c6d2065a82de766385e21add5fffe33aaf7a588 | [] | no_license | meelement/fastspeech-2 | 22bf0d4db2a3334070b2e80808bed112b19db5ed | fc2786d14715f385795b2bb7d44c8707b1c5e0ff | refs/heads/master | 2022-03-04T03:37:26.872253 | 2019-10-29T16:02:36 | 2019-10-29T16:02:36 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,728 | py | import torch.nn as nn
from transformer.Models import Encoder, Decoder
from transformer.Layers import Linear, PostNet
from Networks import LengthRegulator
import hparams as hp
class FastSpeech(nn.Module):
    """FastSpeech text-to-mel model.

    Pipeline: phoneme encoder -> length regulator (duration-based
    upsampling) -> decoder -> linear mel projection -> PostNet residual
    refinement.
    """
    def __init__(self):
        super(FastSpeech, self).__init__()
        self.encoder = Encoder()
        self.length_regulator = LengthRegulator()
        self.decoder = Decoder()
        self.mel_linear = Linear(hp.decoder_output_size, hp.num_mels)
        self.postnet = PostNet()

    def forward(self, src_seq, src_pos, mel_max_length=None, length_target=None, alpha=1.0):
        """Synthesize mel-spectrograms from a phoneme sequence.

        In training mode (ground-truth durations required) returns
        ``(mel, mel_postnet, duration_predictor_output)``; in eval mode
        returns ``(mel, mel_postnet)``.  ``alpha`` scales the predicted
        durations (speech-rate control).
        """
        enc_out, enc_mask = self.encoder(src_seq, src_pos)
        if self.training:
            reg_out, dec_pos, duration_pred = self.length_regulator(
                enc_out,
                enc_mask,
                length_target,
                alpha,
                mel_max_length)
        else:
            reg_out, dec_pos = self.length_regulator(
                enc_out, enc_mask, alpha=alpha)
            duration_pred = None
        dec_out = self.decoder(reg_out, dec_pos)
        mel_before = self.mel_linear(dec_out)
        # PostNet predicts a residual correction on top of the coarse mels.
        mel_after = self.postnet(mel_before) + mel_before
        if self.training:
            # NOTE (from the original author): mel-spectrogram normalization
            # should be conducted here.
            return mel_before, mel_after, duration_pred
        return mel_before, mel_after
| [
"[email protected]"
] | |
93c18c059b48e6bae10a44e89e491708a20f986f | b96a4062f5ad420dd02efed82b47dd9c249cb46c | /pytorch_lightning/plugins/training_type/parallel.py | f3c825fe9cd7aaf39df26ff0443433ebc4b626d5 | [
"Apache-2.0",
"LicenseRef-scancode-proprietary-license"
] | permissive | borisdayma/pytorch-lightning | ebc210a1e7901b5f87ab67e4886bfe20b478fe33 | 4b7c0fae00084b72dffe37fdd0ea7d2e9b60d103 | refs/heads/master | 2021-11-23T07:34:01.842134 | 2021-02-19T17:00:27 | 2021-02-19T17:00:27 | 238,756,095 | 1 | 1 | Apache-2.0 | 2020-02-06T18:27:51 | 2020-02-06T18:27:50 | null | UTF-8 | Python | false | false | 4,617 | py | # Copyright The PyTorch Lightning team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import io
import os
from abc import ABC, abstractmethod
from contextlib import contextmanager
from typing import List, Optional
import torch
from torch.nn.parallel import DistributedDataParallel
from pytorch_lightning.core.lightning import LightningModule
from pytorch_lightning.overrides.base import unwrap_lightning_module
from pytorch_lightning.plugins.environments.cluster_environment import ClusterEnvironment
from pytorch_lightning.plugins.training_type.training_type_plugin import TrainingTypePlugin
from pytorch_lightning.utilities.distributed import all_gather_ddp_if_available, ReduceOp
class ParallelPlugin(TrainingTypePlugin, ABC):
    """Base class for training-type plugins spanning multiple devices;
    concrete subclasses must provide ``root_device`` and ``setup``."""
    def __init__(
        self,
        parallel_devices: Optional[List[torch.device]] = None,
        cluster_environment: Optional[ClusterEnvironment] = None,
    ):
        super().__init__()
        self.parallel_devices = parallel_devices
        # Single-process defaults; subclasses overwrite these once the
        # distributed environment has been initialised.
        self.world_size = 1
        self.local_rank = 0
        self.cluster_environment = cluster_environment
    @property
    def cluster_local_rank(self):
        # The environment's local-rank lookup raises KeyError when the rank
        # is not set (e.g. outside a cluster launch); fall back to rank 0.
        try:
            return self.cluster_environment.local_rank()
        except KeyError:
            return 0
    @property
    @abstractmethod
    def root_device(self):
        # Subclasses return the torch.device this process drives.
        raise NotImplementedError
    @property
    def on_gpu(self):
        # True only when the root device is CUDA *and* CUDA is usable here.
        return self.root_device.type == "cuda" and torch.cuda.is_available()
    @property
    def lightning_module(self):
        # Strip any DDP/override wrappers to expose the raw LightningModule.
        return unwrap_lightning_module(self._model)
    @abstractmethod
    def setup(self, model):
        raise NotImplementedError
    def connect(self, model, *args, **kwargs):
        """Attach ``model`` to this plugin and return the (possibly wrapped) model."""
        self.setup(model)
        return self.model
    @property
    def is_global_zero(self) -> bool:
        return self.global_rank == 0
    @property
    def distributed_sampler_kwargs(self):
        # Arguments handed to DistributedSampler so each process gets its shard.
        distributed_sampler_kwargs = dict(num_replicas=len(self.parallel_devices), rank=self.global_rank)
        return distributed_sampler_kwargs
    def reduce_early_stopping_decision(self, should_stop: bool) -> bool:
        # Early stopping triggers only when *every* rank votes to stop: the
        # per-rank 0/1 votes are summed and compared against world_size.
        should_stop = torch.tensor(int(should_stop), device=self.lightning_module.device)
        should_stop = self.reduce(should_stop, reduce_op=ReduceOp.SUM)
        should_stop = bool(should_stop == self.world_size)
        return should_stop
    @property
    def torch_distributed_backend(self):
        # The backend can be forced via PL_TORCH_DISTRIBUTED_BACKEND; otherwise
        # nccl is chosen on GPU and gloo on CPU.
        torch_backend = os.getenv("PL_TORCH_DISTRIBUTED_BACKEND")
        if torch_backend is None:
            torch_backend = "nccl" if self.on_gpu else "gloo"
        return torch_backend
    @staticmethod
    def configure_sync_batchnorm(model: LightningModule) -> LightningModule:
        """
        Add global batchnorm for a model spread across multiple GPUs and nodes.
        Override to synchronize batchnorm between specific process groups instead
        of the whole world or use a different sync_bn like `apex`'s version.
        Args:
            model: pointer to current :class:`LightningModule`.
        Return:
            LightningModule with batchnorm layers synchronized between process groups
        """
        model = torch.nn.SyncBatchNorm.convert_sync_batchnorm(model)
        return model
    @contextmanager
    def block_backward_sync(self):
        """
        Blocks ddp sync gradients behaviour on backwards pass.
        This is useful for skipping sync when accumulating gradients, reducing communication overhead
        Returns: context manager with sync behaviour off
        """
        if isinstance(self.model, DistributedDataParallel):
            with self.model.no_sync():
                yield None
        else:
            yield None
    def broadcast(self, obj: object, src: int) -> object:
        """Serialize ``obj`` with torch.save and share it across ranks."""
        # NOTE(review): ``src`` is never used by this implementation -- the
        # payload is all-gathered rather than broadcast from a source rank.
        buffer = io.BytesIO()
        torch.save(obj, buffer)
        data = bytearray(buffer.getbuffer())
        # The byte payload travels as a float tensor because the all-gather
        # helper operates on tensors located on the root device.
        data_tensor = torch.tensor(data).to(self.root_device, dtype=torch.float)
        data = all_gather_ddp_if_available(data_tensor)
        buffer = io.BytesIO(data.cpu().byte().numpy())
        obj = torch.load(buffer)
        return obj
| [
"[email protected]"
] | |
0e253c2a0f309e9ea44cfbc0c86039ca87bd630e | dec1f4c491c7f325d7e58afde08cef8a47f33261 | /security_check/U_57.py | 5c61b34a70db3cc917c85741c906ade5cea6a2e9 | [] | no_license | wespito/BoB-Linux | adc8767789911fc70eaacde0b8cd244f57697a65 | e6a8ec81330de57dc1d5f7b09bd8681cb248a5af | refs/heads/main | 2023-07-08T05:11:56.493580 | 2021-08-15T09:29:31 | 2021-08-15T09:29:31 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,826 | py | # [U-57] UMASK 설정 값 점검
# 2020/11/27 : 노무승
# 1. 매뉴얼에 나와 있는 UMASK 설정 파일 리스트에
# 추가로 조사해 설정 파일 여러 개 추가함.
# 2. 기본적인 UMASK 설정 값은 /etc/login.defs에 저장 됨.
# 3. /etc/pam.d/common-session, common-session-noninteractive 파일 내부에 설정을 추가해 시스템 전체 UMASK를 지정 가능함.
# 4. /etc/bashrc, /etc/bash.bashrc에 설정을 추가해 UMASK를 지정하는 것도 가능함.
import getpass
import os.path
C_END = "\033[0m"  # ANSI escape: reset terminal colour
C_RED = "\033[31m"  # ANSI escape: red (action required)
C_GREEN = "\033[32m"  # ANSI escape: green (check passed)
C_YELLOW = "\033[33m"  # ANSI escape: yellow (warning lines)
C_NUM = "U-57"  # identifier of this security check; also names the report file
def U57():
    """[U-57] Check the UMASK value configured in well-known shell/PAM files.

    Scans system-wide configuration files and the current user's dotfiles
    for uncommented UMASK assignments and flags any value whose numeric
    form is below 22.  The (Korean-language) report is printed to stdout
    and written to ./U-57.txt.
    """
    f_output = ""
    f_output = f_output + "[" + C_NUM + "] UMASK 설정 값 점검" + "\n"
    flag = False  # becomes True once at least one weak UMASK is found
    # Candidate configuration files: absolute /etc paths are used as-is,
    # bare dotfile names are resolved against the current user's home below.
    conf_files = "/etc/profile /etc/default /etc/bashrc /etc/bash.bashrc /etc/login.defs /etc/pam.d/common-session /etc/pam.d/common-session-noninteractive .cshrc .kshrc .bashrc .login .profile"
    conf_files = conf_files.split()
    home = "/home/" + getpass.getuser() + "/"
    for conf in conf_files:
        if conf.find("/etc/") == -1:
            conf = home + conf
        if os.path.isfile(conf):
            # 'with' guarantees the handle is closed even if parsing raises
            # (the original left handles open on any exception).
            with open(conf, mode="r", encoding="utf-8") as handle:
                for temp in handle:
                    if temp[0] != "#":  # skip comment lines
                        temp = temp.upper()
                        if temp.find("UMASK") != -1:
                            # Normalise "umask = 022", "UMASK:022", etc. to "UMASK022".
                            temp = temp.replace(" ", "")
                            temp = temp.replace("\t", "")
                            temp = temp.replace("\n", "")
                            temp = temp.replace("=", "")
                            temp = temp.replace(":", "")
                            temp = temp[temp.find("UMASK") + 5:len(temp)]
                            if temp.isdecimal():
                                # NOTE(review): UMASK values are octal, but the check
                                # below compares them as decimal (int("022") == 22).
                                # Kept as-is to preserve the original behaviour.
                                if int(temp) < 22:
                                    f_output = f_output + C_YELLOW + "\t[경고] " + conf + " : 설정된 UMASK 값이 22 미만 입니다.\n" + C_END
                                    f_output = f_output + C_YELLOW + "\t\t(설정된 UMASK 값 : " + str(int(temp)) + ")\n" + C_END
                                    flag = True
    if flag:
        f_output = f_output + C_RED + "\t[검사 결과] 보안 조치가 필요합니다.\n" + C_END
        f_output = f_output + "\n"
        f_output = f_output + "[" + C_NUM + "] 조치 방법\n"
        f_output = f_output + "\t텍스트 편집기로 해당 파일을 열어 UMASK 값을 22로 설정해주세요.\n"
    else:
        f_output = f_output + C_GREEN + "\t[검사 결과] 안전합니다.\n" + C_END
        f_output = f_output + "\n"
    print(f_output, end='')
    # Persist the report in the working directory as U-57.txt.
    with open("./" + C_NUM + ".txt", mode='w', encoding='utf-8') as report:
        report.write(f_output)
U57()
| [
"[email protected]"
] | |
e742888cc0187e291a1a46392a8299f283286861 | 8e2dd8979702ea7b95236385fe1b1e7715c3af56 | /handlers/__init__.py | 8ca692956d4027e89b1e8ca54a6f9ec9cbf61139 | [
"WTFPL"
] | permissive | Watch-Dogs-HIT/Watch_Dogs-Server | 061c8724e4ddc80bfdaa676fdfe412fddce86cab | 3dfcb412197c56d6fcbf1a7e12f4fcaf16ca1eae | refs/heads/master | 2022-01-13T00:49:20.024591 | 2019-06-22T01:57:27 | 2019-06-22T01:57:27 | 172,664,891 | 4 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,483 | py | #!/usr/bin/env python
# encoding:utf-8
"""
Watch_Dogs
base handler
"""
import os
import json
import traceback
import tornado.web
from tornado import gen
from conf.setting import Setting
setting = Setting()  # module-level settings instance shared by the handlers below
def byteify(input_unicode_dict, encoding='utf-8'):
    """
    Recursively convert a unicode dict/list/string into its byte-string
    (``str``) equivalent.  Python 2 only: relies on ``dict.iteritems`` and
    the ``unicode`` builtin.
    reference : https://www.jianshu.com/p/90ecc5987a18
    """
    if isinstance(input_unicode_dict, dict):
        return {byteify(key): byteify(value) for key, value in input_unicode_dict.iteritems()}
    elif isinstance(input_unicode_dict, list):
        return [byteify(element) for element in input_unicode_dict]
    elif isinstance(input_unicode_dict, unicode):
        return input_unicode_dict.encode(encoding)
    else:
        # Numbers, None, already-encoded strings, etc. pass through unchanged.
        return input_unicode_dict
class BaseHandler(tornado.web.RequestHandler):
    """Common base handler: permissive CORS headers plus shortcuts to
    application-level helpers and user cookies."""
    def set_default_headers(self):
        # Wide-open CORS so the web front-end can call the API from any origin.
        self.set_header("Access-Control-Allow-Origin", "*")
        self.set_header("Access-Control-Allow-Headers", "x-requested-with")
        self.set_header("Access-Control-Allow-Methods", "POST, GET, OPTIONS")
        self.set_header("Access-Control-Allow-Credentials", True)
    def post(self):
        pass
    def get(self):
        pass
    @property
    def db(self):
        """Asynchronous database access object."""
        return self.application.db
    @property
    def log(self):
        """Logger object."""
        return self.application.log
    @property
    def data(self):
        """Business logic / data-processing layer."""
        return self.application.data
    @property
    def setting(self):
        """Static settings."""
        return self.application.setting
    @property
    def remote_api(self):
        """Remote API client."""
        return self.application.remote_api
    @property
    def uid(self):
        # Current user's id straight from the cookie (may be None).
        return self.get_cookie('uid')
    @property
    def user_status(self):
        # "-1" acts as the "unknown / not set" sentinel value.
        return self.get_cookie('user_status') if self.get_cookie('user_status') else "-1"
    def get_current_user(self):
        return self.get_cookie("user")
    def get_request_json(self):
        """Parse the request body as JSON when the Content-Type says so."""
        if "Content-Type" in self.request.headers and "application/json" in self.request.headers["Content-Type"]:
            return byteify(json.loads(self.request.body))  # return str dict
        return {"error": "no json found"}
    @gen.coroutine
    def update_cookie(self):
        """Refresh the user_status cookie from the data layer."""
        user_status = yield self.data.update_cookie(self.uid)
        self.set_cookie("user_status", str(user_status))
    def write_error(self, status_code, **kwargs):
        """Render the 500 page (with traceback when serve_traceback is on)."""
        error_message = ["Oops! Something wrong,"]
        if self.settings.get("serve_traceback") and "exc_info" in kwargs:
            error_message = traceback.format_exception(*kwargs["exc_info"])
        return self.render(
            'error.html',
            http_code=500,
            error_message=error_message
        )
class TestHandler(BaseHandler):
    """Handler for route "/": renders the test/landing page."""
    def get(self):
        return self.render("test.html", date=self.setting.get_local_time(),
                           author="h-j-13",
                           repo_link="https://github.com/Watch-Dogs-HIT/Watch_Dogs-Server")
class NotFoundHandler(BaseHandler):
    """Fallback handler: renders the 404 page."""
    def get(self):
        return self.render("404.html", status_code=404)
class ClientDownloadHandler(tornado.web.RequestHandler):
    """Handler for route "/client": remote-client archive download."""
    def get(self):
        """Stream the remote-client tarball as an attachment in 4 KiB chunks."""
        self.set_header('Content-Type', 'application/octet-stream')
        self.set_header('Content-Disposition', 'attachment; filename={fn}'.format(fn=Setting.CLIENT_FILE_TAR))
        with open(os.path.join(setting.CONF_PATH, Setting.CLIENT_FILE_TAR), 'rb') as c:
            while True:
                data = c.read(4096)
                if not data:
                    break
                self.write(data)
        self.finish()
class ClientScriptDownloadHandler(tornado.web.RequestHandler):
    """Handler for route "/client/script": client install-script download."""
    def get(self):
        """Stream the install script as an attachment in 4 KiB chunks."""
        self.set_header('Content-Type', 'application/octet-stream')
        self.set_header('Content-Disposition', 'attachment; filename={fn}'.format(fn=Setting.CLIENT_SCRIPT))
        with open(os.path.join(setting.CONF_PATH, Setting.CLIENT_SCRIPT), 'rb') as c:
            while True:
                data = c.read(4096)
                if not data:
                    break
                self.write(data)
            # NOTE(review): unlike ClientDownloadHandler, finish() is called
            # inside the ``with`` block here; behaviourally equivalent, but
            # the inconsistency looks accidental.
            self.finish()
| [
"[email protected]"
] | |
c855770ff311e1c544bf7b358a41914de8212a82 | c24b9c8dfd47527db9f6cd5158a1a0562e03f46c | /GalDynPsr/galpyMWBHpl.py | 76608c27f5d2787f8ae752a15e7ed521b18d40b1 | [
"BSD-3-Clause"
] | permissive | pathakdhruv/GalDynPsr | a165c082e0cb97ebfdb359b88ec73e749b7264fb | ed9b582454c3a09a1351cdd2cd529a3a50068e1d | refs/heads/master | 2021-04-28T14:29:56.029738 | 2018-10-14T09:55:29 | 2018-10-14T09:55:29 | 152,193,421 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,798 | py | import math
from galpy.potential import MWPotential2014
from galpy.potential import PowerSphericalPotentialwCutoff
from galpy.potential import MiyamotoNagaiPotential
from galpy.potential import NFWPotential
from galpy.util import bovy_conversion
from astropy import units
from galpy.potential import KeplerPotential
from galpy.potential import vcirc
from GalDynPsr import read_parameters as par
def VpratioMWBH(Rpkpc):
    """Return Vc(Rpkpc)/Vc(Rsun): the circular-speed ratio at Galactocentric
    radius ``Rpkpc`` (kpc) for MWPotential2014 augmented with a central
    Kepler point mass (amp = 4e6 in galpy's natural mass units -- presumably
    the Sgr A* black-hole mass in solar masses; confirm against the model's
    reference).
    """
    # galpy works in natural units: radii are scaled by the solar
    # Galactocentric radius par.Rskpc and velocities by the solar circular
    # speed par.Vs.  (The previous module-level ``global MWBH`` statement and
    # the dead, commented-out potential construction were removed: both were
    # no-ops.)
    MWPotential2014wBH = [MWPotential2014, KeplerPotential(amp=4*10**6./bovy_conversion.mass_in_msol(par.Vs, par.Rskpc))]
    return vcirc(MWPotential2014wBH, Rpkpc/par.Rskpc)
def MWBHpl(ldeg, sigl, bdeg, sigb, dkpc, sigd):
    """Planar (Galactic-disc) contribution to the excess acceleration term,
    in s^-1 (the line-of-sight acceleration divided by c).

    ldeg/bdeg are Galactic longitude/latitude in degrees, dkpc the distance
    in kpc; the uncertainties sigl/sigb/sigd are forwarded to par.Rpkpc.
    (Unused locals ``Vp`` and ``zkpc`` from the original were removed.)
    """
    b = bdeg*par.degtorad  # latitude in radians
    l = ldeg*par.degtorad  # longitude in radians
    c = par.c  # speed of light (SI)
    Rskpc = par.Rskpc  # solar Galactocentric radius in kpc
    kpctom = par.kpctom  # kpc -> metre conversion factor
    Vs = par.Vs  # solar circular speed (km/s, per the 1000x scaling below)
    Rpkpc = par.Rpkpc(ldeg, sigl, bdeg, sigb, dkpc, sigd)  # source's Galactocentric radius (kpc)
    Vprat = VpratioMWBH(Rpkpc)  # Vc(Rp)/Vc(Rsun) for MW potential + central BH
    Vsms = 1000.0*Vs  # solar circular speed in m/s
    Rs = Rskpc*kpctom  # solar Galactocentric radius in m
    # Geometric projection of the in-plane Galactic acceleration onto the
    # line of sight.
    be = (dkpc/Rskpc)*math.cos(b) - math.cos(l)
    t0 = math.sin(l)*math.sin(l) + be*be
    t2 = (-1.0)*(math.cos(l) + Vprat*Vprat*(be/t0))  # dimensionless
    t3 = (Vsms*Vsms)/(Rs)  # SI scale factor, m s^-2
    adr = t2*t3*math.cos(b)  # line-of-sight acceleration, m s^-2
    Excpl = adr/c  # divide by c -> s^-1
    return Excpl
| [
"[email protected]"
] | |
0edcaf04d837804f4cce7e34b0e01714f1c7e38a | 2a9922b22a075ff0a373c6db8d269b625d672829 | /fashionist/constants.py | 5d6c17d146d212721dc187c4b21a7ac0f4caee7f | [] | no_license | WhhhzFdUNpun/distributed_fashion_mnist | e7328546e342629620defcad13c7e32cc84f3fa1 | 85f25d8fef81e1f0d02e2af2fd6a7a1ff76ba019 | refs/heads/main | 2023-09-01T08:31:27.764049 | 2021-10-21T16:05:43 | 2021-10-21T16:05:43 | 419,788,839 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 104 | py | from pathlib import Path
PROJECT_DIR = Path(__file__).parents[1]  # repository root: two levels up from this module
STORAGE_DIR = PROJECT_DIR / 'storage'  # on-disk storage directory under the project root
| [
"[email protected]"
] | |
254012a45627f62cf14136792d64f56fd48523d5 | c53d57b6ea5b117d8fa260a29942cf9422a46063 | /examples/DeepWisdom/speech_autodl_config.py | d833f42a3efde61c0177599bfe9c6640cdc4f2e3 | [
"Apache-2.0"
] | permissive | zichuan-scott-xu/automl-workflow | c52ee6c7c01347274a6b1d6801a7889daa35a40d | d108e55da943775953b9f1801311a86ac07e58a0 | refs/heads/main | 2023-04-01T16:20:10.885273 | 2021-04-14T05:44:48 | 2021-04-14T05:44:48 | 327,514,705 | 0 | 0 | Apache-2.0 | 2021-01-07T05:41:38 | 2021-01-07T05:41:38 | null | UTF-8 | Python | false | false | 3,487 | py | import os
import json
from collections import namedtuple
import copy
# Config for Covertor
IF_RESET_TFGRAPH_SESS_RUN = False
TF_DATASET_TO_NUMPY_MODE = "graph" # eager/graph
# os.environ["CUDA_VISIBLE_DEVICES"] = "1"
# Global configuration data (original comment: 全局配置数据).
# Selects the per-domain solution backend and dataset sampling options;
# "speech_global_conf" is wired in below from speech_global_conf_data.
autodl_global_config = {
    "meta_solution": {
        "cv_solution": "DeepWisdom",
        # "cv_solution": "kakaobrain",
        # "nlp_solution": "DeepBlueAI",
        "nlp_solution": "upwind_flys",
        "speech_solution": "PASA_NJU",
        # "speech_solution": "rank_2_fuzhi",
    },
    "data_space": {
        "domain_dataset": {
            "text": {"if_sample": True, "sample_ratio": 0.5},
            "speech": {"if_sample": True, "sample_ratio": 0.5},
        }
    },
    "speech_global_conf": None,
}
# Speech-specific configuration: tf.data sampling plus model loop planning.
speech_global_conf_data = {
    "data_space": {
        "tf_dataset": {
            "if_shuffle": False,
            "shuffle_ratio": 0.5,
            "if_sample": True,
            # "sample_ratio": 0.2,
            # "sample_ratio": [0.1, 0.2, 0.3, 0.2, 0.2, 0.2],
            # Presumably one sampling ratio per training loop/stage -- confirm
            # against the speech model's loop planning below.
            "sample_ratio": [0.1, 0.2, 0.4, 0.1, 0.2, 0.2],
        }
    },
    "model_space": {
        # (sic) "planniing" typo kept: consumers look this key up verbatim.
        "model_loop_planniing": {
            "lightwei_train_end_loop": 3,
            "midwei_train_start_loop": 3,
            "midwei_predict_block_loop": 11,
        }
    },
}
# Wire the speech-specific config into the global config object.
autodl_global_config["speech_global_conf"] = speech_global_conf_data
# Config for Covertor
# NOTE(review): IF_RESET_TFGRAPH_SESS_RUN and TF_DATASET_TO_NUMPY_MODE are
# re-assigned here with the same values as near the top of the file --
# presumably a copy/paste leftover; confirm before removing either copy.
IF_RESET_TFGRAPH_SESS_RUN = False
TF_DATASET_TO_NUMPY_MODE = "graph"  # eager/graph
# Config for Solution
VIDEO_SOLUTION_FLAG = "2d"  # 2d/3d
class MetaSoluConf(object):
    """Chosen solution backend per domain (cv / nlp / speech)."""
    def __init__(self):
        # All three slots start unset; they are filled from parsed config data.
        self.cv_solution = self.nlp_solution = self.speech_solution = None
class DsDomainDatasetConf(object):
    """Sampling options for one domain's dataset."""
    def __init__(self):
        # Unset until populated: whether to subsample, and at what ratio.
        self.if_sample = self.sample_ratio = None
class DsDomainDatasetSets(object):
    """One DsDomainDatasetConf per supported domain (text, speech)."""
    def __init__(self):
        for domain in ("text", "speech"):
            setattr(self, domain, DsDomainDatasetConf())
class DsConf(object):
    """Data-space configuration: wraps the per-domain dataset settings."""
    def __init__(self):
        self.domain_dataset = DsDomainDatasetSets()
class AutoDlConf(object):
    """Typed skeleton mirroring autodl_global_config's top-level layout."""
    def __init__(self):
        self.meta_solution = MetaSoluConf()
        self.data_space = DsConf()
class ConfigParserA(object):
    """Turns nested dict config data into attribute-accessible objects."""
    def _json_object_hook(self, d):
        # Build an ad-hoc namedtuple whose fields are the dict's keys, so
        # nested dicts become dotted-attribute access.
        return namedtuple("X", d.keys())(**d)
    def json2obj(self, data):
        """Parse a JSON string into a tree of namedtuples."""
        return json.loads(data, object_hook=self._json_object_hook)
    def from_type_autodlconf(self, conf_data) -> AutoDlConf:
        """Round-trip ``conf_data`` (a plain dict) through JSON into namedtuples."""
        serialized = json.dumps(conf_data)
        return copy.deepcopy(self.json2obj(serialized))
# Pretty-printed JSON form of the global config (useful for logging).
autodl_g_conf_repr = json.dumps(autodl_global_config, indent=4)
config_parser_a = ConfigParserA()
# Parsed, attribute-accessible view of the global config, plus convenience
# aliases that the rest of the code imports directly.
AUTODL_G_CONF = config_parser_a.from_type_autodlconf(autodl_global_config)
META_SOLUS = AUTODL_G_CONF.meta_solution
DM_DS_PARAS = AUTODL_G_CONF.data_space.domain_dataset
speech_global_conf = AUTODL_G_CONF.speech_global_conf
speech_ds_tds_conf = speech_global_conf.data_space.tf_dataset
speech_ms_conf = speech_global_conf.model_space
speech_ms_mlp_conf = speech_ms_conf.model_loop_planniing
def main():
    """Smoke test: parse the module-level config and print a few fields."""
    parser = ConfigParserA()
    conf = parser.from_type_autodlconf(autodl_global_config)
    print(conf.meta_solution.speech_solution)
    print(conf.data_space.domain_dataset.text.if_sample)
    print(conf.data_space.domain_dataset.speech.sample_ratio)
if __name__ == "__main__":
    main()
| [
"[email protected]"
] | |
ed1765e3a5caedf0c2941f564b78d88cedcb7aaa | 82e46ddaeca0a9147f0da00f3edc7c25e2b52596 | /project08_django_mineralcatalog2/minerals/tests/tests_models.py | 51d878ee633b631e181419808af303c94ced4557 | [] | no_license | sabinem/treehouse-python-techdegree | 8f0fd57681fa0cc620e4c3bfa7553a6647bbef06 | 8bfbba09132b405f7c68cbfd9a0e7596223c3a53 | refs/heads/master | 2021-09-15T05:21:25.631722 | 2018-03-07T08:20:11 | 2018-03-07T08:20:11 | 121,496,395 | 3 | 1 | null | null | null | null | UTF-8 | Python | false | false | 11,679 | py | """
Tests for the minerals app's models
---------------------------------------------------------
- The Database is filled with data by the datamigrations,
Therefore testdata is already available and must not be
created.
"""
import os
import re
from django.test import TestCase
from django.conf import settings
from django.utils.text import slugify
from ..models import Mineral
from minerals.views import SearchParams
class MineralModelTests(TestCase):
    """Tests the Model Mineral
    The test database is populated by the app's data migrations (see the
    module docstring), so these tests query existing records rather than
    creating fixtures.
    """
    def setUp(self):
        """a mineral is set up"""
        self.mineral = Mineral.minerals.first()
    def test_attributes_weigthed(self):
        """returns attributes in order of
        how often they occur"""
        fields = Mineral.attributes_weighted()
        # The first 15 attributes have a deterministic frequency order; the
        # last two are tied, so they are compared as a set below.
        self.assertListEqual(fields[:-2], [
            'group',
            'formula',
            'category',
            'strunz_classification',
            'crystal_system',
            'mohs_scale_hardness',
            'luster',
            'color',
            'specific_gravity',
            'cleavage',
            'diaphaneity',
            'crystal_habit',
            'streak',
            'optical_properties',
            'refractive_index', ])
        self.assertSetEqual(set(fields[-2:]), {
            'unit_cell',
            'crystal_symmetry',
        })
    def test_image_path(self):
        """the minerals image path is returned"""
        imgfile = \
            os.path.join(settings.MINERALS_STATIC_DIR,
                         self.mineral.image_path)
        self.assertTrue(
            os.path.isfile(imgfile)
        )
    def test___str__(self):
        """the mineral is represented by its name"""
        self.assertEqual(
            str(self.mineral),
            self.mineral.name)
    def test_get_gravity_bounds(self):
        """
        extracts specific gravity bounds correctly from the input data
        for specific gravity
        """
        # The inputs below are real-world free-text formats found in the
        # dataset (ranges, measured/calculated pairs, per-endmember values,
        # garbled unicode prefixes, and non-breaking-space units).
        self.assertEqual(
            Mineral.get_gravity_bounds('5.8–6.2 (meas.); 6.37 (calc.)'),
            (5.8, 6.37)
        )
        self.assertEqual(
            Mineral.get_gravity_bounds('1 - 2.6'),
            (1.0, 2.6)
        )
        self.assertEqual(
            Mineral.get_gravity_bounds('1.993'),
            (1.993, 1.993)
        )
        self.assertEqual(
            Mineral.get_gravity_bounds(
                '3.564 (Fo100); 3.691 (Fo90); 4.845 (Fa100)'),
            (3.564, 4.845)
        )
        self.assertEqual(
            Mineral.get_gravity_bounds(
                '7000352000000000000♠3.52±0.01',),
            (3.52, 3.52)
        )
        self.assertEqual(
            Mineral.get_gravity_bounds(
                'Whiteite-(CaFeMg) 2.58Whiteite-(MnFeMg)2.67Whiteite-(CaMnMg)2.63'),
            (2.58, 2.67)
        )
        self.assertEqual(
            Mineral.get_gravity_bounds('3'),
            (3, 3)
        )
        self.assertEqual(
            Mineral.get_gravity_bounds('3.41\xa0g/cm3'),
            (3.41, 3.41)
        )
    def test_get_gravity_bounds_for_blank_input(self):
        """
        sets the bound to None, if the mineral
        has no specific gravity attribute
        """
        self.assertTupleEqual(
            Mineral.get_gravity_bounds(''), (None, None)
        )
    def test_group_slug(self):
        """for all groups a slug is derived from the group
        it should be possible to recover the group from that slug"""
        # Round-trip check over every distinct group in the database.
        groups = Mineral.minerals.order_by('group')\
            .values_list('group', flat=True).distinct()
        for group in groups:
            slug = Mineral.get_group_slug(group)
            group_from_slug = Mineral.get_group_from_slug(slug)
            self.assertEquals(
                group, group_from_slug
            )
    def test_get_search_letter_letter(self):
        """a normal search letter is returned as is"""
        self.assertEqual(
            Mineral.get_search_letter("c"), "c"
        )
    def test_get_search_letter_default(self):
        """when no search letter is given 'a' is returned"""
        self.assertEqual(
            Mineral.get_search_letter(), settings.MINERALS_DEFAULT_LIST_LETTER
        )
class MineralManagerTests(TestCase):
"""tests the minerals querysets"""
def setUp(self):
"""one minerals and all minerals are set up"""
self.all_minerals = Mineral.minerals.all()
self.one_mineral = Mineral.minerals.first()
def test_get_minerals_by_group(self):
"""the minerals for a group are determined correctly"""
group = self.one_mineral.group
test_qs = Mineral.minerals.get_minerals_by_group(group)
self.assertListEqual(
list(test_qs),
[m for m in self.all_minerals if m.group == group]
)
def test_get_minerals_for_letter(self):
"""the minerals for a letter are determined correctly"""
letter = "b"
test_qs = Mineral.minerals.get_minerals_for_letter(letter)
self.assertListEqual(
list(test_qs),
[m for m in self.all_minerals if slugify(m.name[0]) == letter]
)
def test_get_mineral_from_slug_exists(self):
"""the mineral can be derived from its slug"""
mineral = self.one_mineral
test_mineral_get = \
Mineral.minerals.get_mineral_from_slug(mineral.mineral_slug)
self.assertEqual(
mineral, test_mineral_get
)
def test_filter_minerals_by_id_list(self):
"""minerals can be found form an id_list"""
id_list = [m.id for m in self.all_minerals[0:10]]
test_qs = Mineral.minerals.filter_minerals_by_id_list(id_list)
self.assertSetEqual(
set([m.id for m in test_qs]), set(id_list)
)
def test_filter_minerals_by_chem_element_one_letter(self):
"""minerals for a one letter chemical
element are determined correctly"""
test_qs = Mineral.minerals.filter_minerals_by_chem_element("F")
self.assertSetEqual(
set([(m.name, m.formula)
for m in self.all_minerals
if re.search(r'F[^a-z]', m.formula)]),
set([(m.name, m.formula) for m in test_qs])
)
def test_filter_minerals_by_chem_element_two_letter(self):
"""minerals for a two letter
chemical element are determined correctly"""
test_qs = Mineral.minerals.filter_minerals_by_chem_element("Fe")
self.assertSetEqual(
set([(m.name, m.formula)
for m in self.all_minerals
if re.search(r'Fe[^a-z]', m.formula)]),
set([(m.name, m.formula) for m in test_qs])
)
def test_get_random_mineral(self):
"""a random mineral is returned"""
test_mineral_get = Mineral.minerals.get_random_mineral()
self.assertIn(test_mineral_get.id,
[m.id for m in self.all_minerals])
def test_get_ordered_groups(self):
"""the groups are returned with 'Other'
at the last position"""
test_list = Mineral.minerals.get_ordered_groups()
groups = {m.group for m in self.all_minerals}
self.assertEqual(len(test_list), len(groups))
self.assertEqual(test_list[-1], "Other")
def test_filter_minerals_by_searchterm(self):
"""the fulltext search in the mineral attributes returns
a record correctly, if the searchterm appears in one of its
fields"""
mineral = self.one_mineral
for field in mineral._meta.fields:
if hasattr(mineral, field.name):
if field.name not in ['id', 'image_filename']:
term = str(getattr(mineral, field.name))[3:12]
test_qs = \
Mineral.minerals.filter_minerals_by_searchterm(term)
self.assertIn(
mineral.id,
[m.id for m in test_qs]
)
def test_filter_minerals_by_specific_gravity_exact(self):
"""the mineral is found if search for its excact specific
gravity range"""
mineral = \
Mineral.minerals.exclude(specific_gravity="").first()
gravity_bounds = \
Mineral.get_gravity_bounds(mineral.specific_gravity)
test_qs = \
Mineral.minerals.filter_minerals_by_specific_gravity(
gravity_bounds)
self.assertIn(
mineral.id,
[m.id for m in test_qs]
)
def test_filter_minerals_by_specific_gravity_example(self):
"""minerals are determined correctly
for a specific gravity range"""
gravity_bounds = (6, 8)
test_qs = Mineral.minerals.filter_minerals_by_specific_gravity(
gravity_bounds)
expected_mineral_ids = \
[m.id for m in Mineral.minerals.exclude(specific_gravity="")
if (float(Mineral.get_gravity_bounds(
m.specific_gravity)[0]) <= 8 and
float(Mineral.get_gravity_bounds(
m.specific_gravity)[1]) >= 6)]
self.assertEqual(
set(expected_mineral_ids),
{m.id for m in test_qs}
)
def test_get_minerals_from_search_params_term(self):
"""minerals are determined correctly from the searchparameters
in case of just a search term"""
search_params = SearchParams("Ab", None, None)
qs_test = Mineral.minerals.get_minerals_from_search_params(
search_params
)
expected_qs = Mineral.minerals\
.filter_minerals_by_searchterm(search_params.searchterm)
self.assertSetEqual(
{m.id for m in qs_test},
{m.id for m in expected_qs}
)
def test_get_minerals_from_search_params_chem_element(self):
"""minerals are determined correctly from the searchparameters
in case of just a chemical element"""
search_params = SearchParams(None, "Na", None)
qs_test = Mineral.minerals.get_minerals_from_search_params(
search_params
)
expected_qs = Mineral.minerals\
.filter_minerals_by_chem_element("Na")
self.assertSetEqual(
{m.id for m in qs_test},
{m.id for m in expected_qs}
)
def test_get_minerals_from_search_params_gravity_bound(self):
"""minerals are determined correctly from the searchparameters
in case of just a specific gravity bound"""
search_params = SearchParams(None, None, (7, 9))
qs_test = Mineral.minerals.get_minerals_from_search_params(
search_params
)
expected_qs = Mineral.minerals\
.filter_minerals_by_specific_gravity((7, 9))
self.assertSetEqual(
{m.id for m in qs_test},
{m.id for m in expected_qs}
)
def test_get_minerals_from_search_params_combination(self):
"""minerals are determined correctly from the searchparameters
in case of a combined search"""
search_params = SearchParams("Ab", "Fe", (2, 7))
qs_test = Mineral.minerals.get_minerals_from_search_params(
search_params
)
expected_qs1 = Mineral.minerals\
.filter_minerals_by_searchterm("Ab")
expected_qs2 = Mineral.minerals\
.filter_minerals_by_specific_gravity((2, 9))
expected_qs3 = Mineral.minerals\
.filter_minerals_by_chem_element("Fe")
self.assertSetEqual(
{m.id for m in qs_test},
{m.id for m in expected_qs1
if (m in expected_qs2 and m in expected_qs3)}
)
| [
"[email protected]"
] | |
c04ca627ca582efb45ec32439fa345f6c40a0feb | 27e0651ada3d891fe88467e2158daab11b58947d | /goodtables/processors/schema.py | 1944a5a8d645cc3a8ae503e8884b8ab9615f6d90 | [
"MIT"
] | permissive | mychapati/goodtables | f94a017b4b9c476621b71839e20a22b05cce7314 | 3c81af866951ca340ca441e66f25a3597d15c8ee | refs/heads/master | 2021-01-17T06:27:06.215070 | 2015-06-07T13:44:11 | 2015-06-07T13:44:11 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 10,893 | py | # -*- coding: utf-8 -*-
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import jtskit
from . import base
RESULTS = {
'schema_001': {
'id': 'schema_001',
'name': 'Incorrect Headers',
'msg': ('There is a mismatch between the headers according to the '
'schema, and those found in the data. The schema says the '
'headers should be: {0}.'),
'help': '',
'help_edit': ''
},
'schema_002': {
'id': 'schema_002',
'name': 'Incorrect Dimensions',
'msg': 'The row dimensions do not match the header dimensions.',
'help': '',
'help_edit': ''
},
'schema_003': {
'id': 'schema_003',
'name': 'Incorrect Type',
'msg': 'The value is not a valid {0}.',
'help': '',
'help_edit': ''
},
'schema_004': {
'id': 'schema_004',
'name': 'Required Field',
'msg': 'Column {0} is a required field, but no value can be found in row {1}.',
'help': '',
'help_edit': ''
},
'schema_005': {
'id': 'schema_005',
'name': 'Non-Required Field (Empty/Null)',
'msg': 'Column {0} is a non-required field, and has a null value in row {1}.',
'help': '',
'help_edit': ''
},
'schema_006': {
'id': 'schema_006',
'name': 'Unique Field',
'msg': 'Column {0} is a unique field, yet the value {1} already exists.',
'help': '',
'help_edit': ''
}
}
class SchemaProcessor(base.Processor):
"""Process data against a JSON Table Schema."""
name = 'schema'
RESULT_TYPES = RESULTS
def __init__(self, fail_fast=False, report_limit=1000,
row_limit=30000, schema=None, ignore_field_order=True,
report_stream=None, report=None,
result_level='error', infer_schema=False,
case_insensitive_headers=False, **kwargs):
super(SchemaProcessor, self).__init__(
fail_fast=fail_fast, report_limit=report_limit,
row_limit=row_limit, report_stream=report_stream, report=report,
result_level=result_level)
self.infer_schema = infer_schema
self.case_insensitive_headers = case_insensitive_headers
self.ignore_field_order = ignore_field_order
if not schema:
self.schema = None
else:
self.schema = self.schema_model(schema)
self._uniques = {}
def schema_model(self, schema):
try:
model = jtskit.models.SchemaModel(schema, self.case_insensitive_headers)
except (jtskit.exceptions.InvalidJSONError, jtskit.exceptions.InvalidSchemaError) as e:
raise e
return model
def pre_run(self, data_table):
if (self.schema is None) and self.infer_schema:
sample_values = data_table.get_sample(300)
self.schema = self.schema_model(jtskit.infer(data_table.headers, sample_values))
return True, data_table
def run_header(self, headers, header_index=0):
valid = True
if self.case_insensitive_headers:
headers = [name.lower() for name in headers]
if self.schema:
if self.ignore_field_order:
if not (set(headers).issuperset(set(self.schema.required_headers))):
valid = False
_type = RESULTS['schema_001']
entry = self.make_entry(
self.name,
self.RESULT_CATEGORY_HEADER,
self.RESULT_LEVEL_ERROR,
_type['msg'].format(', '.join(self.schema.headers)),
_type['id'],
_type['name'],
headers,
header_index,
self.RESULT_HEADER_ROW_NAME
)
self.report.write(entry)
if self.fail_fast:
return valid, headers
else:
header_length = len(headers)
if not (headers == self.schema.required_headers[:header_length]):
valid = False
_type = RESULTS['schema_001']
entry = self.make_entry(
self.name,
self.RESULT_CATEGORY_HEADER,
self.RESULT_LEVEL_ERROR,
_type['msg'].format(headers, self.schema.headers),
_type['id'],
_type['name'],
headers,
header_index,
self.RESULT_HEADER_ROW_NAME,
)
self.report.write(entry)
if self.fail_fast:
return valid, headers
return valid, headers
def run_row(self, headers, index, row):
valid = True
row_name = self.get_row_id(headers, row)
if self.schema:
if not (len(headers) == len(row)):
valid = False
_type = RESULTS['schema_002']
entry = self.make_entry(
self.name,
self.RESULT_CATEGORY_ROW,
self.RESULT_LEVEL_ERROR,
_type['msg'],
_type['id'],
_type['name'],
row,
index,
row_name,
)
self.report.write(entry)
if self.fail_fast:
return valid, headers, index, row
else:
for column_name, column_value in zip(headers, row):
# handle case where column_name not even in schema
if not self.schema.has_field(column_name):
pass
# we know the field is in the schema
else:
# check type and format
if self.schema.cast(column_name, column_value) is False:
valid = False
_type = RESULTS['schema_003']
entry = self.make_entry(
self.name,
self.RESULT_CATEGORY_ROW,
self.RESULT_LEVEL_ERROR,
_type['msg'].format(self.schema.get_type(column_name).name.title()),
_type['id'],
_type['name'],
row,
index,
row_name,
headers.index(column_name),
column_name
)
self.report.write(entry)
if self.fail_fast:
return valid, headers, index, row
# CONSTRAINTS
constraints = self.schema.get_constraints(column_name)
if constraints['required'] is True and \
(column_value in self.schema.NULL_VALUES):
valid = False
_type = RESULTS['schema_004']
entry = self.make_entry(
self.name,
self.RESULT_CATEGORY_ROW,
self.RESULT_LEVEL_ERROR,
_type['msg'].format(column_name, index),
_type['id'],
_type['name'],
row,
index,
row_name,
headers.index(column_name),
column_name
)
self.report.write(entry)
if self.fail_fast:
return valid, headers, index, row
if constraints['required'] is False and \
(column_value in self.schema.NULL_VALUES) and \
self.result_level == self.RESULT_LEVEL_INFO:
# add info result
_type = RESULTS['schema_005']
entry = self.make_entry(
self.name,
self.RESULT_CATEGORY_ROW,
self.RESULT_LEVEL_INFO,
_type['msg'].format(column_name, index),
_type['id'],
_type['name'],
row,
index,
row_name,
headers.index(column_name),
column_name
)
self.report.write(entry)
if self.fail_fast:
return valid, headers, index, row
if constraints.get('unique') is True:
if not self._uniques.get(column_name):
self._uniques[column_name] = set([column_value])
elif column_value in self._uniques[column_name]:
_type = RESULTS['schema_006']
entry = self.make_entry(
self.name,
self.RESULT_CATEGORY_ROW,
self.RESULT_LEVEL_ERROR,
_type['msg'].format(column_name, column_value),
_type['id'],
_type['name'],
row,
index,
row_name,
headers.index(column_name),
column_name
)
self.report.write(entry)
if self.fail_fast:
return valid, headers, index, row
else:
self._uniques[column_name].add(column_value)
# TODO: check constraints.min* and constraints.max*
return valid, headers, index, row
| [
"[email protected]"
] | |
cb9f0a252c18c9148ff97d724c5ff0ef3776dd78 | 1a5392dad90182c521753127d04a4e7c367fca0e | /app.py | fac131be0dd4d7e9b90ff32935cec6a2834994b9 | [] | no_license | Cvam06/digiflux-monitor-backend | eb5799c50e3713bfb8392835399f321bc7f9a5e4 | 5f7a4b0f38f2e7217c8d983285c6acc7b1de9db4 | refs/heads/master | 2023-02-12T12:34:48.336672 | 2021-01-08T17:34:23 | 2021-01-08T17:34:23 | 327,970,590 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 86 | py | from digifluxMonitor import app
#Run Server
if __name__ == '__main__':
app.run()
| [
"[email protected]"
] | |
0660ad18789747b956180df9c82ae52168af5906 | 803988c9f1d649456757395d37ed6a657e4ddaf2 | /04-code-golf/tf-04-pn.py | 87c6b3e3da3aec7af073ba2fab74f6ce08db6882 | [
"MIT"
] | permissive | aglie/exercises-in-programming-style | 81a3d6af444830cf6667803f4e15ac551b4690bf | e0cf1639266c48ebc29b164fe156886a4267027a | refs/heads/master | 2021-01-15T10:53:32.307052 | 2013-11-27T05:07:42 | 2013-11-27T05:07:42 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 349 | py | # My golf score is slightly lower!
# Best wishes, Peter Norvig
import re, sys, collections
stopwords = set(open('../stop_words.txt').read().split(','))
words = re.findall('[a-z]{2,}', open(sys.argv[1]).read().lower())
counts = collections.Counter(w for w in words if w not in stopwords)
for (w, c) in counts.most_common(25):
print w, '-', c
| [
"[email protected]"
] | |
08527f6b7228c02ce9b5815eefa3d666d57cbfd4 | de24f83a5e3768a2638ebcf13cbe717e75740168 | /moodledata/vpl_data/320/usersdata/279/87865/submittedfiles/lecker.py | 2edf18ff90115c4a8a56d88b79c72f2c4e082079 | [] | no_license | rafaelperazzo/programacao-web | 95643423a35c44613b0f64bed05bd34780fe2436 | 170dd5440afb9ee68a973f3de13a99aa4c735d79 | refs/heads/master | 2021-01-12T14:06:25.773146 | 2017-12-22T16:05:45 | 2017-12-22T16:05:45 | 69,566,344 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 381 | py | # -*- coding: utf-8 -*-
import math
a1=float(input())
a2=float(input())
a3=float(input())
a4=float(input())
r=0
for i in range (1,5,1) :
if a1>ai :
r=r+1
for i in range (1,5,1) :
if a2>ai :
r=r+1
for i in range (1,5,1) :
if a3>ai :
r=r+1
for i in range (1,5,1) :
if a4>ai :
r=r+1
if r==1 :
print('S')
else :
print('N') | [
"[email protected]"
] | |
3aad4316f5e3d0680f707d2ec4ca589b34723030 | f7dde2747e2acc74e38c564931ff88f508a1f8e8 | /app/settings.py | 1fd219103dbaa08730990af97f159eea7c53c933 | [] | no_license | pavancse17/user-activity | 8c1013985c9bf14e8bffc71a08bfcf65cae42555 | 43d65224d062507040475be4687254c86cf4c567 | refs/heads/master | 2021-05-22T19:31:59.764556 | 2020-04-04T21:56:17 | 2020-04-04T21:56:17 | 253,059,830 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,217 | py | """
Django settings for app project.
Generated by 'django-admin startproject' using Django 3.0.5.
For more information on this file, see
https://docs.djangoproject.com/en/3.0/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/3.0/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.0/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 't%o7hy6egs%pk^@32a0e8l-c287_e_$(8l^ba2_dd&z+hxv5sj'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'rest_framework',
'user_activity'
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'app.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'app.wsgi.application'
# Database
# https://docs.djangoproject.com/en/3.0/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/3.0/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/3.0/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.0/howto/static-files/
STATIC_URL = '/static/'
AUTH_USER_MODEL = 'user_activity.User'
import django_heroku
django_heroku.settings(locals())
| [
"[email protected]"
] | |
fbdd233413a6754cd7a5c5829353797caec260d7 | 143fd49858528d25d4695b4563be5defe701a61f | /0x0F-python-object_relational_mapping/model_city.py | f3aeeaefd948fa3bfb0ef99fc14a0316df809d6e | [] | no_license | jemn21819/holbertonschool-higher_level_programming | a64e0ead14165542d3eb17cf10c8b915e1f8570e | bb8ad3e7c15086fcd3d94f1d103ac38bc851e01f | refs/heads/master | 2023-04-17T10:24:35.623482 | 2021-05-04T21:22:58 | 2021-05-04T21:22:58 | 319,295,963 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 550 | py | #!/usr/bin/python3
"""
Module that contains class definition for City
"""
from model_state import State
from sqlalchemy import Column, Integer, String, ForeignKey
from sqlalchemy.ext.declarative import declarative_base
Base = declarative_base()
class City(Base):
"""MySQL table cities"""
__tablename__ = 'cities'
id = Column(
Integer, primary_key=True, autoincrement=True,
unique=True, nullable=False)
name = Column(String(128), nullable=False)
state_id = Column(Integer, ForeignKey(State.id), nullable=False)
| [
"[email protected]"
] | |
7339d5cfe2b6347fb993b26b03f127bb7483a24e | aa5d01cced9078c099ef62f0e1701ca078068ecb | /unitology/tests/test_views.py | a628013ecb687862870ac06c8e6e754ca20c5416 | [
"MIT"
] | permissive | erikvw/django-unitology | 951323c5adf2021f73908d8e04395e599a58c476 | 646c1867c8d838254aac714c7abb76787b219510 | refs/heads/master | 2020-09-05T13:15:52.104990 | 2019-03-23T16:35:16 | 2019-03-23T16:35:16 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,870 | py | # -*- coding: utf-8 -*-
from django.test import TestCase
from django.core.urlresolvers import reverse
from unitology.variables import IMPERIAL, METRIC
class ReloadViewTest(TestCase):
def test_weight_multi_field(self):
data = {
'from_units': IMPERIAL,
'to_units': METRIC,
'id': 'id_weight',
'name': 'weight',
'value': '220',
'module_name': 'unitology.formfields',
'klass_name': 'WeightMultiField'
}
response = self.client.get(reverse('unitology_reload'), data, **{
'HTTP_X_REQUESTED_WITH': 'XMLHttpRequest'})
self.assertEqual(response.status_code, 200)
self.assertTrue('99.79' in str(response.content) and 'kgs' in str(response.content))
data = {
'from_units': METRIC,
'to_units': IMPERIAL,
'id': 'id_weight',
'name': 'weight',
'value': '100',
'module_name': 'unitology.formfields',
'klass_name': 'WeightMultiField'
}
response = self.client.get(reverse('unitology_reload'), data, **{
'HTTP_X_REQUESTED_WITH': 'XMLHttpRequest'})
self.assertEqual(response.status_code, 200)
self.assertTrue('220.46' in str(response.content) and 'lbs' in str(response.content))
# pass incorrect value
data = {
'from_units': IMPERIAL,
'to_units': METRIC,
'id': 'id_weight',
'name': 'weight',
'value': 'qwetry',
'module_name': 'unitology.formfields',
'klass_name': 'WeightMultiField'
}
response = self.client.get(reverse('unitology_reload'), data, **{
'HTTP_X_REQUESTED_WITH': 'XMLHttpRequest'})
self.assertEqual(response.status_code, 200)
self.assertTrue('' in str(response.content) and 'kgs' in str(response.content))
def test_height_multi_field(self):
data = {
'from_units': IMPERIAL,
'to_units': METRIC,
'id': 'id_height',
'name': 'height',
'value[]': ['5', '8'],
'module_name': 'unitology.formfields',
'klass_name': 'HeightMultiField'
}
response = self.client.get(reverse('unitology_reload'), data, **{
'HTTP_X_REQUESTED_WITH': 'XMLHttpRequest'})
self.assertEqual(response.status_code, 200)
self.assertTrue('172.72' in str(response.content) and 'cm' in str(response.content))
data = {
'from_units': METRIC,
'to_units': IMPERIAL,
'id': 'id_height',
'name': 'height',
'value': '175',
'module_name': 'unitology.formfields',
'klass_name': 'HeightMultiField'
}
response = self.client.get(reverse('unitology_reload'), data, **{
'HTTP_X_REQUESTED_WITH': 'XMLHttpRequest'})
self.assertEqual(response.status_code, 200)
self.assertTrue('5' in str(response.content) and 'ft' in str(response.content))
self.assertTrue('8' in str(response.content) and 'in' in str(response.content))
# pass incorrect value
data = {
'from_units': METRIC,
'to_units': IMPERIAL,
'id': 'id_height',
'name': 'height',
'value[]': 'qwerty',
'module_name': 'unitology.formfields',
'klass_name': 'HeightMultiField'
}
response = self.client.get(reverse('unitology_reload'), data, **{
'HTTP_X_REQUESTED_WITH': 'XMLHttpRequest'})
self.assertEqual(response.status_code, 200)
self.assertTrue('0' in str(response.content) and 'ft' in str(response.content))
self.assertTrue('0' in str(response.content) and 'in' in str(response.content))
| [
"[email protected]"
] | |
5a66c03536fed0e76c58d291c12b60c3d4ea7df6 | ad2d2381951fc30514f8039528bf41954b62f39e | /python2/day03/Chating/chat_server.py | 50595704621d489cd137c9dc898f58b0aec6f057 | [] | no_license | seyoung5744/Basic-Python-in-playdata | bbb30213434aead1e13c3d6367916cc222dcddb3 | facb4d852ee963f0244a2e69cc02af77730e8104 | refs/heads/main | 2023-04-30T14:18:09.526009 | 2021-05-14T02:12:39 | 2021-05-14T02:12:39 | 310,472,276 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,696 | py | import socket, threading
soc_list=[] #채팅방. 연결된 클라이언트 소켓
def client(soc, addr):
soc_list.append(soc) #방금 접속한 클라이언트 소켓을 리스트에 담음
while True:
data = soc.recv(1024)
msg = data.decode()
if msg=='/stop':
soc.sendall(data)#본인한테 /stop 전송
soc_list.remove(soc)
msg = str(addr)+' 님이 퇴장하셨습니다.'
for s in soc_list:
s.sendall(msg.encode())
break
else:
print('Received from', addr, msg)
msg = str(addr)+' : '+msg
for s in soc_list:
s.sendall(msg.encode())
soc.close()
print(addr, '퇴장')
def main():
HOST = 'localhost' #server ip
PORT = 9999 #server port
#server socket open. socket.AF_INET:주소체계(IPV4), socket.SOCK_STREAM:tcp
server_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
#포트 여러번 바인드하면 발생하는 에러 방지
server_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
#바인드:오픈한 소켓에 IP와 PORT 할당
server_socket.bind((HOST, PORT))
#이제 accept할 수 있음을 알림
server_socket.listen()
print('server start')
#accept로 client의 접속을 기다리다 요청시 처리.
#client와 1:1통신할 작은 소켓과 연결된 상대방의 주소 반환
while True:
client_socket, addr = server_socket.accept()
print('Connected by', addr)
t = threading.Thread(target=client, args=(client_socket,addr))
t.start()
server_socket.close()
main()
| [
"[email protected]"
] | |
a0e244a92dc257cd1ab3627c339d5d5b068f0ebe | d4977816258bb4e28398009833aba07b8243cfa6 | /CSDNspider/searchsql.py | 8d3e9fdd8016972914bf6464e36b3ac89d23fc2f | [] | no_license | junbaibai0719/csdnPaChong | eb7b1fe6e6d0539b7b02e4df1f8274c156cbe068 | aa7a9a4fff12114ee707f4a3eb882853fe8f773e | refs/heads/master | 2020-07-31T19:35:02.408743 | 2019-09-25T01:43:16 | 2019-09-25T01:43:16 | 210,728,710 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,936 | py | import sqlite3
class MySql():
def __init__(self, database):
# 连接数据库
self.database = database
self.conn = sqlite3.connect(self.database)
self.cursor = self.conn.cursor()
self.cursor.execute('''create table if not exists urldata
(url char primary key not null ,
state int,
title message_text ,
nickname nchar ,
shown_year year ,
showm_month month ,
shown_day day ,
shown_time time ,
lable message_text
);''')
# cursor.execute('''create table if not exists offsetdata
# (shown_offset char primary key not null ,
# state int
# );''')
# #创建表保存文章数据,时间数据类型为timestamp,yyyy-mm-dd hh-mm-ss.sss,使用datetime(timestring)得到yyyy-mm-dd hh-mm-ss.sss形状的日期
# cursor.execute('''create table if not exists articedata
# (article message_text primary key not null ,
# shown_time timestamp ,
# category char
# );''')
self.conn.commit()
def save(self, tablename = 'urldata', list=[]):
# sqlcode0 = "insert into {} values (?,?)".format(tablename)
sqlcode0 = "replace into {} values (?,?,?,?,?,?,?,?,?)".format(tablename)
sqlcode1 = "insert into {} values (?,?,?,?,?,?,?,?,?)".format(tablename)
# conn = sqlite3.connect(self.database)
# cursor = conn.cursor()
# 将url设为主键重复插入时会报错,try用来规避重复插入。
# cursor.executemany(sqlcode0, (list))
# try:
# cursor.executemany(sqlcode1,(list))
# except Exception:
# print('**********************************************************************************************************')
for i in list:
try:
self.cursor.execute(sqlcode1,(i))
except Exception:
if i[1] == 1:
self.cursor.execute(sqlcode0,(i))
print(i)
self.conn.commit()
def read(self, tablename = 'urldata',value = 0):
# conn = sqlite3.connect(self.database)
# cursor = conn.cursor()
data = self.cursor.execute('select * from {} where state = {}'.format(tablename,value))
self.conn.commit()
count = data.fetchall()
return count
# 访问url函数,如果将一个url取出来访问
def access(self, tablename = 'urldata',col = 'url'):
# conn = sqlite3.connect(self.database)
# cursor = conn.cursor()
data = self.cursor.execute('select * from {} where state = {}'.format(tablename, 0))
result = data.fetchmany(20)
# list = []
# for i in result:
# list.append((1,i[0]))
# self.cursor.executemany('update {} set state = ? where {} = ?'.format(tablename, col), (list))
self.conn.commit()
return result
# 设置状态
def set_state(self, tablename, col, colval, state=1):
self.cursor.execute('update {} set state = ? where {} = ?'.format(tablename, col), (state, colval))
self.conn.commit()
def search(self,tablename = 'urldata',what = 'url', col = 'lable' ,colval = ''):
goturl = self.cursor.execute("select {} from {} where {} like ?".format(what,tablename,col),(colval,)).fetchall()
self.conn.commit()
return goturl
def close(self):
self.conn.close()
mysql = MySql('test.db')
data = mysql.read(value=1)
# for i in data:
# print(i)
print(data.__len__())
print(mysql.read().__len__())
print(mysql.search(colval='%work%'))
mysql.close()
| [
"[email protected]"
] | |
465e51513b0e0ec554100f6204b4bffb90706563 | 49479554c1992a9961102e3be9f1d38f40374a0c | /rolling_subs_calculator.py | 04b866b8dcd196c6d5602543888f4458f986c9f0 | [] | no_license | DavidJMilligan/rolling-subs-calculator | db8536d50477a83420acb253ec6ed5b732ee28bd | 8fbe890197efe512e9083cbe94c1565b9525f3c6 | refs/heads/master | 2021-01-19T16:24:35.092042 | 2017-04-23T10:52:53 | 2017-04-23T10:52:53 | 88,262,209 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,401 | py | #!/usr/bin/env python3
"""The rolling subs calculator calculates the amount of time each
player on a team will have on the pitch, times substitutions should
be made and which players are subbed on / off at each substitution.
"""
# Capture match variables. These vary week to week and will be set from the web app in future.
MATCH_DURATION = int(input(' Enter match duration in minutes '))
NUMBER_PLAYERS = int(input(' Enter total number of available players '))
NUMBER_GOALKEEPERS = 1
# Set game variables. These do not change from week to week
OUTFIELD_PLAYERS = (NUMBER_PLAYERS - NUMBER_GOALKEEPERS)
PLAYERS_ON_PITCH = 7
NUMBER_OUTFIELD_PLAYERS = (NUMBER_PLAYERS - NUMBER_GOALKEEPERS)
AVAILABLE_OUTFIELD_MINUTES = (MATCH_DURATION * (PLAYERS_ON_PITCH - 1))
MINUTES_PER_OUTFIELD = (AVAILABLE_OUTFIELD_MINUTES / OUTFIELD_PLAYERS)
SUB_FREQUENCY = (MATCH_DURATION / OUTFIELD_PLAYERS)
NUMBER_SUBS = NUMBER_PLAYERS - PLAYERS_ON_PITCH
# Capture outfield players names
OUTFIELD_PLAYERS_NAMES = []
i = 0
while len(OUTFIELD_PLAYERS_NAMES) < OUTFIELD_PLAYERS:
i += 1
PLAYER = input('Outfield player name %d: '%i)
OUTFIELD_PLAYERS_NAMES.append(PLAYER)
print(OUTFIELD_PLAYERS_NAMES)
print("\n")
# Print summary instructions
INSTRUCTIONS = "Substitute " + str(NUMBER_SUBS) + " players every " + str(SUB_FREQUENCY) + " mins"
print(INSTRUCTIONS)
print(" Every outfield player will get " + str(MINUTES_PER_OUTFIELD) + " minutes")
# Decide starting team - by first entered at the player input stage.
STARTING_TEAM = (OUTFIELD_PLAYERS_NAMES[0:PLAYERS_ON_PITCH - NUMBER_GOALKEEPERS])
STARTING_TEAM.sort()
print("\n")
print("Starting team " + str(STARTING_TEAM))
# Print substitutions plan details
SUB_COUNT = 1 # Sets the number of the first substitution to 1
NEXT_SUB = SUB_FREQUENCY #Sets initial time for NEXT_SUB variable
# Loop through sub times and print subs on, subs off and current team after each set of subs
while NEXT_SUB < (MATCH_DURATION):
print("\n") # Adds line breaks for legibility
print("@ " + str(round(NEXT_SUB, 2)) + " minutes")
print("Sub off " + str(STARTING_TEAM[0:NUMBER_SUBS]))
SUB_COUNT = SUB_COUNT + 1
NEXT_SUB = NEXT_SUB + SUB_FREQUENCY
BENCH = (set(OUTFIELD_PLAYERS_NAMES) - set(STARTING_TEAM))
del STARTING_TEAM[0:NUMBER_SUBS]
STARTING_TEAM.extend(BENCH)
print("Sub on" + str(BENCH))
print("Current team" + str(STARTING_TEAM))
| [
"[email protected]"
] | |
4b181b086fab85df2ebf93e93bfacba96ccf632b | 9e7ad8fa87a588719afa9f123a94331b44816e94 | /approachs/approach3/quickinfopanel.py | d830232291319f51ab2bbbb3d7444e6947fd71d7 | [] | no_license | gvSIGAssociation/gvsig-desktop-scripting-quickinfo | f15582e2fe3e6b4fc0e2afd782e60b38e300d233 | b231e83d07096fc75a38f9e925e2985806c84252 | refs/heads/master | 2023-05-26T05:16:55.611023 | 2023-05-17T09:56:04 | 2023-05-17T09:56:04 | 108,833,461 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,742 | py | # encoding: utf-8
import gvsig
from gvsig import getResource
from gvsig.libs.formpanel import FormPanel
from org.gvsig.tools.swing.api import ToolsSwingLocator
class QuickinfoPanel(FormPanel):
def __init__(self, layer=None):
FormPanel.__init__(self,getResource(__file__,"quickinfopanel.xml"))
self.setLayer(layer)
def setLayer(self, layer):
self.__layer = layer
if layer==None:
self.cboFields.removeAllItems()
else:
self.fillCombo(
self.cboFields,
self.__layer.getFeatureStore().getDefaultFeatureType()
)
def getLayer(self):
return self.__layer
def getFieldName(self):
name = self.cboFields.getSelectedItem()
if name == None:
return None
name = name.strip()
if name == "":
return None
return name
def fillCombo(self, combo, featureType):
combo.removeAllItems()
combo.addItem(" ")
for attr in featureType:
combo.addItem(attr.getName())
x = self.__layer.getProperty("quickinfo.fieldname")
if x in ("", None):
combo.setSelectedIndex(0)
else:
combo.setSelectedItem(x)
def save(self):
self.__layer.setProperty(
"quickinfo.fieldname",
self.getFieldName()
)
def main(*args):
viewDoc = gvsig.currentView()
layer = viewDoc.getLayer("manzanas_pob")
panel = QuickinfoPanel(layer)
winmgr = ToolsSwingLocator.getWindowManager();
dialog = winmgr.createDialog(
panel.asJComponent(),
"Quickinfo test",
"Quickinfo information",
winmgr.BUTTONS_OK_CANCEL
)
dialog.show(winmgr.MODE.DIALOG)
if dialog.getAction()==winmgr.BUTTON_OK:
print "Ok"
print "Show field: ", repr(panel.getFieldName())
panel.save()
else:
print "Cancel"
| [
"jjdelcerro@jjdc-Lenovo-G50-70"
] | jjdelcerro@jjdc-Lenovo-G50-70 |
7ced69003bd43f17bb30a6e19950b5c6713be817 | 7d87ed96be5ad70d4d394b40c3baa8a65ff531d5 | /main.py | e2fa4d6d684499da81fa88e16fb4123621710f1c | [] | no_license | CyberPopPunk/Technical_Inventory | 077c68065029804a2519a60e240b9382810affba | ba8e2309563366b801cf818780105ac61cfe16f3 | refs/heads/master | 2020-04-28T08:33:34.795304 | 2019-03-12T04:04:22 | 2019-03-12T04:04:22 | 175,132,713 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,912 | py | # Work inverntory log for HAC equipment
# 12/4/2018
# Save Items to different SQL tables in a db and take inventory easily
# Three main functions: Input Data, View Data, Take Inventory on Equip
#
import ui
import sqlalchemy as sa
from time import sleep
conn = sa.create_engine('sqlite:///tech_inv.db')
inspector = sa.inspect(conn)
def create_table():
# if table doesn't exist...generates a new SQL table for the db. Tables have 6 columns with the 6th reserved for a generated ID#
# users specify type if the column is a number (num) or description (word)
new_table_list = []
print('What is the new Table name?')
new_table = input('New Table: ').lower()
curr_tables = [x.lower() for x in inspector.get_otable_names()]
if new_table in curr_tables:
print('Table already exists!')
return
else:
new_table_list.append(new_table)
for col in range(1,6):
print('What is in column {}?'.format(col))
new_table_list.append(input("Column {}: ".format(col)))
print('What type of input is this?(num, word, boolean)')
input_type = input("Type: ").lower()
if input_type == 'number' or input_type == 'num':
new_table_list.append("INT")
elif input_type == 'bool' or input_type =='boolean':
new_table_list.append('')
else:
new_table_list.append('VARCHAR(20)')
print('Are these values correct? Y/N')
print(new_table_list)
if input('Confirm: ').lower() == 'y':
sql_call = '''CREATE TABLE {}
({} {},
{} {},
{} {},
{} {},
{} {},
id VARCHAR(3) PRIMARY KEY)'''.format(*new_table_list)
print(sql_call)
conn.execute(sql_call)
else:
print('Let\'s try again\n')
create_table()
def show_table_info(tb_name):
table_info = conn.execute('SELECT * FROM {}'.format(tb_name))
for num, row in enumerate(table_info):
print('Item {}: {}'.format(num+1, row))
print('-'*60)
def choose_input(list, title, prompt):
# Prints a numbered list and prompts the user for input based on numerical selection
# returns choice
while True:
print(title)
for num, index in enumerate(list):
print('{}. {}'.format(num + 1, index))
try:
choice = int(input(prompt))
if choice > len(list):
raise ValueError()
break
except ValueError or ValueError:
print('Invalid Entry\n')
return list[choice - 1] # compensate for list index
def table_select(title):
tables = conn.table_names()
selected_table = choose_input(tables, title, '--> ')
print("\nYou selected {}".format(selected_table))
return selected_table
def input_items(table):
# takes a table and lists out the available columns
# prompts for values and inserts them into the table in database
col_info = inspector.get_columns(table) # returns a list of dicts of attributes for each column
cols = []
new_col_vals = []
enter_items = True
#iterate over dicts in list
for i in range(len(col_info)):
#for each dict in list of dicts get 'name' from each and add it to cols list
cols.append(col_info[i].get('name'))
#clean cols into tuple for SQL INSERT
cols = tuple(cols)
while enter_items == True:
# Prompt for new values
for col in cols:
if col == 'id':
continue
new_val = input('What is the {} of new item? >>> '.format(col))
print('\n')
new_col_vals.append(new_val)
print('Values input: {}'.format(new_col_vals))
values_avail = '?,'*(len(cols)-2) + '?' #max number of columns minus 1 reserved for ID generation
print('values avail' + values_avail)
try:
new_id = generate_ID()
except:
try:
new_id = generate_ID()
except:
print('You\'re one unlucky bastard. You generated the same ID twice that already exists. \nPlease try inputting item again.... \nReturning to Main Menu...')
#add generated ID to input data
new_col_vals.append(new_id)
#compile SQL insert
ins = 'INSERT INTO {} {} VALUES {}'.format(table, cols, tuple(new_col_vals))
print(ins)
#conn.execute(ins)
#print('insert success!\n')
print('FAKE INSERT A SUCCESS! PLEASE ACTIVATE \'INSERT\' SQL query to store data!')
while True:
more = input(('Enter another item? Y/N ')).lower()
try:
if len(more) > 1:
raise ValueError("Input too long")
if more =='y':
break
elif more == 'n':
print('Returning to Main Menu...\n')
return
else:
print('Invalid Input')
except ValueError:
print('Invalid Input! please try again')
def generate_ID():
# generates and returns a 4 digit hexdigit (without 0x header) to be used for an ID
# also searches Database to verify hex doesn't already exist yet
from random import randint
new_id = '{:x}'.format(randint(1, 16**4))
print('New ID: # ' + new_id)
print('Checking if ID exists')
#searches all tables in db
for table in conn.table_names():
print('Looking through table {}...'.format(table))
#selecets the ID columns in databse
selected_IDs = conn.execute('SELECT id FROM {}'.format(table))
for row in selected_IDs:
print('Checking {}...'.format(row))
if row == new_id:
raise Exception('ID already in use! Please try again')
print('ID not used in table {}...'.format(table))
print('ID not in use')
return new_id
def inventory():
import string
curr_inv_list = []
#turn this into its own function!
counted_id = None
print('Please input an ID number, when inventory complete, type \'done\'.')
while counted_id != 'done':
while True:
counted_id = input('ID#: ').lower()
if all(char in string.hexdigits for char in counted_id) and len(counted_id) <= 4:
curr_inv_list.append(counted_id)
elif counted_id == 'done':
break
else:
print('INVALID ID, please re-enter')
total_ids = []
total_missing = []
for table in conn.table_names():
#print('Checking Table: {}'.format(table))
table_result = conn.execute('SELECT id FROM {}'.format(table))
table_ids = [table_id[0] for table_id in table_result]
#print('Category IDs for {}: {}'.format(table, table_ids))
inv_results = missing_items(curr_inv_list, table_ids)
print("______Overview of {} Category______\nMissing Items: {}\nUnknown Items count:{}".format(table, inv_results[0], len(inv_results[1])))
for item in inv_results[2]:
curr_inv_list.remove(item)
for item in inv_results[0]:
total_missing.append(item)
print('Total Items Missing: {}\n__MISSING ITEMS__'.format(len(total_missing)))
#print the info for the missing items
for table in conn.table_names():
table_result = conn.execute('SELECT * FROM {}'.format(table))
for row in table_result:
if row['id'] in total_missing:
print(row)
def missing_items(counted_items_list, expected_items_list):
# takes two lists as args, counted and expected
# returns lists of missing items and unknown items
# missing items are items not in counted list from expected
# unknown items are items in counted list not in expected
missing_items = [item for item in expected_items_list if item not in counted_items_list]
unknown_items = [item for item in counted_items_list if item not in expected_items_list]
found_items = [item for item in expected_items_list if item in counted_items_list]
return [missing_items, unknown_items, found_items]
def main_menu():
while True:
print('\nWelcome to Inventory!')
print('1. Create Category')
print('2. Input Items')
print('3. Show Category Contents')
print('4. Take Inventory')
print('5. Exit')
try:
choice = int(input("Please select an action: "))
if choice > 5:
raise ValueError()
break
except ValueError:
print("Invalid choice\n")
if choice == 1:
create_table()
elif choice == 2:
selected_table = table_select('\nWhat category would you like to enter the item in?\n')
input_items(selected_table)
elif choice == 3:
selected_table = table_select('\nWhat category of items would you like to view?\n')
show_table_info(selected_table)
elif choice == 4:
inventory()
def exit_app(sender):
print('\nGoodbye!')
exit()
#main_menu()
v = ui.load_view('tech_gui').present('sheet')
| [
"[email protected]"
] | |
c5cce2bee68e0f0d25386a2d6d4b08d99adee80a | 4ede4657c68979d7447aff783deb6b192e6edd88 | /core/utilsForProcessing.py | 3431eed17c635ca8ce7742978c391233994eeee6 | [] | no_license | zerodeku/tracking3d | 7cba6e567fc2776a73f2351b6da6248bca3b598a | 8557ea99b24a9dc4063f7e1892b54c16c6fa4564 | refs/heads/master | 2021-01-20T17:54:36.086409 | 2016-07-06T22:08:24 | 2016-07-06T22:08:24 | 61,503,057 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,153 | py | # -*- coding: utf-8 -*-
"""
Created on Tue Aug 05 14:27:06 2014
@author: tcolomb
"""
import os
from Tkinter import *
import csv
from guidata.qt.QtGui import QFileDialog, QMessageBox
import numpy as np
from os import listdir
from os.path import isfile, join
from matplotlib import pyplot as plt
#%% MessageBox to take password entry
class takeInput(object):
def __init__(self, requestMessage, boolTextOrNumber, defaultText, hideText):
self.root = Tk()
self.string = ''
self.frame = Frame(self.root)
self.frame.pack()
self.acceptInput(requestMessage, defaultText, hideText)
def acceptInput(self, requestMessage, defaultText, hideText):
r = self.frame
k = Label(r, text=requestMessage)
k.pack(side='left')
self.e = Entry(r, text='Name')
if hideText:
self.e["show"] = "*"
self.e.pack(side='left')
self.e.insert(0, defaultText)
self.e.focus_set()
b = Button(r, text='okay', command=self.gettext)
b.pack(side='right')
def gettext(self):
self.string = self.e.get()
self.root.destroy()
def getString(self):
return self.string
def waitForInput(self):
self.root.mainloop()
def getEntry(requestMessage, boolTextOrNumber, defaultText, hideText):
msgBox = takeInput(requestMessage, boolTextOrNumber, defaultText, hideText)
msgBox.waitForInput()
if boolTextOrNumber: #True=text, False=Number
return msgBox.getString()
else:
return int(float(msgBox.getString()))
def getPassword(requestMessage, defaultText):
msgBox = takeInput(requestMessage, True, defaultText, True)
msgBox.waitForInput()
return msgBox.getString()
## Directory and file
def OpenTxtFile(text, path):
filename = QFileDialog.getOpenFileName(None, text, path, filter="txt (*.txt *.)")
return filename
def CreateDirectory(directoryPath, directoryName):
if not os.path.exists(directoryPath+'\\'+directoryName):
os.makedirs(directoryPath+'\\'+directoryName)
return directoryPath + '\\' + directoryName
def DeleteAllFilesInDirectory(directoryPath):
filelist = [f for f in os.listdir(directoryPath) if f.endswith(".bin")]
for f in filelist:
os.remove(directoryPath+'\\'+f)
def FileExists(fname, extension):
return os.path.isfile(fname) and fname.endswith(extension)
def FindFileInDirectory(directory, extension):
onlyfiles = [f for f in listdir(directory) if isfile(join(directory, f))]
return onlyfiles
def ErrorMessage(message):
QMessageBox.warning(None, 'Error', message)
def Log(message):
print message
def DisplayImage(img):
plt.imshow(img)
plt.gray()
plt.show()
def SaveParamsFile(ColumnTitles, Values, fname):
if np.size(ColumnTitles) != np.size(Values):
ErrorMessage("Not possible to save because ColumnTiltes and Values are not the same size")
else:
f = open(fname, 'w')
columntext = ''
valuetext = ''
for k in range(np.size(ColumnTitles)-1):
columntext += ColumnTitles[k]+'\t'
valuetext += str(Values[k])+'\t'
columntext += ColumnTitles[-1]+'\n'
valuetext += str(Values[-1])+'\n'
f.writelines(columntext)
f.writelines(valuetext)
f.close()
#
#def ReadParmsFile(fname):
# with open(fname,'r') as f:
# reader=csv.reader(f)
# Values=[]
# for row in islice(reader,1,None):
# line=row[0].split()
# for v in line:
# Values.append((float)(v))
# return Values
def SaveParamsFileByLine(Data, fname):
fname = QFileDialog.getSaveFileName(None, "Save file", fname)
f = open(fname, 'w')
for info in Data:
f.writelines(info[0]+'\t'+str(info[1])+'\t'+info[2]+'\n')
f.close()
def ReadParamsFromLine(fname):
Data = []
with open(fname, 'r') as f:
reader = csv.reader(f, delimiter='\t')
for row in reader:
#line=row[0].split()
Data.append((row[0], row[1], row[2]))
return Data | [
"[email protected]"
] | |
042a7387750534b05b32d92c9d59e401c4ac6ab4 | 940e44d76c5688f5920e875b260b28247ff0a81c | /config/local_settings.py | 13874ce0138119b8d12daedcb48c285bf3ecfd27 | [] | no_license | ignsv/centr_osvita | d267fb2fc41f8e8a28e6e880c1a8b38b2dc39544 | efaa930d566bed50a6d664b0906ef7fc5e5f1c46 | refs/heads/develop | 2022-12-11T16:53:09.426981 | 2020-09-29T09:49:00 | 2020-09-29T09:49:00 | 250,006,828 | 0 | 0 | null | 2022-11-22T05:07:57 | 2020-03-25T14:50:43 | Python | UTF-8 | Python | false | false | 7,065 | py | # -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
import os
import environ
import raven
ROOT_DIR = environ.Path(__file__) - 2 # (/a/myfile.py - 2 = /)
APPS_DIR = ROOT_DIR.path('centr_osvita')
env = environ.Env(
DJANGO_DEBUG=(bool, False),
DJANGO_SECRET_KEY=(str, 'CHANGEME!!!x9drrvwt9y^9b)*2^9(&l@kz)jc7!5)i(-z6sp=@b2h+mo!^ae'),
DJANGO_ADMINS=(list, []),
DJANGO_ALLOWED_HOSTS=(list, []),
DJANGO_STATIC_ROOT=(str, str(APPS_DIR('staticfiles'))),
DJANGO_MEDIA_ROOT=(str, str(APPS_DIR('media'))),
DJANGO_DATABASE_URL=(str, 'postgis:///centr_osvita'),
DJANGO_EMAIL_URL=(environ.Env.email_url_config, 'consolemail://'),
DJANGO_DEFAULT_FROM_EMAIL=(str, '[email protected]'),
DJANGO_EMAIL_BACKEND=(str, 'django.core.mail.backends.smtp.EmailBackend'),
DJANGO_SERVER_EMAIL=(str, '[email protected]'),
DJANGO_USE_DEBUG_TOOLBAR=(bool, False),
DJANGO_TEST_RUN=(bool, False),
DJANGO_HEALTH_CHECK_BODY=(str, 'Success'),
DJANGO_USE_SILK=(bool, False),
)
environ.Env.read_env()
DEBUG = env.bool("DJANGO_DEBUG")
SECRET_KEY = env('DJANGO_SECRET_KEY')
ALLOWED_HOSTS = env.list('DJANGO_ALLOWED_HOSTS')
ADMINS = tuple([tuple(admins.split(':')) for admins in env.list('DJANGO_ADMINS')])
MANAGERS = ADMINS
TIME_ZONE = 'UTC'
LANGUAGE_CODE = 'en-us'
SITE_ID = 1
USE_I18N = True
USE_L10N = True
USE_TZ = True
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': 'my_database',
}
}
DJANGO_APPS = (
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'django.contrib.staticfiles',
'django.contrib.admin',
'django.contrib.postgres'
)
THIRD_PARTY_APPS = (
'django_extensions',
'phonenumber_field',
'polymorphic',
)
LOCAL_APPS = (
'centr_osvita.common.apps.CommonConfig',
'centr_osvita.users.apps.UsersConfig',
'centr_osvita.profiles.apps.ProfilesConfig',
'centr_osvita.quiz.apps.QuizConfig',
)
INSTALLED_APPS = DJANGO_APPS + THIRD_PARTY_APPS + LOCAL_APPS
AUTH_USER_MODEL = 'users.User'
ADMIN_URL = r'^admin/'
MIDDLEWARE_CLASSES = [
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
EMAIL_URL = env.email_url('DJANGO_EMAIL_URL')
EMAIL_BACKEND = EMAIL_URL['EMAIL_BACKEND']
EMAIL_HOST = EMAIL_URL.get('EMAIL_HOST', '')
if EMAIL_URL.get('EMAIL_HOST_PASSWORD', '') == 'special':
EMAIL_HOST_PASSWORD = env('DJANGO_EMAIL_HOST_PASSWORD_SPECIAL')
else:
EMAIL_HOST_PASSWORD = EMAIL_URL.get('EMAIL_HOST_PASSWORD', '')
EMAIL_HOST_USER = EMAIL_URL.get('EMAIL_HOST_USER', '')
EMAIL_PORT = EMAIL_URL.get('EMAIL_PORT', '')
EMAIL_USE_SSL = 'EMAIL_USE_SSL' in EMAIL_URL
EMAIL_USE_TLS = 'EMAIL_USE_TLS' in EMAIL_URL
EMAIL_FILE_PATH = EMAIL_URL.get('EMAIL_FILE_PATH', '')
DEFAULT_FROM_EMAIL = env('DJANGO_DEFAULT_FROM_EMAIL')
SERVER_EMAIL = env('DJANGO_SERVER_EMAIL')
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [
str(APPS_DIR.path('templates')),
],
'OPTIONS': {
'debug': DEBUG,
'loaders': [
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
],
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.template.context_processors.i18n',
'django.template.context_processors.media',
'django.template.context_processors.static',
'django.template.context_processors.tz',
'django.contrib.messages.context_processors.messages',
],
},
},
]
STATIC_URL = '/static/'
STATIC_ROOT = env('DJANGO_STATIC_ROOT')
MEDIA_URL = '/media/'
MEDIA_ROOT = env('DJANGO_MEDIA_ROOT')
STATICFILES_DIRS = (
str(APPS_DIR.path('static')),
)
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
)
ROOT_URLCONF = 'config.urls'
WSGI_APPLICATION = 'config.wsgi.application'
AUTHENTICATION_BACKENDS = (
'django.contrib.auth.backends.ModelBackend',
)
LOGGING = {
'version': 1,
'disable_existing_loggers': False,
'filters': {
'require_debug_false': {
'()': 'django.utils.log.RequireDebugFalse'
},
'require_debug_true': {
'()': 'django.utils.log.RequireDebugTrue',
},
},
'formatters': {
'verbose': {
'format': '%(levelname)s %(asctime)s %(module)s '
'%(process)d %(thread)d %(message)s'
},
},
'handlers': {
'mail_admins': {
'level': 'ERROR',
'filters': ['require_debug_false'],
'class': 'django.utils.log.AdminEmailHandler'
},
'console': {
'level': 'DEBUG',
'class': 'logging.StreamHandler',
'formatter': 'verbose',
},
},
'loggers': {
'django': {
'handlers': ['console'],
'propagate': True,
'level': 'WARN',
},
'django.request': {
'handlers': ['mail_admins'],
'level': 'ERROR',
'propagate': True
},
}
}
if os.environ.get('SENTRY_DSN'):
INSTALLED_APPS += ('raven.contrib.django.raven_compat',)
RAVEN_CONFIG = {
'dsn': env('SENTRY_DSN'),
'release': raven.fetch_git_sha(str(ROOT_DIR)),
}
USE_DEBUG_TOOLBAR = env.bool('DJANGO_USE_DEBUG_TOOLBAR')
if USE_DEBUG_TOOLBAR:
MIDDLEWARE_CLASSES += [
'debug_toolbar.middleware.DebugToolbarMiddleware',
]
INSTALLED_APPS += (
'debug_toolbar',
)
DEBUG_TOOLBAR_CONFIG = {
'DISABLE_PANELS': [
'debug_toolbar.panels.redirects.RedirectsPanel',
],
'SHOW_TEMPLATE_CONTEXT': True,
'SHOW_TOOLBAR_CALLBACK': lambda request: True,
}
DEBUG_TOOLBAR_PATCH_SETTINGS = False
# http://django-debug-toolbar.readthedocs.org/en/latest/installation.html
INTERNAL_IPS = ('127.0.0.1', '0.0.0.0', '10.0.2.2')
if env.bool('DJANGO_TEST_RUN'):
pass
HEALTH_CHECK_BODY = env('DJANGO_HEALTH_CHECK_BODY')
# Silk config
USE_SILK = env('DJANGO_USE_SILK')
if USE_SILK:
INSTALLED_APPS += (
'silk',
)
MIDDLEWARE_CLASSES += [
'silk.middleware.SilkyMiddleware',
]
SILKY_AUTHENTICATION = True # User must login
SILKY_AUTHORISATION = True # User must have permissions
SILKY_PERMISSIONS = lambda user: user.is_superuser
| [
"[email protected]"
] | |
faedbf448d91f3277c8f273d58c15188f4123932 | 1194311067114c15c33601f12c5c92819317aefe | /pySpikeSorter.py | 54ea678285097703e2119f1652a8c7e5511154fd | [] | no_license | vhazocar/pySpikeSorter | 8b08f18635ec9170b177c0c7a61e7e590197f829 | 6fa4f872e57b84deadd82b33d0c110aa2245901d | refs/heads/master | 2020-05-30T08:39:09.684203 | 2016-01-21T21:03:38 | 2016-01-21T21:03:38 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 152,267 | py | #!/usr/local/bin/ipython -i
#---------------------------------------------------------------------- IMPORTS
import os
filename = os.environ.get('PYTHONSTARTUP')
if filename and os.path.isfile(filename):
execfile(filename)
import sys
import re
import tables
import numpy as np
#import pdb
# extra widgets import
import matplotlib_widgets
import helper_widgets
from PyQt4 import QtGui, QtCore
from pyqtgraph import opengl as gl
from matplotlib import rc
from matplotlib.mlab import PCA
from matplotlib import pyplot as plt
from matplotlib.path import Path
from scipy.spatial import cKDTree
import datetime
import m_BlackrockLib as BL
#filename = os.environ.get('PYTHONSTARTUP')
#if filename and os.path.isfile(filename):
# execfile(filename)
#==============================================================================
def autocorr(TimeStamp, binSize=20, Win=[0, 10000], mode='time',
Range=[-200, 200]):
if not np.any(TimeStamp):
return
Win = np.array(Win)
TimeStamp = np.array(TimeStamp)
TimeStamp = TimeStamp - TimeStamp[0]
TS = TimeStamp[(TimeStamp >= Win[0]) & (TimeStamp < Win[1])]
if TS.size > 1000:
TimeStamp = TS
binSize = int(binSize)
nBins = TimeStamp[-1] / binSize
train = np.zeros(nBins + 1, dtype=np.int16)
for k in np.floor(TimeStamp / binSize).astype('int'):
train[k] = train[k] + 1
if mode == 'time':
ac = np.correlate(train, train, mode='same')
x = np.linspace(-TimeStamp[-1] / 2, TimeStamp[-1] / 2, ac.size)
elif mode == 'ephys':
tmp = np.array([])
for k in TimeStamp:
t = TimeStamp - k
t = t[(t > Range[0]) & (t < Range[1])]
tmp = np.append(tmp, t)
ac, x = np.histogram(tmp, bins=int(np.diff(Range) / binSize))
x = x[1:]
ac[np.flatnonzero(x == 0)] = 0
elif mode == 'fft':
s = np.fft.fft(train)
ac = np.abs(np.fft.ifft(s * np.conjugate(s)))
#ac = ac/(train.size/((Win[1]-Win[0])/1000))
ac = np.concatenate([ac[ac.size / 2:], ac[0:ac.size / 2]])
x = np.linspace(-TimeStamp[-1] / 2, TimeStamp[-1] / 2, ac.size)
return ac, x
#==============================================================================
def KlustaKwik_call(data, minClust=2, maxClust=5):
''' data must be an array of observations x dimensions'''
# create a text file with the data. The first line must be the
# number of dimensions of the data
f = open('data.fet.1', 'w')
f.write('%d\n' % data.shape[1])
for k in data:
for j in k:
f.write('%f ' % j)
f.write('\n')
f.close()
# call klustakwick with the data
if os.system('KlustaKwik data 1 -MinClusters %d -MaxClusters %d'
% (minClust, maxClust)) != 256:
return
# wait while klustakwick gets the clusters
while not os.path.isfile('data.clu.1'):
continue
# read the results
f = open('data.clu.1', 'r')
clusterData = f.readlines()
f.close()
clusterData = [int(re.search('[0-9]{1,2}', k).group()) for k in clusterData]
# the first line is the number of clusters
nClusters = clusterData[0]
clusterData.pop(0)
clusterData = np.array(clusterData)
# create an array with the indices of each cluster
clustIndx = []
for k in range(1, nClusters + 1):
clustIndx.append(np.flatnonzero(clusterData == k))
return clustIndx
#==============================================================================
# Spike Sorter Main GUI Window
rc('xtick', labelsize=8)
rc('ytick', labelsize=8)
# create instance of imported widgets
settings = helper_widgets.Settings()
autocorropts = helper_widgets.AutocorrOpts()
autoclust = helper_widgets.AutoClustWidget()
#==============================================================================
class SpikeSorter(QtGui.QMainWindow):
def __init__(self):
QtGui.QMainWindow.__init__(self)
self.setWindowTitle("pySpikeSorter")
self.setWindowIcon(QtGui.QIcon(QtGui.QPixmap('spike_icon.png')))
self.MainWidget = QtGui.QWidget(self)
self.MainLayout = QtGui.QHBoxLayout(self.MainWidget)
self.MainLayout.setMargin(0)
self.MainLayout.setSpacing(0)
self.CurUnit = 0
self.PlotUnitCounter = 0
self.UnitsList = []
self.NUnits = 0
self.H5FileLoaded = False
self.ChanPlotted = False
self.RemovingTab = 0
self.UnitColors = np.array([[1, 0, 0], [0, 0.7, 0], [0, 0.4, 1],
[0.8, 0.6, 0], [0.6, 0, 1], [0, 0.7, 0.7],
[0, 0.5, 1]])
self.UnitColors = np.tile(self.UnitColors, (10, 1))
#--------------------------------------------- TOOLBAR ON THE LEFT SIDE
split1 = QtGui.QSplitter(QtCore.Qt.Horizontal, self.MainWidget) # SPLITTER
self.ToolsTab = QtGui.QTabWidget()
ToolsTab1 = QtGui.QWidget()
ToolsTab2 = QtGui.QWidget()
self.ToolsTab.addTab(ToolsTab1, 'Main Tools')
self.ToolsTab.addTab(ToolsTab2, 'Chan Tools')
self.ToolsTab.setMaximumWidth(220)
split1.addWidget(self.ToolsTab)
#---------------------------------------------------------ToolsTab No 1
toolslay = QtGui.QVBoxLayout()
#-------------------------------------------------------------- FRAME 1
grp = QtGui.QGroupBox('Overview Tools', ToolsTab1)
vlay = QtGui.QVBoxLayout()
# number of events to overview spin box
hlay = QtGui.QHBoxLayout()
self.OverviewNEventsSpin = QtGui.QSpinBox()
self.OverviewNEventsSpin.setRange(100, 1000)
self.OverviewNEventsSpin.setSingleStep(100)
self.OverviewNEventsSpin.setValue(500)
hlay.addWidget(QtGui.QLabel('N Events 2 Overview'))
hlay.addWidget(self.OverviewNEventsSpin)
vlay.addLayout(hlay)
# Y axis limits selector
hlay = QtGui.QHBoxLayout()
self.OverviewYLimsSpin = QtGui.QSpinBox()
self.OverviewYLimsSpin.setRange(100, 5000)
self.OverviewYLimsSpin.setSingleStep(100)
self.OverviewYLimsSpin.setValue(2000)
self.OverviewYLimsSpin.editingFinished.connect(self.ChangeOverviewYLim_Proc)
hlay.addWidget(QtGui.QLabel('Overview Axes YLim'))
hlay.addWidget(self.OverviewYLimsSpin)
vlay.addLayout(hlay)
#----------------------------------------------------------------------
btn = QtGui.QPushButton('Plot Overview')
btn.setStyleSheet('QPushButton{background-color: rgba(0,190,0)}')
btn.clicked.connect(self.LoadH5File)
vlay.addWidget(btn)
btn = QtGui.QPushButton('Save Overview')
btn.clicked.connect(self.SaveOverviewFig_proc)
vlay.addWidget(btn)
grp.setLayout(vlay)
toolslay.addWidget(grp)
grp = QtGui.QGroupBox('Delete Channel Tools', ToolsTab1)
vlay = QtGui.QVBoxLayout()
# add mark trash spin and button and link it to a function
hlay = QtGui.QHBoxLayout()
self.MarkTrashSpin = QtGui.QSpinBox()
self.MarkTrashSpin.setMinimum(1)
self.MarkTrashSpin.setMaximum(1000000)
self.MarkTrashSpin.setValue(1000)
hlay.addWidget(QtGui.QLabel('Below'))
hlay.addWidget(self.MarkTrashSpin)
MarkTrashBtn = QtGui.QPushButton('Mark Trash')
MarkTrashBtn.clicked.connect(self.TrashChans_proc)
hlay.addWidget(MarkTrashBtn)
vlay.addLayout(hlay)
# add delete trash chans and link it to a function
btn = QtGui.QPushButton('Delete Trash Chans')
btn.clicked.connect(self.DeleteTrashChans_proc)
vlay.addWidget(btn)
grp.setLayout(vlay)
toolslay.addWidget(grp)
#-------------------------------------------------------------- FRAME 2
grp = QtGui.QGroupBox('Channel Plot Options', ToolsTab1)
vlay = QtGui.QVBoxLayout()
hlay = QtGui.QHBoxLayout()
self.ChanSelector = QtGui.QComboBox()
hlay.addWidget(QtGui.QLabel('Chan Selector'))
hlay.addWidget(self.ChanSelector)
vlay.addLayout(hlay)
PlotChanBtn = QtGui.QPushButton('Plot Chan')
PlotChanBtn.clicked.connect(self.PlotChanProc)
vlay.addWidget(PlotChanBtn)
grp.setLayout(vlay)
toolslay.addWidget(grp)
#------------------------------------------------------------ Group No3
grp = QtGui.QGroupBox('General Tools', ToolsTab1)
glay = QtGui.QGridLayout()
setSettigsBtn = QtGui.QPushButton('Settings')
setSettigsBtn.clicked.connect(self.Settings)
glay.addWidget(setSettigsBtn, 0, 0)
aboutBtn = QtGui.QPushButton('About')
aboutBtn.clicked.connect(self.About)
glay.addWidget(aboutBtn, 0, 1)
closeH5FileBtn = QtGui.QPushButton('Close H5 File')
closeH5FileBtn.clicked.connect(self.CloseFile)
glay.addWidget(closeH5FileBtn, 1, 0)
exitBtn = QtGui.QPushButton('Exit')
exitBtn.clicked.connect(self.closeEvent)
glay.addWidget(exitBtn, 1, 1)
convertFileBtn = QtGui.QPushButton('Convert File')
convertFileBtn.clicked.connect(BL.bin2h5)
glay.addWidget(convertFileBtn, 2, 0)
grp.setLayout(glay)
toolslay.addWidget(grp)
# create an "About" Msg Box
self.AboutMsg = QtGui.QMessageBox(QtGui.QMessageBox.Information,
'About',
u'Spyke Sorter v0.1\nHachi Manzur, 2012')
toolslay.addStretch(1)
ToolsTab1.setLayout(toolslay)
#--------------------------------------------------- self.ToolsTab No 2
toolslay = QtGui.QVBoxLayout()
# group No1
grp = QtGui.QGroupBox('Features Plot Opts', ToolsTab2)
vlay = QtGui.QVBoxLayout()
# add X and Y features selection combobox
items = ['PCA1', 'PCA2', 'PCA3', 'Slice1', 'Slice2', 'Time', 'Pk2Pk Amp',
'Peak', 'Valley', 'Energy', 'Peak Pt', 'Valley Pt']
self.XPlot = QtGui.QComboBox(grp)
self.YPlot = QtGui.QComboBox(grp)
self.ZPlot = QtGui.QComboBox(grp)
self.XPlot.addItems(items)
self.YPlot.addItems(items)
self.ZPlot.addItems(items)
self.ZPlot.addItem('Density')
self.YPlot.setCurrentIndex(1)
self.ZPlot.setCurrentIndex(2)
# add the X axis combo box
hlay = QtGui.QHBoxLayout()
hlay.addWidget(QtGui.QLabel('X Axis Variable'))
hlay.addWidget(self.XPlot)
vlay.addLayout(hlay)
# add the Y axis combo box
hlay = QtGui.QHBoxLayout()
hlay.addWidget(QtGui.QLabel('Y Axis Variable'))
hlay.addWidget(self.YPlot)
vlay.addLayout(hlay)
# add the Y axis combo box
hlay = QtGui.QHBoxLayout()
hlay.addWidget(QtGui.QLabel('Z Axis Variable'))
hlay.addWidget(self.ZPlot)
vlay.addLayout(hlay)
# add a source of what to plot selection combo box
hlay = QtGui.QHBoxLayout()
hlay.addWidget(QtGui.QLabel('What to Plot ?'))
self.What2Plot = QtGui.QComboBox()
hlay.addWidget(self.What2Plot)
vlay.addLayout(hlay)
# add two slice selection spin box
hlay = QtGui.QHBoxLayout()
self.SliceSpBx1 = QtGui.QSpinBox()
self.SliceSpBx1.setObjectName('Slice1')
self.SliceSpBx1.valueChanged.connect(self.SliceDraw)
hlay.addWidget(QtGui.QLabel('Slice 1'))
hlay.addWidget(self.SliceSpBx1)
self.SliceSpBx2 = QtGui.QSpinBox()
self.SliceSpBx2.setObjectName('Slice2')
hlay.addWidget(QtGui.QLabel('Slice 2'))
self.SliceSpBx2.valueChanged.connect(self.SliceDraw)
hlay.addWidget(self.SliceSpBx2)
vlay.addLayout(hlay)
# add a plot density check and a spin box to set the resolution
hlay = QtGui.QHBoxLayout()
self.PlotDensityCheck = QtGui.QCheckBox('Plot Density ?')
hlay.addWidget(self.PlotDensityCheck)
self.PlotDensityBins = QtGui.QSpinBox()
self.PlotDensityBins.setMinimum(50)
self.PlotDensityBins.setMaximum(300)
self.PlotDensityBins.setValue(100)
hlay.addWidget(self.PlotDensityBins)
vlay.addLayout(hlay)
# plot only valid Wfs check widget
self.PlotValidsOnlyCheck = QtGui.QCheckBox('Plot Valids Only')
self.PlotValidsOnlyCheck.setChecked(True)
vlay.addWidget(self.PlotValidsOnlyCheck)
# label with number of points
hlay = QtGui.QHBoxLayout()
self.nPtsLabel = QtGui.QLabel()
hlay.addWidget(QtGui.QLabel('NPoints'))
hlay.addWidget(self.nPtsLabel)
vlay.addLayout(hlay)
# number of spikes spin box
hlay = QtGui.QHBoxLayout()
self.nPtsSpin = QtGui.QSpinBox()
self.nPtsSpin.setRange(10000, 200000)
self.nPtsSpin.setSingleStep(10000)
hlay.addWidget(self.nPtsSpin)
# number of spikes slider
self.nPtsSlider = QtGui.QSlider(QtCore.Qt.Horizontal)
self.nPtsSlider.setRange(10000, 200000)
self.nPtsSlider.setTickInterval(5000)
self.nPtsSlider.setSingleStep(5000)
hlay.addWidget(self.nPtsSlider)
# connect spinner with No-of-spikes slider
self.nPtsSlider.valueChanged.connect(self.nPtsSpin.setValue)
# connect slider with No-of-spikes spinner
self.nPtsSpin.valueChanged.connect(self.nPtsSlider.setValue)
# set N spikes value
self.nPtsSlider.setValue(50000)
vlay.addLayout(hlay)
hlay = QtGui.QHBoxLayout()
# plot features btn and funcion connection
self.PlotFeaturesBtn = QtGui.QPushButton('Plot 2D', grp)
self.PlotFeaturesBtn.clicked.connect(self.PlotFeatures)
hlay.addWidget(self.PlotFeaturesBtn)
# plot features btn and funcion connection
self.Plot3DBtn = QtGui.QPushButton('Plot 3D', grp)
self.Plot3DBtn.clicked.connect(self.Plot3DFeatures)
hlay.addWidget(self.Plot3DBtn)
vlay.addLayout(hlay)
grp.setLayout(vlay)
toolslay.addWidget(grp)
#----------------------------------------------------------- group No 2
grp = QtGui.QGroupBox('Raw Waveforms Opts')
vlay = QtGui.QVBoxLayout()
# number of spikes spin box
hlay = QtGui.QHBoxLayout()
self.NSpikesSpin = QtGui.QSpinBox()
self.NSpikesSpin.setMaximum(5000)
self.NSpikesSpin.setMinimum(100)
self.NSpikesSpin.setSingleStep(100)
hlay.addWidget(self.NSpikesSpin)
# number of spikes slider
self.NSpikesSlider = QtGui.QSlider(QtCore.Qt.Horizontal)
self.NSpikesSlider.setMaximum(5000)
self.NSpikesSlider.setMinimum(100)
self.NSpikesSlider.setSingleStep(100)
hlay.addWidget(self.NSpikesSlider)
# connect spinner with No-of-spikes slider
self.NSpikesSpin.valueChanged.connect(self.NSpikesSlider.setValue)
# connect slider with No-of-spikes spinner
self.NSpikesSlider.valueChanged.connect(self.NSpikesSpin.setValue)
# set N spikes value
self.NSpikesSlider.setValue(1000)
vlay.addLayout(hlay)
# add axes limit spin box
hlay = QtGui.QHBoxLayout()
self.WaveAxYLim_Spin = QtGui.QSpinBox()
self.WaveAxYLim_Spin.setRange(0, 10000)
self.WaveAxYLim_Spin.setValue(1000)
self.WaveAxYLim_Spin.setSingleStep(100)
self.WaveAxYLim_Spin.editingFinished.connect(self.SetWfPlotLim_proc)
hlay.addWidget(QtGui.QLabel('Axes Y Lim'))
hlay.addWidget(self.WaveAxYLim_Spin)
vlay.addLayout(hlay)
# create a "plot waveforms" check widget
self.PlotWaveformsCheck = QtGui.QCheckBox('Plot Raw Waveforms ?')
vlay.addWidget(self.PlotWaveformsCheck)
grp.setLayout(vlay)
toolslay.addWidget(grp)
# Automatic clustering box
#w = QtGui.QWidget()
autoClustBtn = QtGui.QPushButton('Automatic Clustering')
autoClustBtn.clicked.connect(autoclust.show)
toolslay.addWidget(autoClustBtn)
hlay = QtGui.QHBoxLayout()
mergeUnitsBtn = QtGui.QPushButton('Merge Units')
self.MergeUnitsWidget = helper_widgets.MergeUnitsWidget()
self.MergeUnitsWidget.AcceptBtn.clicked.connect(self.MergeUnits_proc)
mergeUnitsBtn.clicked.connect(self.CallMergeUnits_proc)
hlay.addWidget(mergeUnitsBtn)
moveUnitsBtn = QtGui.QPushButton('Move Units')
self.MoveUnitsWidget = helper_widgets.MoveUnitsWidget()
self.MoveUnitsWidget.AcceptBtn.clicked.connect(self.MoveUnits_proc)
moveUnitsBtn.clicked.connect(self.CallMoveUnits_proc)
hlay.addWidget(moveUnitsBtn)
toolslay.addLayout(hlay)
#----------------------------------------------- CHANNEL METAINFO GROUP
# button to reset a channel
btn = QtGui.QPushButton('Reset Channel')
btn.clicked.connect(self.ResetChan_proc)
toolslay.addWidget(btn)
# button to reset a channel
btn = QtGui.QPushButton('Autocorr Opts')
btn.clicked.connect(self.AutocorrOpts)
toolslay.addWidget(btn)
toolslay.addStretch(1)
ToolsTab2.setLayout(toolslay)
#------------------------------------------------ TABBED FIGURES WIDGET
self.OverviewTab1 = {}
self.OverviewTab2 = {}
self.MainFigTab = QtGui.QTabWidget()
self.MainFigTab.currentChanged.connect(self.MainFigTabProc)
self.OverviewTab1['MainWidget'] = QtGui.QWidget(self.MainFigTab)
hlay = QtGui.QHBoxLayout(self.OverviewTab1['MainWidget'])
self.MainFigTab.addTab(self.OverviewTab1['MainWidget'], 'Channels Overview')
# overview figure
self.OverviewTab1['Figure'] = matplotlib_widgets.MplWidget()
self.OverviewTab1['Figure'].figure.set_facecolor('k')
self.OverviewTab1['Toolbar'] = matplotlib_widgets.NavToolbar(self.OverviewTab1['Figure'], self.OverviewTab1['MainWidget'])
self.OverviewTab1['Toolbar'].setIconSize(QtCore.QSize(15, 15))
vlay = QtGui.QVBoxLayout()
vlay.addWidget(self.OverviewTab1['Figure'])
vlay.addWidget(self.OverviewTab1['Toolbar'])
vlay.setMargin(0)
vlay.setSpacing(1)
hlay.addLayout(vlay)
hlay.setMargin(0)
hlay.setSpacing(1)
#------------------------------------------------ OVERVIEW TABLE WIDGET
self.OverviewTab2['MainWidget'] = QtGui.QWidget(self.MainFigTab)
self.OverviewTab2['OverviewTable'] = QtGui.QTableWidget(0, 6, self.OverviewTab2['MainWidget'])
self.OverviewTab2['OverviewTable'].setAlternatingRowColors(True)
self.OverviewTab2['OverviewTable'].setFont(QtGui.QFont('sans', 8))
labels = ['Count', 'isTrash', 'MultiUnit?', 'Comments', 'Unsorted', 'Valid']
self.OverviewTab2['OverviewTable'].setHorizontalHeaderLabels(labels)
for k in range(self.OverviewTab2['OverviewTable'].columnCount()):
self.OverviewTab2['OverviewTable'].setColumnWidth(k, 65)
self.OverviewTab2['OverviewTable'].setColumnWidth(3, 150)
self.OverviewTab2['OverviewTable'].setColumnWidth(2, 75)
# associate the vertical header click to select the channel
vHeader = self.OverviewTab2['OverviewTable'].verticalHeader()
vHeader.sectionClicked.connect(self.TableRowChanged_proc)
vlay = QtGui.QVBoxLayout(self.OverviewTab2['MainWidget'])
vlay.addWidget(self.OverviewTab2['OverviewTable'])
# add a log entry browser
grp = QtGui.QGroupBox('Log Browser')
grp.setMaximumHeight(100)
hlay = QtGui.QHBoxLayout()
self.LogCombo = QtGui.QComboBox()
self.LogCombo.setMinimumWidth(200)
#self.LogCombo.setMinimumHeight(20)
self.LogCombo.currentIndexChanged.connect(self.SetLogText_proc)
hlay.addWidget(self.LogCombo)
self.LogTextBrowser = QtGui.QTextBrowser()
#self.LogTextBrowser.setMaximumHeight(40)
hlay.addWidget(self.LogTextBrowser)
hlay.setMargin(0)
hlay.setSpacing(1)
grp.setLayout(hlay)
vlay.addWidget(grp)
self.MainFigTab.addTab(self.OverviewTab2['MainWidget'], 'Summary Table')
#---------------------------------------------------------- CHANNEL TAB
self.ChanTab = {}
self.ChanTab['MainWidget'] = QtGui.QWidget()
self.MainFigTab.addTab(self.ChanTab['MainWidget'], 'Channel Tab')
mainHLay = QtGui.QHBoxLayout()
#------------------------------------------------- RAW WAVEFORMS WIDGET
# create the mpl widget to plot the raw waveforms
vlay = QtGui.QVBoxLayout()
# buttons and controls on top of raw waveforms plot
hlay = QtGui.QHBoxLayout()
self.NUnitsSpin = QtGui.QSpinBox()
self.NUnitsSpin.setMaximumHeight(20)
self.NUnitsSpin.setMinimum(1)
self.NUnitsSpin.setMaximum(10000)
self.NUnitsSpin.setValue(1)
TrimBtn = QtGui.QPushButton('Trim Waveforms')
TrimBtn.clicked.connect(self.ActivateTrimWaveforms_proc)
TrimBtn.setMaximumHeight(20)
CleanBtn = QtGui.QPushButton('Redraw')
CleanBtn.setMaximumHeight(20)
CleanBtn.clicked.connect(self.CleanWavesFigure_proc)
hlay.addStretch(1)
lbl = QtGui.QLabel('Waveforms2Plot:')
lbl.setMaximumHeight(20)
hlay.addWidget(lbl)
hlay.addWidget(self.NUnitsSpin)
hlay.addWidget(TrimBtn)
hlay.addWidget(CleanBtn)
hlay.addStretch(1)
vlay.addLayout(hlay)
#------------------------------------------- waveforms plot and toolbar
hlay = QtGui.QHBoxLayout()
self.ChanTab['WavesFigure'] = matplotlib_widgets.MplWidget()
self.ChanTab['WavesFigure'].figure.set_facecolor('k')
self.ChanTab['WaveToolbar'] = matplotlib_widgets.NavToolbar(self.ChanTab['WavesFigure'],
self.ChanTab['MainWidget'],
coordinates=False)
self.ChanTab['WaveToolbar'].setIconSize(QtCore.QSize(15, 15))
self.ChanTab['WaveToolbar'].setOrientation(QtCore.Qt.Vertical)
self.ChanTab['WaveToolbar'].setMaximumWidth(30)
hlay.addWidget(self.ChanTab['WavesFigure'])
hlay.addWidget(self.ChanTab['WaveToolbar'])
hlay.setMargin(0)
hlay.setSpacing(1)
vlay.addLayout(hlay)
#------------------------------------------------------ UNIT TABS WIDGET
self.ChanTab['UnitTabsWidget'] = QtGui.QTabWidget()
self.ChanTab['UnitTabBarWidget'] = self.ChanTab['UnitTabsWidget'].tabBar()
self.ChanTab['UnitTabsWidget'].setMaximumHeight(QtGui.QApplication.desktop().availableGeometry().height() / 4)
self.ChanTab['UnitFigures'] = {}
self.ChanTab['DelUnitBtns'] = {}
self.ChanTab['UnitCountLabel'] = {}
self.ChanTab['UnitBtns'] = {}
self.ChanTab['PlotRawCheck'] = {}
self.ChanTab['isMultiunitCheck'] = {}
self.ChanTab['UnitTabsWidget'].currentChanged.connect(self.ChangeCurrentUnit_proc)
vlay.addWidget(self.ChanTab['UnitTabsWidget'])
mainHLay.addLayout(vlay)
# configures the waveforms figure
wavesfig = self.ChanTab['WavesFigure'].figure
ax = wavesfig.add_subplot(111)
self.trimWaveformsRect = matplotlib_widgets.MyRectangleSelector(ax,
self.TrimWaveforms_proc,
drawtype='line',
useblit=True)
self.trimWaveformsRect.set_active(False)
ax.set_axis_bgcolor('k')
ax.set_xticklabels([])
ax.set_yticklabels([])
self.SampleWaveform, = ax.plot([], color=[.5, .5, .5], linewidth=2)
self.Waveforms, = ax.plot([], animated=True)
ax.set_ylim(-1000, 1000)
ax.set_xlim(0, 32)
# Create Slice plots
self.Slice1Ln = ax.axvline(0, color=[.5, .5, .5])
self.Slice2Ln = ax.axvline(0, color=[.5, .5, .5], linestyle='--')
ax.grid()
wavesfig.canvas.mpl_connect('draw_event', self.draw_callback)
#------------------------------------------------- FEATURES PLOT WIDGET
mainRightLay = QtGui.QVBoxLayout()
tab = QtGui.QTabWidget()
widget = QtGui.QWidget()
# function buttons on top of the features plot:
vlay = QtGui.QVBoxLayout(widget)
hlay = QtGui.QHBoxLayout()
hlay.addStretch(1)
self.AddUnitBtn = QtGui.QPushButton('Add Unit')
self.AddUnitBtn.setMaximumHeight(20)
self.AddUnitBtn.clicked.connect(self.AddUnit_proc)
hlay.addWidget(self.AddUnitBtn)
# add a "keep" button
self.KeepBtn = QtGui.QPushButton('Keep')
self.KeepBtn.setMaximumHeight(20)
self.KeepBtn.setToolTip('Create new unit (only when All waveforms or Unsorted are plotted)')
self.KeepBtn.clicked.connect(self.Keep_proc)
hlay.addWidget(self.KeepBtn)
# add an "add region" button
self.AddRegionBtn = QtGui.QPushButton('Add Region')
self.AddRegionBtn.setMaximumHeight(20)
self.AddRegionBtn.setToolTip('Add waveforms to the current unit')
self.AddRegionBtn.clicked.connect(self.AddRegion_proc)
hlay.addWidget(self.AddRegionBtn)
# add a "remove region" button
self.RemoveRegionBtn = QtGui.QPushButton('Remove Region')
self.RemoveRegionBtn.setMaximumHeight(20)
self.RemoveRegionBtn.clicked.connect(self.RemoveRegion_proc)
hlay.addWidget(self.RemoveRegionBtn)
# "set valid waveforms" button
self.ValidWFBtn = QtGui.QPushButton('Set Valid WFs')
self.ValidWFBtn.setMaximumHeight(20)
self.ValidWFBtn.clicked.connect(self.ValidateWFs_proc)
hlay.addWidget(self.ValidWFBtn)
# "set valid waveforms" button
self.ReplotDensityBtn = QtGui.QPushButton('Replot Density')
self.ReplotDensityBtn.setMaximumHeight(20)
self.ReplotDensityBtn.clicked.connect(self.ReplotDensity_proc)
hlay.addWidget(self.ReplotDensityBtn)
hlay.addStretch(1)
vlay.addLayout(hlay)
# Features figure and toolbar
self.ChanTab['FeaturesFig'] = matplotlib_widgets.MplWidget()
self.ChanTab['FeaturesFig'].figure.set_facecolor('k')
self.ChanTab['FeaturesFigNtb'] = matplotlib_widgets.NavToolbar(self.ChanTab['FeaturesFig'].figure.canvas,
self.ChanTab['MainWidget'])
self.ChanTab['FeaturesFigNtb'].setIconSize(QtCore.QSize(15, 15))
self.ChanTab['FeaturesFigNtb'].setMaximumHeight(30)
vlay.addWidget(self.ChanTab['FeaturesFig'])
vlay.addWidget(self.ChanTab['FeaturesFigNtb'])
vlay.setMargin(0)
vlay.setSpacing(1)
tab.addTab(widget, '2D')
mainRightLay.addWidget(tab)
#-------------------------------------------------------- 3D TAB Widget
self.Widget3d = gl.GLViewWidget()
tab.addTab(self.Widget3d, '3D')
#---------------------------------- Spikes vs time visualization widget
# add a figure adn axes
self.TimeScroll = {}
self.TimeScroll['Figure'] = matplotlib_widgets.MplWidget()
self.TimeScroll['Figure'].figure.set_facecolor('k')
self.TimeScroll['DrawFigCID'] = self.TimeScroll['Figure'].figure.canvas.mpl_connect('draw_event', self.DrawScrollFig_Func)
self.TimeScroll['Figure'].setMaximumHeight(QtGui.QApplication.desktop().availableGeometry().height() / 6)
self.TimeScroll['Ax'] = self.TimeScroll['Figure'].figure.add_subplot(111)
self.TimeScroll['Ax'].set_axis_bgcolor('k')
self.TimeScroll['Ax'].set_ylim(-1500, 1500)
self.TimeScroll['Ax'].set_xticklabels([])
self.TimeScroll['Ax'].set_yticklabels([])
self.TimeScroll['Ax'].set_axis_off()
self.TimeScroll['Plot'], = self.TimeScroll['Ax'].plot([], color=[.5, .5, .5])
self.TimeScroll['Figure'].figure.tight_layout()
self.TimeScroll['Figure'].figure.canvas.draw()
# add a vertical zoom slider
self.TimeScroll['VZoom'] = QtGui.QSlider(QtCore.Qt.Vertical)
self.TimeScroll['VZoom'].setMaximumHeight(QtGui.QApplication.desktop().availableGeometry().height() / 6)
self.TimeScroll['VZoom'].setMinimum(100)
self.TimeScroll['VZoom'].setMaximum(5000)
self.TimeScroll['VZoom'].setValue(1000)
self.TimeScroll['VZoom'].valueChanged.connect(self.VZoom_Func)
hlay = QtGui.QHBoxLayout()
hlay.addWidget(self.TimeScroll['VZoom'])
hlay.addWidget(self.TimeScroll['Figure'])
mainRightLay.addLayout(hlay)
# add an horizontal zoom slider
self.TimeScroll['HZoom'] = QtGui.QSlider(QtCore.Qt.Horizontal)
self.TimeScroll['HZoom'].setRange(5, 5000)
self.TimeScroll['HZoom'].setValue(500)
self.TimeScroll['HZoom'].setSingleStep(5)
self.TimeScroll['HZoom'].valueChanged.connect(self.HZoom_Func)
self.TimeScroll['HZoomSpin'] = QtGui.QSpinBox()
self.TimeScroll['HZoomSpin'].setMinimumWidth(80)
self.TimeScroll['HZoomSpin'].setMaximumHeight(20)
self.TimeScroll['HZoomSpin'].setRange(5, 5000)
self.TimeScroll['HZoomSpin'].setValue(500)
self.TimeScroll['HZoomSpin'].setSingleStep(10)
self.TimeScroll['HZoomSpin'].valueChanged.connect(self.TimeScroll['HZoom'].setValue)
self.TimeScroll['HZoom'].valueChanged.connect(self.TimeScroll['HZoomSpin'].setValue)
hlay = QtGui.QHBoxLayout()
hlay.addWidget(QtGui.QLabel('H Span '))
hlay.addWidget(self.TimeScroll['HZoomSpin'])
hlay.addWidget(self.TimeScroll['HZoom'])
mainRightLay.addLayout(hlay)
# add a time slider
self.TimeScroll['HScroll'] = QtGui.QSlider(QtCore.Qt.Horizontal)
self.TimeScroll['HScroll'].setRange(0, 3000000)
self.TimeScroll['HScroll'].setSingleStep(self.TimeScroll['HZoom'].value() / 10)
self.TimeScroll['HScroll'].valueChanged.connect(self.HScroll_Func)
self.TimeScroll['HSpin'] = QtGui.QSpinBox()
self.TimeScroll['HSpin'].setRange(0, 3000000)
self.TimeScroll['HSpin'].setMinimumWidth(80)
self.TimeScroll['HSpin'].setMaximumHeight(20)
self.TimeScroll['HSpin'].valueChanged.connect(self.TimeScroll['HScroll'].setValue)
self.TimeScroll['HScroll'].valueChanged.connect(self.TimeScroll['HSpin'].setValue)
hlay = QtGui.QHBoxLayout()
hlay.addWidget(QtGui.QLabel('H Scroll'))
hlay.addWidget(self.TimeScroll['HSpin'])
hlay.addWidget(self.TimeScroll['HScroll'])
mainRightLay.addLayout(hlay)
mainRightLay.setMargin(0)
mainRightLay.setSpacing(1)
# add the widget to the main horizontal layout
mainHLay.addLayout(mainRightLay)
mainHLay.setMargin(1)
self.ChanTab['MainWidget'].setLayout(mainHLay)
# create a generic Msg box
self.MsgBox = QtGui.QMessageBox()
# if running in linux set a certain style for the buttons and widgets
if sys.platform == 'linux2':
QtGui.QApplication.setStyle(QtGui.QStyleFactory.create('Plastique'))
# add the main tabbed figures widget to the main splitter
split1.addWidget(self.MainFigTab)
# add the splitter to the main layout
self.MainLayout.addWidget(split1)
# set the layout of the main widget
#self.MainWidget.setLayout(self.MainLayout)
# set the central widget of the application
self.setCentralWidget(self.MainWidget)
# finally show the object
self.show()
#__________________________________________________________________________
def SaveOverviewFig_proc(self):
if self.H5FileLoaded:
fname = str(QtGui.QFileDialog.getSaveFileName(directory=self.h5file.filename[0:-3] + '_sorted.png'))
if fname:
self.OverviewTab1['Figure'].figure.savefig(fname,
dpi=300,
facecolor='k')
#__________________________________________________________________________
def AdjustPlots_proc(self):
self.TimeScroll['Figure'].figure.tight_layout()
self.TimeScroll['Figure'].figure.canvas.draw()
self.ChanTab['WavesFigure'].figure.tight_layout()
self.ChanTab['WavesFigure'].figure.canvas.draw()
if len(self.ChanTab['FeaturesFig'].figure.axes) > 0:
self.ChanTab['FeaturesFig'].figure.tight_layout()
self.ChanTab['FeaturesFig'].figure.canvas.draw()
if len(self.OverviewTab1['Figure'].figure.axes) > 0:
self.OverviewTab1['Figure'].figure.tight_layout()
self.OverviewTab1['Figure'].figure.canvas.draw()
for k in self.ChanTab['UnitFigures']:
self.ChanTab['UnitFigures'][k].figure.tight_layout()
self.ChanTab['UnitFigures'][k].figure.canvas.draw()
#__________________________________________________________________________
def SetWfPlotLim_proc(self):
sender = self.sender()
ax = self.ChanTab['WavesFigure'].figure.axes[0]
lim = sender.value()
ax.set_ylim(-lim, lim)
self.ChanTab['WavesFigure'].figure.canvas.draw()
#__________________________________________________________________________
def HScroll_Func(self):
''' This function gets triggered whenever the user moves the bottom
scrollbar in the lower right. It helps to explore the raw waveforms'''
if not self.ChanPlotted:
return
self.TimeScroll['Figure'].figure.canvas.restore_region(self.TimeScroll['bg'])
self.ChanTab['WavesFigure'].figure.canvas.restore_region(self.ChanTab['WavesFigBG'])
v = self.TimeScroll['HScroll'].value()
h = self.TimeScroll['HZoom'].value()
indx = np.flatnonzero(np.logical_and(self.CurTs >= v, self.CurTs < (v + h)))
if any(indx):
# ontain the timestamps corresponding to the indexes
Ts = self.CurNode.TimeStamp[indx]
# substract the first timestamp to have a 0 based indexing
Ts = Ts - v
# obtain the waveforms to plot
Wf = self.CurNode.Waveforms[indx, :]
# obtain the length of units to plot
n = len(indx)
# create an array of Nones to append
nones = np.array(n * [None], ndmin=2).transpose()
# append nones to the waveforms array and reshape it to a vector
Wf = np.append(Wf, nones, axis=1).reshape((n * (self.WfSize + 1),))
# create a vector time, based on the sampling frequency, the
# the number of points per spike and the timestamp
Ts = np.tile(Ts, (self.WfSize, 1)).T + \
np.tile(np.linspace(0, self.End, self.WfSize), (n, 1))
Ts = np.append(Ts, nones, axis=1).reshape((n * (self.WfSize + 1),))
# set the plot data to the created arrays
self.TimeScroll['Plot'].set_data(Ts, Wf)
# set axes limits
self.TimeScroll['Ax'].set_xlim(0, h)
self.TimeScroll['Ax'].draw_artist(self.TimeScroll['Plot'])
self.SampleWaveform.set_data(self.WaveformXax * n, Wf)
self.ChanTab['WavesFigure'].figure.axes[0].draw_artist(self.SampleWaveform)
self.TimeScroll['Figure'].figure.canvas.blit(self.TimeScroll['Figure'].figure.bbox)
self.ChanTab['WavesFigure'].figure.canvas.blit(self.ChanTab['WavesFigure'].figure.axes[0].bbox)
#__________________________________________________________________________
def VZoom_Func(self):
v = self.TimeScroll['VZoom'].value()
self.TimeScroll['Ax'].set_ylim(-v, v)
self.TimeScroll['Figure'].figure.canvas.restore_region(self.TimeScroll['bg'])
self.TimeScroll['Ax'].draw_artist(self.TimeScroll['Plot'])
self.TimeScroll['Figure'].figure.canvas.blit(self.TimeScroll['Figure'].figure.bbox)
#__________________________________________________________________________
def HZoom_Func(self):
v = self.TimeScroll['HZoom'].value()
self.TimeScroll['HScroll'].setSingleStep(v / 10)
self.HScroll_Func()
#__________________________________________________________________________
def DrawScrollFig_Func(self, event):
fig = self.TimeScroll['Figure'].figure
self.TimeScroll['bg'] = fig.canvas.copy_from_bbox(fig.axes[0].bbox)
#__________________________________________________________________________
def LoadH5File(self, h5file=None):
''' Loads an h5 file that contains all the information about the units:
waveforms and timestamps '''
# try to load an h5 file
if settings.WorkingDir:
d = settings.WorkingDir
else:
d = ''
if not h5file:
h5file = str(QtGui.QFileDialog.getOpenFileName(parent=self,
caption='Select an H5 File',
directory=d,
filter='*.h5'))
# in case there is not file selected
if not h5file:
return
# set file loaded var = True
if hasattr(self, 'H5FileLoaded') and self.H5FileLoaded:
self.h5file.close()
# try to open the file
try:
self.h5file = tables.open_file(str(h5file), mode='r+')
except:
self.MsgBox.setIcon(QtGui.QMessageBox.Warning)
self.MsgBox.setText('There was a problem opening the H5 file')
self.MsgBox.setWindowTitle('Warning')
self.MsgBox.show()
return
# set file loaded var = True
self.H5FileLoaded = True
self.FilePath = os.path.split(h5file)[0]
# REPAIR THE H5FILE STRUCTURE
if self.h5file.__contains__('/Chans'):
self.h5file.rename_node('/', 'Spikes', name='Chans')
chanNodes = self.h5file.list_nodes('/Spikes')
for k in chanNodes:
where = '/Spikes/%s' % k._v_name
for n in k:
if re.search('Unit[0-9]{2}(?!_isMultiunit)', n._v_name) and n._c_classId != 'GROUP':
unitName = re.search('Unit[0-9]{2}(?!_isMultiunit)', n._v_name).group()
self.h5file.create_group(where=where,
name=unitName + '_grp')
self.h5file.moveNode(where=where,
name=unitName,
newparent='/Spikes/%s/%s' % (k._v_name, unitName + '_grp'),
newname='Indx')
self.h5file.renameNode(where=where,
name=unitName + '_grp',
newname=unitName)
elif re.search('Unit[0-9]{2}_isMultiunit', n._v_name):
self.h5file.remove_node(where=where,
name=re.search('Unit[0-9]{2}_isMultiunit', n._v_name).group())
elif 'tmp' in n._v_name:
self.h5file.remove_node(where=where, name=n._v_name, recursive=True)
# CREATE 'isMultiunit' and 'isBursting' fields
chanNodes = self.h5file.list_nodes('/Spikes')
for k in chanNodes:
node = '/Spikes/%s' % k._v_name
for n in k:
if 'Unit' in n._v_name and n._c_classId == 'GROUP':
parent = node + '/' + n._v_name
if not self.h5file.__contains__(parent + '/' + 'isMultiunit'):
self.h5file.create_array(parent, 'isMultiunit', False)
if not self.h5file.__contains__(parent + '/' + 'isBursting'):
self.h5file.create_array(parent, 'isBursting', False)
# RENAME the "Indexes" field to "Indx"
chanNodes = self.h5file.list_nodes('/Spikes')
for k in chanNodes:
for n in k:
if 'Unit' in n._v_name:
nodeName = '/Spikes/%s/%s' % (k._v_name, n._v_name)
for l in n:
if l._v_name == 'Indexes':
self.h5file.renameNode(nodeName, 'Indx', 'Indexes')
# save changes to disk
self.h5file.flush()
# REPAIR UNIT NAMES #####
chanNodes = self.h5file.list_nodes('/Spikes')
for chan in chanNodes:
unitNames = [k for k in chan.__members__ if 'Unit' in k]
unitNames.sort()
for j, k in enumerate(unitNames):
if k != 'Unit%02d' % j:
self.h5file.renameNode('/Spikes/%s' % chan._v_name, name=k,
newname='Unit%02d' % j)
# save changes to disk
self.h5file.flush()
# clean the channel figures if something already plotted
if hasattr(self, 'ChanPlotted') and self.ChanPlotted:
self.ResetChannelTab_proc()
self.PlotOverview()
# clear the Log Browser and load Log info
self.LogCombo.clear()
self.LogTextBrowser.clear()
if self.h5file.__contains__('/Log'):
nodes = self.h5file.list_nodes('/Log')
nodeNames = [k._v_name for k in nodes]
self.LogCombo.addItems(nodeNames)
# set window title = to filename
self.setWindowTitle('Spike Sorter GUI ' + h5file)
#__________________________________________________________________________
    def PlotOverview(self):
        '''Plot a decimated overview of the spikes of every channel and fill
        the overview table with per-channel information: event count,
        trash/multiunit flags, unsorted/valid counts and per-unit counts.'''
        # get the list of channel nodes inside the "/Spikes" group
        chanNodes = self.h5file.list_nodes('/Spikes')
        # get the number of the channels in the file
        self.ChansList = [int(re.search('(?<=Chan_)[0-9]{3}', k._v_name).group()) for k in chanNodes]
        # get the waveform size (number of points per spike)
        self.WfSize = self.h5file.root.Header.WaveformSize.read()
        #x = range(self.WfSize)
        # add items to the channel selector in the toolbar
        self.ChanSelector.clear()
        self.ChanSelector.addItems(['%d' % k for k in self.ChansList])
        # clean the overview figure
        self.OverviewTab1['Figure'].figure.clear()
        # helper to calculate the geometry of the axes (10 axes per row)
        figrows = np.ceil(len(chanNodes) / 10.0)
        # clear contents of the overview table; remove rows bottom-up so the
        # remaining row indices stay valid (range returns a list: Python 2)
        self.OverviewTab2['OverviewTable'].clearContents()
        c = range(self.OverviewTab2['OverviewTable'].rowCount())
        c.reverse()
        for k in c:
            self.OverviewTab2['OverviewTable'].removeRow(k)
        # iterate over the list of channels to add the information to the table
        for j, k in enumerate(chanNodes):
            # update overview table
            self.OverviewTab2['OverviewTable'].insertRow(j)
            self.OverviewTab2['OverviewTable'].setRowHeight(j, 20)
            # add an event count
            self.OverviewTab2['OverviewTable'].setItem(j, 0, QtGui.QTableWidgetItem(str(k.TimeStamp.nrows)))
            # add an "isTrash" checkbox; channel number travels in the
            # widget's 'Data' property (read back in setTrash_proc)
            check = QtGui.QCheckBox()
            check.setProperty('Data', self.ChansList[j])
            check.stateChanged.connect(self.setTrash_proc)
            self.OverviewTab2['OverviewTable'].setCellWidget(j, 1, check)
            # add an "isMultiunit" checkbox; node name travels in objectName
            isMultiunitCheck = QtGui.QCheckBox()
            isMultiunitCheck.setObjectName(k._v_name)
            isMultiunitCheck.stateChanged.connect(self.isMultiunit_proc)
            self.OverviewTab2['OverviewTable'].setCellWidget(j, 2, isMultiunitCheck)
            # add information about unsorted events
            if k.__contains__('Unsorted'):
                self.OverviewTab2['OverviewTable'].setItem(j, 4, QtGui.QTableWidgetItem(str(k.Unsorted.nrows)))
            # add information about valid waveforms
            if k.__contains__('ValidWFs'):
                self.OverviewTab2['OverviewTable'].setItem(j, 5, QtGui.QTableWidgetItem(str(k.ValidWFs.nrows)))
            # add info about each unit
            units = [m for m in k.__members__ if re.search('Unit[0-9]{2}', m)] # obtain unit names
            units.sort()
            if units: # in case there are units
                for m, n in enumerate(units):
                    # grow the table with one 'UnitXX' column per unit index
                    # seen so far (columns 0-5 are the fixed ones)
                    if self.OverviewTab2['OverviewTable'].columnCount() <= (m + 6):
                        self.OverviewTab2['OverviewTable'].insertColumn(self.OverviewTab2['OverviewTable'].columnCount())
                        nCols = self.OverviewTab2['OverviewTable'].columnCount()
                        self.OverviewTab2['OverviewTable'].setColumnWidth(nCols - 1, 65)
                        self.OverviewTab2['OverviewTable'].setHorizontalHeaderItem(nCols - 1,
                                                                                   QtGui.QTableWidgetItem('Unit%02d' % m))
                    self.OverviewTab2['OverviewTable'].setItem(j, m + 6,
                                                               QtGui.QTableWidgetItem(str(k.__getattr__(n).Indx.nrows)))
            # create the axes to plot the waveforms of this channel
            self.OverviewTab1['Figure'].figure.add_subplot(figrows, 10, j + 1)
            self.OverviewTab1['Figure'].figure.axes[j].set_yticks([], []) # eliminate the ticks to have more space
            self.OverviewTab1['Figure'].figure.axes[j].set_xticks([], []) # eliminate the ticks to have more space
            self.OverviewTab1['Figure'].figure.axes[j].set_axis_off()
            self.OverviewTab1['Figure'].figure.axes[j].set_title('Ch %d' % (self.ChansList[j]),
                                                                 fontsize=10,
                                                                 fontdict={'color': 'w'})
            self.PlotChanOverview_proc(k, axes2Plot=self.OverviewTab1['Figure'].figure.axes[j])
            # check the isTrash widgets and make the axes background gray
            if k.__contains__('isTrash'):
                if k.isTrash.read():
                    check.setCheckState(2)
                    self.OverviewTab1['Figure'].figure.axes[j].set_axis_bgcolor([.5, .5, .5])
            if k.__contains__('isMultiunit'):
                if k.isMultiunit.read():
                    isMultiunitCheck.setCheckState(2)
        # set the names of the vertical headers
        self.OverviewTab2['OverviewTable'].setVerticalHeaderLabels(['Ch ' + str(k) for k in self.ChansList])
        # set alternating row colors
        self.OverviewTab2['OverviewTable'].setAlternatingRowColors(True)
        # connect the clicks on this canvas with the channel select function
        self.OverviewTab1['Figure'].figure.canvas.mpl_connect('button_release_event', self.SelChannel)
        # tight layout and draw
        self.OverviewTab1['Figure'].figure.tight_layout()
        self.OverviewTab1['Figure'].figure.canvas.draw()
        # get the sampling frequency (timestamp resolution)
        self.Sf = float(self.h5file.root.Header.TimeStamp_Res.read())
        self.Step = self.WfSize + 1
        # set boolean variable
#__________________________________________________________________________
    def PlotChanOverview_proc(self, node, axes2Plot):
        '''Helper function that plots the unsorted as well as the sorted events
        of one channel into a given axes of the overview figure.

        node      -- pytables channel group (holds Waveforms, TimeStamp,
                     Unsorted and Unit## members)
        axes2Plot -- matplotlib axes to draw into'''
        # get the number of events to plot per unit
        nEvents = self.OverviewNEventsSpin.value()
        Waveforms = node.Waveforms.read()
        # iterate over the members of a node
        for k in node:
            # only the 'Unsorted' array and 'UnitXX' groups are drawn
            if not re.search('Unsorted|Unit[0-9]{2}', k._v_name):
                continue
            # read the indices first, decimating so at most ~nEvents are
            # drawn (integer step relies on Python 2 division)
            if 'Unit' in k._v_name:
                if k.Indx.nrows >= nEvents:
                    indx = k.Indx.read(start=0, stop=k.Indx.nrows, step=k.Indx.nrows / nEvents)
                else:
                    indx = k.Indx.read()
            elif 'Unsorted' in k._v_name:
                if k.nrows >= nEvents:
                    indx = k.read(start=0, stop=k.nrows, step=k.nrows / nEvents)
                else:
                    indx = k.read()
            # obtain the waveforms
            Wf = Waveforms[indx, :]
            if not Wf.any():
                continue
            # faster plotting strategy: draw all spikes of a unit as one
            # artist, separated by None entries that break the line
            # obtain the number of spikes to plot
            n = len(indx)
            # create an array of Nones to append
            nones = np.array(n * [None], ndmin=2).T
            # append nones to the waveforms array and reshape it to a vector
            Wf = np.append(Wf, nones, axis=1).reshape((n * (self.WfSize + 1),))
            # create the x axis (Python 2: range returns a list)
            Ts = range(self.WfSize)
            Ts.append(None)
            # choose the color and the zorder according to the type of unit:
            # unsorted spikes in white behind, units colored on top
            if k._v_name == 'Unsorted':
                color = 'w'
                zorder = 1
            elif 'Unit' in k._v_name:
                # get the unit number
                zorder = int(re.search('[0-9]{2}', k._v_name).group())
                color = self.UnitColors[zorder, :]
                zorder = 100 - zorder
            # get the plot (if any) already labeled with this unit's name
            l = [l for l in axes2Plot.lines if str(l.get_label()) == k._v_name]
            # if a plot with a label equal to the name of the unit exists, update its data
            if len(l) > 0:
                l[0].set_data(Ts * n, Wf)
            # if not, create one
            else:
                axes2Plot.plot(Ts * n, Wf, color=color, rasterized=True,
                               alpha=0.5, label=k._v_name, zorder=zorder)
        # set the limits of the axes
        axes2Plot.set_ylim(-self.OverviewYLimsSpin.value(), self.OverviewYLimsSpin.value())
        # add a small text box with the event count
        # NOTE(review): a new text artist is added on every call; repeated
        # calls on the same axes stack count labels - confirm intended
        bbox_props = dict(boxstyle='round', fc='0.75', ec='0.25', alpha=0.8)
        axes2Plot.text(0.5, 0, 'Count: %d' % node.TimeStamp.nrows, transform=axes2Plot.transAxes,
                       color='k', bbox=bbox_props, size=10, ha='center')
#__________________________________________________________________________
def ChangeOverviewYLim_Proc(self):
if not self.H5FileLoaded:
return
lim = self.OverviewYLimsSpin.value()
for k in self.OverviewTab1['Figure'].figure.axes:
k.set_ylim(-lim, lim)
self.OverviewTab1['Figure'].figure.canvas.draw()
#__________________________________________________________________________
    def PlotChanProc(self):
        '''Load the currently selected channel into the Channel Tab: reads
        waveforms and timestamps from the H5 file, computes PCA features,
        fills the what-to-plot combo box and plots the selected features.'''
        # exit if there is no H5 file loaded
        if not self.H5FileLoaded:
            return
        # clean the channels tab
        self.ResetChannelTab_proc()
        # reset Units list
        self.UnitsList = []
        #pdb.set_trace()
        # load waveforms for a specific channel
        self.CurChan = int(self.ChanSelector.currentText())
        #nspikes = self.NSpikesSlider.value()
        self.CurNodeName = '/Spikes/Chan_%03d' % self.CurChan
        self.CurNode = self.h5file.get_node(self.CurNodeName)
        self.CurWaveforms = self.CurNode.Waveforms.read()
        self.CurTs = self.CurNode.TimeStamp.read()
        # the scroll/spin range covers the whole recording (last timestamp)
        self.TimeScroll['HScroll'].setMaximum(int(self.CurTs[-1]))
        self.TimeScroll['HSpin'].setMaximum(int(self.CurTs[-1]))
        self.unitNodes = [k for k in self.h5file.list_nodes(self.CurNodeName) if re.search('Unit[0-9]{2}', k._v_name)]
        # get the indices of the unsorted; if there are none, create the node
        # with all events marked as unsorted
        if not self.CurNode.__contains__('Unsorted'):
            self.Unsorted = np.arange(len(self.CurTs))
            self.h5file.create_array(self.CurNodeName, 'Unsorted', self.Unsorted)
        else:
            self.Unsorted = self.h5file.get_node(self.CurNodeName, 'Unsorted').read()
        # set the unit names in the combo box
        self.What2Plot.clear()
        self.What2Plot.addItems(['All Waveforms', 'Sorted', 'Unsorted'])
        if self.unitNodes:
            self.What2Plot.addItems([k._v_name for k in self.unitNodes])
        # set the axis limits to appropriately view the unit
        v = self.WaveAxYLim_Spin.value()
        self.ChanTab['WavesFigure'].figure.axes[0].set_ylim(-v, v)
        # get the waveform size for this specific channel
        self.WfSize = self.h5file.root.Header.WaveformSize.read()
        # check whether to plot raw waveforms
        '''
        if self.PlotWaveformsCheck.checkState() == 2:
            if self.CurWaveforms.shape[1] < 10000:
                self.Waveforms2Plot = self.CurWaveforms
            else:
                indx = np.int32(np.linspace(0,self.CurWaveforms.shape[0]-1,10000))
                self.Waveforms2Plot = self.CurWaveforms[indx,:]
            for k in range(self.Waveforms2Plot.shape[0]):
                self.Waveforms.set_data(range(self.WfSize), self.Waveforms2Plot[k,:])
                self.ChanTab['WavesFigure'].figure.axes[0].draw_artist(self.Waveforms)
                self.ChanTab['WavesFigure'].figure.canvas.blit(self.ChanTab['WavesFigure'].figure.axes[0].bbox)'''
        # grab background from the Waveforms Figure to make animations
        self.ChanTab['WavesFigBG'] = self.ChanTab['WavesFigure'].figure.canvas.copy_from_bbox(self.ChanTab['WavesFigure'].figure.axes[0].bbox)
        self.MainFigTab.setTabText(2, 'Chan %02d' % self.CurChan)
        # calculate PCA of the waveforms
        pc = PCA(self.CurWaveforms)
        # put the first two PCs in a KDTree to quickly find the waveforms
        # nearest to the cursor
        self.XYData = cKDTree(pc.Y[:, 0:2], 1000)
        self.ChanTab['PCA'] = pc.Y
        # set the internal variable to true
        self.ChanPlotted = True
        # copy the number of events in the channel into a label so the user
        # can decide whether to decimate or plot them all
        self.nPtsLabel.setText(str(self.CurTs.size))
        # read the plotting parameters in the "Chan Tools" tab to plot
        # the selected feature
        self.PlotFeatures()
        # select the first unit tab (if any) and cache its name/number
        if self.ChanTab['UnitTabsWidget'].count() > 0:
            self.ChanTab['UnitTabsWidget'].setCurrentIndex(0)
            self.CurUnitName = str(self.ChanTab['UnitTabsWidget'].tabText(0))
            self.CurUnit = int(re.search('(?<=Unit)[0-9]{2}', self.CurUnitName).group())
        # x axis for single-spike plots; trailing None breaks the line
        # between spikes (Python 2: range returns a list)
        self.WaveformXax = range(self.WfSize)
        self.WaveformXax.append(None)
        # spike duration in milliseconds
        self.End = 1000 * self.WfSize / self.Sf
        # save h5file changes to disk
        self.h5file.flush()
#__________________________________________________________________________
def setTrash_proc(self):
sender = self.sender()
try:
chan = sender.property('Data').toPyObject()
except:
chan = sender.property('Data')
nodeName = '/Spikes/Chan_%03d' % chan
indx = self.ChansList.index(chan)
if self.h5file.get_node(nodeName).__contains__('isTrash'):
self.h5file.remove_node(nodeName, 'isTrash')
if sender.checkState() in [1, 2]:
self.h5file.create_array(nodeName, 'isTrash', True)
self.OverviewTab1['Figure'].figure.axes[indx].set_axis_bgcolor('y')
elif sender.checkState() == 0:
self.h5file.create_array(nodeName, 'isTrash', False)
self.OverviewTab1['Figure'].figure.axes[indx].set_axis_bgcolor('w')
# save changes to disk
self.h5file.flush()
#__________________________________________________________________________
def isMultiunit_proc(self):
sender = self.sender()
nodeName = '/Spikes/%s' % sender.objectName()
if self.h5file.get_node(nodeName).__contains__('isMultiunit'):
self.h5file.remove_node(nodeName, 'isMultiunit')
if sender.checkState() in [1, 2]:
self.h5file.create_array(nodeName, 'isMultiunit', True)
elif sender.checkState() == 0:
self.h5file.create_array(nodeName, 'isMultiunit', False)
# save changes to disk
self.h5file.flush()
#__________________________________________________________________________
def TrashChans_proc(self):
'''Utility function to mark the channels with fewer than a defined number
of units'''
# check whether an h5file has been loaded
if not self.H5FileLoaded:
return
# obtain parameters
n = self.MarkTrashSpin.value()
chans = self.h5file.list_nodes('/Spikes')
# iterate over nodes in h5file; if fewer than n mark as trash
for l, k in enumerate(chans):
j = int(re.search('(?<=Chan_)[0-9]{3}', k._v_name).group())
if k.TimeStamp.nrows < n:
self.OverviewTab1['Figure'].figure.axes[l].set_axis_bgcolor('y')
self.OverviewTab2['OverviewTable'].cellWidget(l, 1).setChecked(True)
if self.h5file.get_node('/Spikes', 'Chan_%03d' % j).__contains__('isTrash'):
self.h5file.remove_node('/Spikes/Chan_%03d' % j, 'isTrash')
self.h5file.create_array('/Spikes/Chan_%03d' % j, 'isTrash', True)
else:
self.OverviewTab1['Figure'].figure.axes[l].set_axis_bgcolor('w')
self.OverviewTab2['OverviewTable'].cellWidget(l, 1).setChecked(False)
if self.h5file.get_node('/Spikes', 'Chan_%03d' % j).__contains__('isTrash'):
self.h5file.remove_node('/Spikes/Chan_%03d' % j, 'isTrash')
self.h5file.create_array('/Spikes/Chan_%03d' % j, 'isTrash', False)
# save changes to disk
self.h5file.flush()
#update the overview
self.OverviewTab1['Figure'].figure.canvas.draw()
#__________________________________________________________________________
def DeleteTrashChans_proc(self):
# check whether an h5file has been loaded
if not self.H5FileLoaded:
return
chans = self.h5file.list_nodes('/Spikes')
chans.reverse()
n = range(len(chans))
n.reverse()
delchans = []
for j, k in zip(n, chans):
state = self.OverviewTab2['OverviewTable'].cellWidget(j, 1).checkState()
if state == 2:
delchans.append(k._v_name)
self.h5file.remove_node('/Spikes', k._v_name, recursive=True)
if len(delchans) > 0:
self.AddLog('Deleted channels: ' + str(delchans))
self.PlotOverview()
#__________________________________________________________________________
def ResetChan_proc(self):
''' check whether an h5file has been loaded '''
if not self.H5FileLoaded or not self.ChanPlotted:
return
for k in self.h5file.list_nodes(self.CurNodeName):
if k._v_name not in ['Waveforms', 'TimeStamp', 'isTrash']:
self.h5file.remove_node(self.CurNodeName, k._v_name, recursive=True)
self.PlotChanProc()
self.AddLog('%s resetted' % self.CurNodeName)
#__________________________________________________________________________
def AddLog(self, message):
''' add log to to keep a history of changes to the file '''
if not self.H5FileLoaded:
return
if not self.h5file.__contains__('/Log'):
self.h5file.create_group('/', 'Log', 'History of changes')
name = 'Entry_%s_%s_%s_%s_%s_%s' % datetime.datetime.now().timetuple()[0:6]
self.h5file.create_array('/Log', name, message)
# save changes to disk
self.h5file.flush()
#add the item to the log browser
self.LogCombo.addItem(name)
#__________________________________________________________________________
def SetLogText_proc(self):
if self.LogCombo.currentIndex == -1:
return
node = str(self.LogCombo.currentText())
if node:
log = self.h5file.get_node('/Log', node).read()
self.LogTextBrowser.setText(log)
#__________________________________________________________________________
    def CloseFile(self):
        ''' close the h5 file'''
        # nothing to do when no file is open
        if not self.H5FileLoaded:
            return
        # flush pending writes before releasing the handle
        self.h5file.flush()
        self.h5file.close()
        self.H5FileLoaded = False
        print 'h5 File closed'
#__________________________________________________________________________
def SelChannel(self, event):
''' selects a channel when axes are clicked'''
if event.inaxes:
chan = int(re.search('(?<=Ch )[0-9]{1,3}', event.inaxes.get_title()).group())
c = [int(self.ChanSelector.itemText(k)) for k in range(self.ChanSelector.count())].index(chan)
self.ChanSelector.setCurrentIndex(c)
#__________________________________________________________________________
    def TableRowChanged_proc(self, sel):
        # keep the channel-selector combo in sync with the selected table row
        self.ChanSelector.setCurrentIndex(sel)
#__________________________________________________________________________
    def Settings(self):
        ''' edit paths'''
        # settings.edit() returns 1 when the dialog was accepted
        if settings.edit() == 1:
            self.WorkingDir = settings.WorkingDir
#__________________________________________________________________________
    def AutocorrOpts(self):
        # open the autocorrelation-options dialog; the result is not used here
        if autocorropts.edit() == 1:
            pass
#__________________________________________________________________________
    def About(self):
        ''' opens a small dialog with information about the software'''
        self.AboutMsg.show()
#__________________________________________________________________________
    def NearestPoint(self, event):
        ''' when right button clicked over the features window, calculates the closest
        point and plots its corresponding waveform'''
        # only react to right-clicks inside the axes while no toolbar tool
        # (zoom/pan) is active
        if event.button == 3 and event.inaxes and self.ChanTab['FeaturesFigNtb'].mode == '':
            featuresax = self.ChanTab['FeaturesFig'].figure.axes[0]
            wavesax = self.ChanTab['WavesFigure'].figure.axes[0]
            # after NUnitsSpin highlighted waveforms, wipe the overlays by
            # restoring the saved canvas backgrounds (blit animation)
            if self.PlotUnitCounter >= self.NUnitsSpin.value():
                self.ChanTab['WavesFigure'].figure.canvas.restore_region(self.ChanTab['WavesFigBG'])
                self.PlotUnitCounter = 0
                for k in self.ChanTab['FeaturesFigBG']:
                    self.ChanTab['FeaturesFig'].figure.canvas.restore_region(k)
            # nearest neighbour of the click in feature space (KDTree query)
            _, res = self.XYData.query([event.xdata, event.ydata], 1)
            # move the cursor marker and show the matching waveform
            self.cursor.set_data(self.XYData.data[res, 0], self.XYData.data[res, 1])
            self.SampleWaveform.set_data(range(self.WfSize), self.CurWaveforms[self.dataIndx[res], :])
            # redraw only the animated artists and blit the damaged regions
            featuresax.draw_artist(self.cursor)
            wavesax.draw_artist(self.SampleWaveform)
            self.ChanTab['FeaturesFig'].figure.canvas.blit(featuresax.bbox)
            self.ChanTab['WavesFigure'].figure.canvas.blit(wavesax.bbox)
            self.PlotUnitCounter += 1
#__________________________________________________________________________
def draw_callback(self, event):
''' any draw callback triggers the capture of the figure background for
using it in the animations '''
if not self.ChanPlotted:
return
if event.canvas == self.ChanTab['FeaturesFig'].figure.canvas:
bg = []
for k in self.ChanTab['FeaturesFig'].figure.axes:
bg.append(self.ChanTab['FeaturesFig'].figure.canvas.copy_from_bbox(k.bbox))
self.ChanTab['FeaturesFigBG'] = bg
elif event.canvas == self.ChanTab['WavesFigure'].figure.canvas:
self.ChanTab['WavesFigBG'] = self.ChanTab['WavesFigure'].figure.canvas.copy_from_bbox(self.ChanTab['WavesFigure'].figure.axes[0].bbox)
#__________________________________________________________________________
def PlotFeatures(self):
''' determines what 2 plot based on the user choices'''
# obtain labels and return if are the same
xlabel = self.XPlot.currentText()
ylabel = self.YPlot.currentText()
if xlabel == ylabel:
return
curchan = int(self.ChanSelector.currentText())
if self.PlotValidsOnlyCheck.checkState() == 2 and \
self.CurNode.__contains__('ValidWFs'):
print 'you selected to plot only the valid WFs'
What2Plot = str(self.What2Plot.currentText()) # string containing what to plot
self.CurNodeName = '/Spikes/Chan_%03d' % curchan
#unitNodes = [k for k in self.h5file.list_nodes(self.CurNodeName) if re.search('Unit[0-9]{2}', k._v_name)]
if What2Plot in ['All Waveforms', 'Sorted']:
self.dataIndx = range(self.CurTs.size)
pc = self.ChanTab['PCA']
elif What2Plot == 'Unsorted':
self.dataIndx = self.h5file.get_node(self.CurNodeName, 'Unsorted').read()
pc = PCA(self.CurWaveforms[self.dataIndx, :])
pc = pc.Y
elif re.search('Unit', What2Plot):
self.CurUnitName = What2Plot
self.dataIndx = self.h5file.get_node(self.CurNodeName, What2Plot).Indx.read()
pc = PCA(self.CurWaveforms[self.dataIndx, :])
pc = pc.Y
# save what is the feature
self.CurFeaturePlot = What2Plot
# get the choice for the x axis
if xlabel == 'PCA1':
x = pc[:, 0]
elif xlabel == 'PCA2':
x = pc[:, 1]
elif xlabel == 'PCA3':
x = pc[:, 2]
elif xlabel == 'Slice1':
x = self.CurWaveforms[self.dataIndx, self.SliceSpBx1.value()]
x = x / 100.0
elif xlabel == 'Slice2':
x = self.CurWaveforms[self.dataIndx, self.SliceSpBx2.value()]
x = x / 100.0
elif xlabel == 'Energy':
x = np.sum(np.power(self.CurWaveforms[self.dataIndx, :], 2), axis=1)
x = x / 1000000.0
elif xlabel == 'Peak':
x = self.CurWaveforms[self.dataIndx, :].max(axis=1)
x = x / 100.0
elif xlabel == 'Valley':
x = self.CurWaveforms[self.dataIndx, :].min(axis=1)
x = x / 100.0
elif xlabel == 'Peak Pt':
x = self.CurWaveforms[self.dataIndx, :].argmax(axis=1)
elif xlabel == 'Valley Pt':
x = self.CurWaveforms[self.dataIndx, :].argmin(axis=1)
elif xlabel == 'Pk2Pk Amp':
x = self.CurWaveforms[self.dataIndx, :].max(axis=1) - self.CurWaveforms[self.dataIndx, :].min(axis=1)
x = x / 100.0
elif xlabel == 'Time':
x = self.CurTs[self.dataIndx]
x = x / 60000.0
# get the choice for the y axis
if ylabel == 'PCA1':
y = pc[:, 0]
elif ylabel == 'PCA2':
y = pc[:, 1]
elif ylabel == 'PCA3':
y = pc[:, 2]
elif ylabel == 'Slice1':
y = self.CurWaveforms[self.dataIndx, self.SliceSpBx1.value()]
y = y / 100.0
elif ylabel == 'Slice2':
y = self.CurWaveforms[self.dataIndx, self.SliceSpBx2.value()]
y = y / 100.0
elif ylabel == 'Energy':
y = np.sum(np.power(self.CurWaveforms[self.dataIndx, :], 2), axis=1)
y = y / 1000000.0
elif ylabel == 'Peak':
y = self.CurWaveforms[self.dataIndx, :].max(axis=1)
y = y / 100.0
elif ylabel == 'Valley':
y = self.CurWaveforms[self.dataIndx, :].min(axis=1)
y = y / 100.0
elif ylabel == 'Peak Pt':
y = self.CurWaveforms[self.dataIndx, :].argmax(axis=1)
elif ylabel == 'Valley Pt':
y = self.CurWaveforms[self.dataIndx, :].argmin(axis=1)
elif ylabel == 'Pk2Pk Amp':
y = self.CurWaveforms[self.dataIndx, :].max(axis=1) - self.CurWaveforms[self.dataIndx, :].min(axis=1)
y = y / 100.0
elif ylabel == 'Time':
y = self.CurTs[self.dataIndx]
y = y / 60000.0
naxes = len(self.ChanTab['FeaturesFig'].figure.axes)
#nspikes = self.NSpikesSlider.value()
title = '%s: %s vs %s' % (What2Plot, xlabel, ylabel)
# obtain the axis limits if we are plotting the same variables
same_limits = False
if naxes > 0 and \
self.ChanTab['FeaturesFig'].figure.axes[0].get_title() == title:
same_limits = True
xlim = self.ChanTab['FeaturesFig'].figure.axes[0].get_xlim()
ylim = self.ChanTab['FeaturesFig'].figure.axes[0].get_ylim()
# plot only on one axes
if self.PlotDensityCheck.checkState() == 0:
if naxes == 0:
ax1 = self.ChanTab['FeaturesFig'].figure.add_subplot(111)
ax1.set_axis_bgcolor('k')
elif naxes == 1:
ax1 = self.ChanTab['FeaturesFig'].figure.axes[0]
ax1.cla()
ax1.set_axis_bgcolor('k')
elif naxes >= 2:
self.ChanTab['FeaturesFig'].figure.clear()
ax1 = self.ChanTab['FeaturesFig'].figure.add_subplot(111)
ax1.set_axis_bgcolor('k')
# create 2 subplots to host the density
elif self.PlotDensityCheck.checkState() == 2:
if naxes == 0:
ax1 = self.ChanTab['FeaturesFig'].figure.add_subplot(121)
ax2 = self.ChanTab['FeaturesFig'].figure.add_subplot(122, sharex=ax1, sharey=ax1)
elif naxes == 1:
self.ChanTab['FeaturesFig'].figure.clear()
ax1 = self.ChanTab['FeaturesFig'].figure.add_subplot(121)
ax2 = self.ChanTab['FeaturesFig'].figure.add_subplot(122, sharex=ax1, sharey=ax1)
elif naxes == 2:
ax1 = self.ChanTab['FeaturesFig'].figure.axes[0]
ax2 = self.ChanTab['FeaturesFig'].figure.axes[1]
ax1.cla()
ax2.cla()
ax2.set_axis_bgcolor('k')
# create and plot a 2d histogram
# setup the axes
ax1.set_title(title, fontdict={'color': 'w'})
ax1.tick_params(color=[.5, .5, .5])
for k in ax1.spines.values():
k.set_edgecolor([.5, .5, .5])
ax1.set_xticklabels([])
ax1.set_yticklabels([])
ax1.set_axis_bgcolor('k')
self.cursor, = ax1.plot([], 's', mfc='none', ms=6, mec='r',
animated=True, label='sample')
# iterate over the members of that channel
if What2Plot == 'All Waveforms':
nodes = self.h5file.list_nodes(self.CurNodeName)
for leaf in nodes:
if leaf._v_name == 'Unsorted':
# select only some indices to plot
if leaf.nrows > self.nPtsSpin.value():
indx = leaf.read(0, leaf.nrows, leaf.nrows / self.nPtsSpin.value())
else:
indx = leaf.read()
# plot unsorted
ax1.plot(x[indx], y[indx], ',',
color=[.5, .5, .5], label='data_Unsorted')
unit = re.search('(?<=Unit)[0-9]{2}', leaf._v_name)
if unit:
# select some units to plot
if leaf.Indx.nrows > self.nPtsSpin.value():
indx = leaf.Indx.read(0, leaf.Indx.nrows, leaf.Indx.nrows / self.nPtsSpin.value())
else:
indx = leaf.Indx.read()
ax1.plot(x[indx], y[indx], ',', label='data_' + leaf._v_name,
rasterized=True,
color=self.UnitColors[int(unit.group()), :],
mec=self.UnitColors[int(unit.group()), :])
# add unit to the tab widget
self.UnitsTable_AddUnit(leaf._v_name)
elif What2Plot == 'Sorted':
nodes = self.h5file.list_nodes(self.CurNodeName)
for leaf in nodes:
unit = re.search('(?<=Unit)[0-9]{2}', leaf._v_name)
if unit:
# select some units to plot
if leaf.Indx.nrows > self.nPtsSpin.value():
indx = leaf.Indx.read(0, leaf.Indx.nrows, leaf.Indx.nrows / self.nPtsSpin.value())
else:
indx = leaf.Indx.read()
ax1.plot(x[indx, :], y[indx, :], ',', label='data_' + leaf._v_name,
rasterized=True,
color=self.UnitColors[int(unit.group()), :],
mec=self.UnitColors[int(unit.group()), :],
zorder=10)
# add unit to the tab widget
self.UnitsTable_AddUnit(leaf._v_name)
# to plot the unsorted channels
elif What2Plot == 'Unsorted':
lx = len(x)
# select some units to plot
if lx > self.nPtsSpin.value():
indx = range(0, lx, lx / self.nPtsSpin.value())
else:
indx = range(lx)
ax1.plot(x[indx], y[indx], ',', color=[.5, .5, .5],
label='data_Unsorted',
rasterized=True,
zorder=10)
# plot a specific unit
elif re.search('Unit', What2Plot):
unit = re.search('(?<=Unit)[0-9]{0,2}', What2Plot).group()
lx = len(x)
# select some units to plot
if lx > self.nPtsSpin.value():
indx = range(0, lx, lx / self.nPtsSpin.value())
else:
indx = range(lx)
ax1.plot(x[indx], y[indx], ',', label='data_' + What2Plot,
rasterized=True,
color=self.UnitColors[int(unit), :],
mec=self.UnitColors[int(unit), :],
zorder=10)
# add unit to the tab widget
self.UnitsTable_AddUnit(What2Plot)
if same_limits:
ax1.set_ylim(ylim)
ax1.set_xlim(xlim)
else:
ax1.relim()
ax1.autoscale_view(True, True, True)
# vertical and horizontal lines @ x and y = 0
ax1.axvline(0, color=[.5, .5, .5], zorder=0)
ax1.axhline(0, color=[.5, .5, .5], zorder=0)
# 60 minute line
if xlabel == 'Time':
ax1.axvline(60, color='gray', zorder=0)
# create KDTree objet from the selected data for fast search
self.XYData = cKDTree(np.array([x, y]).transpose())
# connect figure to the motion notify function
if not ax1.callbacks.callbacks or not hasattr(self, 'axZoomCID'):
self.axZoomCID = ax1.callbacks.connect('ylim_changed', self.AxesZoom_proc)
# connect figure to the motion notify function
if not hasattr(self, 'motionCID'):
self.motionCID = self.ChanTab['FeaturesFig'].figure.canvas.mpl_connect('motion_notify_event',
self.NearestPoint)
# connect figure to the draw function
if not hasattr(self, 'drawCID'):
self.drawCID = self.ChanTab['FeaturesFig'].figure.canvas.mpl_connect('draw_event',
self.draw_callback)
# plot density if checked
if self.PlotDensityCheck.checkState() == 2:
self.ReplotDensity_proc()
# set tight layout and redraw figure
self.ChanTab['FeaturesFig'].figure.tight_layout()
self.ChanTab['FeaturesFig'].figure.canvas.draw()
#__________________________________________________________________________
def Plot3DFeatures(self):
# obtain labels and return if are the same
xlabel = self.XPlot.currentText()
ylabel = self.YPlot.currentText()
zlabel = self.ZPlot.currentText()
if xlabel == ylabel or xlabel == zlabel or ylabel == zlabel:
return
curchan = int(self.ChanSelector.currentText())
if self.PlotValidsOnlyCheck.checkState() and self.CurNode.__contains__('ValidWFs'):
print 'you selected to plot only the valid WFs'
What2Plot = str(self.What2Plot.currentText()) # string containing what to plot
self.CurNodeName = '/Spikes/Chan_%03d' % curchan
if What2Plot == 'All Waveforms':
self.dataIndx = range(self.CurTs.size)
pc = self.ChanTab['PCA']
elif What2Plot == 'Unsorted':
self.dataIndx = self.h5file.get_node(self.CurNodeName, 'Unsorted').read()
pc = PCA(self.CurWaveforms[self.dataIndx, :])
pc = pc.Y
elif re.search('Unit', What2Plot):
self.CurUnitName = What2Plot
self.dataIndx = self.h5file.get_node(self.CurNodeName, What2Plot).Indx.read()
pc = PCA(self.CurWaveforms[self.dataIndx, :])
pc = pc.Y
elif What2Plot == 'Sorted':
return
# save what is the feature
self.CurFeaturePlot = What2Plot
# get the choice for the x axis
if xlabel == 'PCA1':
x = pc[:, 0]
elif xlabel == 'PCA2':
x = pc[:, 1]
elif xlabel == 'PCA3':
x = pc[:, 2]
elif xlabel == 'Slice1':
x = self.CurWaveforms[self.dataIndx, self.SliceSpBx1.value()] / 100.0
x = x / 100.0
elif xlabel == 'Slice2':
x = self.CurWaveforms[self.dataIndx, self.SliceSpBx2.value()] / 100.0
x = x / 100.0
elif xlabel == 'Energy':
x = np.sum(np.power(self.CurWaveforms[self.dataIndx, :], 2), axis=1)
x = x / 1000000.0
elif xlabel == 'Peak':
x = self.CurWaveforms[self.dataIndx, :].max(axis=1)
x = x / 100.0
elif xlabel == 'Valley':
x = self.CurWaveforms[self.dataIndx, :].min(axis=1)
x = x / 100.0
elif xlabel == 'Pk2Pk Amp':
x = self.CurWaveforms[self.dataIndx, :].max(axis=1) - self.CurWaveforms[self.dataIndx, :].min(axis=1)
x = x / 100.0
elif xlabel == 'Time':
x = self.CurTs[self.dataIndx]
x = x / 60000.0
# get the choice for the y axis
if ylabel == 'PCA1':
y = pc[:, 0]
elif ylabel == 'PCA2':
y = pc[:, 1]
elif ylabel == 'PCA3':
y = pc[:, 2]
elif ylabel == 'Slice1':
y = self.CurWaveforms[self.dataIndx, self.SliceSpBx1.value()] / 100.0
y = y / 100.0
elif ylabel == 'Slice2':
y = self.CurWaveforms[self.dataIndx, self.SliceSpBx2.value()] / 100.0
y = y / 100.0
elif ylabel == 'Energy':
y = np.sum(np.power(self.CurWaveforms[self.dataIndx, :], 2), axis=1)
y = y / 1000000.0
elif ylabel == 'Peak':
y = self.CurWaveforms[self.dataIndx, :].max(axis=1)
y = y / 100.0
elif ylabel == 'Valley':
y = self.CurWaveforms[self.dataIndx, :].min(axis=1)
y = y / 100.0
elif ylabel == 'Pk2Pk Amp':
y = self.CurWaveforms[self.dataIndx, :].max(axis=1) - self.CurWaveforms[self.dataIndx, :].min(axis=1)
y = y / 100.0
elif ylabel == 'Time':
y = self.CurTs[self.dataIndx]
y = y / 60000.0
# get the choice for the z axis
if zlabel == 'PCA1':
z = pc[:, 0]
elif zlabel == 'PCA2':
z = pc[:, 1]
elif zlabel == 'PCA3':
z = pc[:, 2]
elif zlabel == 'Slice1':
z = self.CurWaveforms[self.dataIndx, self.SliceSpBx1.value()] / 100.0
z = z / 100.0
elif zlabel == 'Slice2':
z = self.CurWaveforms[self.dataIndx, self.SliceSpBx2.value()] / 100.0
z = z / 100.0
elif zlabel == 'Energy':
z = np.sum(np.power(self.CurWaveforms[self.dataIndx, :], 2), axis=1)
z = z / 1000000.0
elif zlabel == 'Peak':
z = self.CurWaveforms[self.dataIndx, :].max(axis=1)
z = z / 100.0
elif zlabel == 'Valley':
z = self.CurWaveforms[self.dataIndx, :].min(axis=1)
z = z / 100.0
elif zlabel == 'Pk2Pk Amp':
z = self.CurWaveforms[self.dataIndx, :].max(axis=1) - self.CurWaveforms[self.dataIndx, :].min(axis=1)
z = z / 100.0
elif zlabel == 'Time':
z = self.CurTs[self.dataIndx]
z = z / 60000.0
if What2Plot == 'All Waveforms' and self.CurNode.__contains__('ValidWFs'):
valid = self.CurNode.ValidWFs.read()
x = x[valid]
y = y[valid]
z = z[valid]
items = self.Widget3d.items
for i in items:
self.Widget3d.removeItem(i)
grid = gl.GLGridItem()
self.Widget3d.addItem(grid)
if zlabel != 'Density':
handle = gl.GLScatterPlotItem(pos=np.array([x, y, z]).T,
size=np.ones(x.size),
color=(1.0, 0.0, 0.0, 1.0),
pxMode=True)
self.Widget3d.addItem(handle)
else:
#pass
# obtain axes and first axes limits
ax1 = self.ChanTab['FeaturesFig'].figure.axes[0]
#xlim = ax1.get_xlim()
#ylim = ax1.get_ylim()
# search for the unsorted or the units plots to obatin data
xpts = []
ypts = []
for k in ax1.get_children():
if re.search('Unsorted|Unit', str(k.get_label())):
data = k.get_data()
xpts.extend(data[0])
ypts.extend(data[1])
xypoints = np.array([xpts, ypts]).T
# check wich points are inside the axes
verts = ax1.viewLim.corners()
verts[2] = ax1.viewLim.corners()[3]
verts[3] = ax1.viewLim.corners()[2]
inpoly = Path(verts).contains_points(xypoints)
# create a 2d histogram of the data and scale it logaritmically
h, _, _ = np.histogram2d(xypoints[inpoly, 0],
xypoints[inpoly, 1],
bins=self.PlotDensityBins.value(),
normed=False)
h[h <= 0] = 1
h = np.log10(h)
x, y = h.shape
x, y = np.arange(x), np.arange(y)
handle = gl.GLSurfacePlotItem(x, y, z=10 * h / h.max(),
shader='heightColor')
handle.translate(-x.size / 2.0, -y.size / 2.0, 0.0)
handle.scale(1, 1, 2)
self.Widget3d.addItem(handle)
#__________________________________________________________________________
def ValidateWFs_proc(self):
''' obtains the coordinates of the current feature axis, and uses it to
determine wich points lay inside it.
It also saves the data to the h5file'''
# exits if there is no h5 file loaded or channel plotted
if not self.H5FileLoaded or not self.ChanPlotted:
return
# get axes handle and limits
ax = self.ChanTab['FeaturesFig'].figure.axes[0]
xlim = ax.get_xlim()
ylim = ax.get_ylim()
# obtain coordinates of the current axes and uses that to build a polygon
xyverts = [[xlim[0], ylim[0]], [xlim[0], ylim[1]], [xlim[1], ylim[1]], [xlim[1], ylim[0]]]
# obtain the indices of the waveforms inside the polygon
p = Path(xyverts).contains_points(self.XYData.data)
# in case no points were inside the axes
if len(p) == 0:
self.MsgBox.setIcon(QtGui.QMessageBox.Warning)
self.MsgBox.setText('There were no selected points')
self.MsgBox.setwindowTitle('Warning')
self.MsgBox.show()
return
self.ValidWFs = np.flatnonzero(p)
self.InvalidWFs = np.flatnonzero(~p)
# remove the 'ValidWFs' field if it already exists
if self.h5file.get_node(self.CurNodeName).__contains__('ValidWFs'):
self.h5file.remove_node(self.CurNodeName, 'ValidWFs')
# remove the 'InvalidWFs' field if it already exists
if self.h5file.get_node(self.CurNodeName).__contains__('InvalidWFs'):
self.h5file.remove_node(self.CurNodeName, 'InvalidWFs')
# save the ValidWFs indices to the h5file
self.h5file.create_array(self.CurNodeName, 'ValidWFs', self.ValidWFs)
# save the InvalidWFs indices to the h5file
self.h5file.create_array(self.CurNodeName, 'InvalidWFs', self.InvalidWFs)
# save changes to disk
self.h5file.flush()
# update the information on the overview table
row = self.ChanSelector.currentIndex()
item = QtGui.QTableWidgetItem(str(self.ValidWFs.size))
self.OverviewTab2['OverviewTable'].takeItem(row, 5)
self.OverviewTab2['OverviewTable'].setItem(row, 5, item)
#__________________________________________________________________________
    def ReplotDensity_proc(self):
        ''' replot density using all the resolution only to the visible points'''
        # check whether the number of axes in the figure
        if len(self.ChanTab['FeaturesFig'].figure.axes) != 2:
            return
        # obtain axes and first axes limits
        ax1 = self.ChanTab['FeaturesFig'].figure.axes[0]
        ax2 = self.ChanTab['FeaturesFig'].figure.axes[1]
        xlim = ax1.get_xlim()
        ylim = ax1.get_ylim()
        # search for the unsorted or the units plots to obatin data
        xpts = []
        ypts = []
        for k in ax1.get_children():
            if re.search('Unsorted|Unit', str(k.get_label())):
                data = k.get_data()
                xpts.extend(data[0])
                ypts.extend(data[1])
        xypoints = np.array([xpts, ypts]).T
        # check wich points are inside the axes
        # (swap the last two corners so the vertices trace the rectangle
        # in order rather than crossing diagonally)
        verts = ax1.viewLim.corners()
        verts[2] = ax1.viewLim.corners()[3]
        verts[3] = ax1.viewLim.corners()[2]
        inpoly = Path(verts).contains_points(xypoints)
        # create a 2d histogram of the data and scale it logaritmically
        h, xd, yd = np.histogram2d(xypoints[inpoly, 0], xypoints[inpoly, 1],
                                   bins=self.PlotDensityBins.value(),
                                   normed=False)
        # clamp empty bins to 1 so log10 maps them to 0
        h[h <= 0] = 1
        h = np.log10(h)
        # clean axes No2 and plot the 2d histogram
        ax2.cla()
        cmap = helper_widgets.colormaps[settings.DensityCM]
        ax2.pcolormesh(xd, yd, h.transpose(), cmap=cmap)
        # set axis limits
        ax2.set_xlim(xlim)
        ax2.set_ylim(ylim)
        # remove tick labels
        ax1.set_xticklabels([])
        ax1.set_yticklabels([])
        ax2.set_xticklabels([])
        ax2.set_yticklabels([])
        # create vertical and horizontal lines at 0
        ax2.axvline(0, color=[.5, .5, .5])
        ax2.axhline(0, color=[.5, .5, .5])
        # redraw the figure
        self.ChanTab['FeaturesFig'].figure.canvas.draw()
#__________________________________________________________________________
    def AxesZoom_proc(self, ax):
        '''Callback fired when the features axes limits change (zoom/pan);
        currently it only triggers a density replot when the density panel
        is visible. The point collection below feeds the commented-out
        waveform-rescaling code and has no other effect.'''
        xpts = []
        ypts = []
        for k in ax.get_children():
            if re.search('Unsorted|Unit', str(k.get_label())):
                data = k.get_data()
                xpts.extend(data[0])
                ypts.extend(data[1])
        #xypoints = np.array([xpts,ypts]).transpose()
        # check wich points are inside the axes
        verts = ax.viewLim.corners()
        verts[2] = ax.viewLim.corners()[3]
        verts[3] = ax.viewLim.corners()[2]
        # inpoly = points_inside_poly(xypoints, verts)
        # w = self.CurWaveforms[inpoly,:]
        # self.ChanTab['WavesFigure'].figure.axes[0].set_ylim(w.min(), w.max())
        # self.ChanTab['WavesFigure'].figure.canvas.draw()
        if len(self.ChanTab['FeaturesFig'].figure.axes) == 2:
            self.ReplotDensity_proc()
#__________________________________________________________________________
    def AutoClust_proc(self):
        '''Run KlustaKwik on the first two feature dimensions and plot the
        resulting clusters on the features figure.'''
        if not self.H5FileLoaded or not self.ChanPlotted:
            return
        # cluster only on the first two columns of the feature data
        if self.XYData.data.shape[1] > 2:
            data = self.XYData.data[:, 0:2]
        else:
            data = self.XYData.data
        # external KlustaKwik wrapper: returns one index array per cluster
        clustIndx = KlustaKwik_call(data, self.MinClust.value(), self.MaxClust.value())
        fig = self.ChanTab['FeaturesFig'].figure
        fig.clear()
        ax = fig.add_subplot(111)
        ax.set_axis_bgcolor('k')
        for k in range(len(clustIndx)):
            ax.plot(data[clustIndx[k], 0], data[clustIndx[k], 1], '.',
                    label='clust %d' % k)
        # NOTE: ncol uses Python 2 integer division intentionally
        ax.legend(fancybox=True, mode='expand', ncol=len(clustIndx) / 2,
                  loc=9, prop={'size': 10})
        ax.grid(color='grey')
        fig.canvas.draw()
        # close the dialog that launched this action
        self.sender().parentWidget().close()
#__________________________________________________________________________
    def TrimWaveforms_proc(self, eclick, erelease):
        '''Rectangle-selector callback: the drag defines a line segment; every
        waveform of the current unit that crosses that line is moved back to
        the Unsorted pool, and all affected plots/tables are refreshed.'''
        # first check whether there's any waveform plotted
        # if it is visible, and if it is the current unit
        for k in self.ChanTab['WavesFigure'].figure.axes[0].lines:
            if 'Unit' in k.get_label() and k.get_visible() and \
               k.get_label() == self.CurUnitName:
                break
        else:
            print "No units found in the plot ..."
            self.trimWaveformsRect.set_active(False)
            return
        # get the indices of the current unit's waveforms
        indx = self.h5file.get_node('/Spikes/Chan_%03d/%s' % (self.CurChan, self.CurUnitName), 'Indx').read()
        data = self.CurWaveforms[indx, :]
        # get line equation y = mx + n from the drag start/end points
        x1 = eclick.xdata
        x2 = erelease.xdata
        y1 = eclick.ydata
        y2 = erelease.ydata
        # return if is a point and not a line
        if x1 == x2:
            self.trimWaveformsRect.set_active(False)
            return
        m = (y2 - y1) / (x2 - x1)
        n = y1 - m * x1
        # get the y value of nearest integer x:
        x = np.array([x1, x2])
        x.sort()
        # sample indices spanned by the segment
        # (xData is a plain list; the comparison relies on numpy broadcasting
        # against the scalar bounds — presumably returns a bool array; verify)
        xData = range(self.WfSize)
        indx1 = np.flatnonzero(xData > x[0]).min()
        indx2 = np.flatnonzero(xData < x[1]).max()
        y = np.array([m * xData[k] + n for k in range(indx1, indx2)])
        #print x, y
        # get the data bounded by the indices
        data2 = data[:, indx1:indx2]
        #print data2.shape, y.shape
        t = data2 - y
        #print t
        t = np.array(t)
        # a waveform intersects the line iff its samples are not all on
        # one side of it
        intersect = []
        for j, k in enumerate(t):
            if not (np.all(k < 0) or np.all(k > 0)):
                intersect.append(j)
        # update the node containing the unit indices
        self.h5file.remove_node(self.CurNodeName + '/' + self.CurUnitName, 'Indx')
        self.h5file.create_array(self.CurNodeName + '/' + self.CurUnitName, 'Indx', np.delete(indx, intersect))
        # add the removed points to the unsorted indexes
        self.Unsorted = self.h5file.get_node(self.CurNodeName, 'Unsorted').read()
        self.Unsorted = np.append(self.Unsorted, indx[intersect])
        self.Unsorted.sort()
        # update the unsorted in the h5file
        self.h5file.remove_node(self.CurNodeName, 'Unsorted')
        self.h5file.create_array(self.CurNodeName, 'Unsorted', self.Unsorted)
        # save changes to disk
        self.h5file.flush()
        # update the information in the overview table
        row = self.ChanSelector.currentIndex()
        self.OverviewTab2['OverviewTable'].takeItem(row, self.CurUnit + 6)
        lbl = QtGui.QTableWidgetItem(str(self.h5file.get_node(self.CurNodeName, self.CurUnitName).Indx.nrows))
        self.OverviewTab2['OverviewTable'].setItem(row, self.CurUnit + 6, lbl)
        # update the information on the unit label
        self.ChanTab['UnitCountLabel'][self.CurUnitName].setText(str(self.h5file.get_node(self.CurNodeName, self.CurUnitName).Indx.nrows))
        # replot the features
        self.PlotFeatures()
        # replot waveforms
        self.plot_unit_waveforms()
        # replot the unit avg waveform, histogram and autocorrelation
        self.PlotUnitFigure_proc()
        eclick.inaxes.figure.canvas.draw()
        # one-shot tool: deactivate the selector after use
        self.trimWaveformsRect.set_active(False)
#__________________________________________________________________________
    def ActivateTrimWaveforms_proc(self):
        # arm the rectangle selector that feeds TrimWaveforms_proc
        self.trimWaveformsRect.set_active(True)
#__________________________________________________________________________
    def plot_unit_waveforms(self):
        '''Plot (or refresh) a decimated sample of the waveforms belonging to
        the unit whose tab is currently selected.'''
        # get unit name and number
        unitName = str(self.ChanTab['UnitTabsWidget'].tabText(self.ChanTab['UnitTabsWidget'].currentIndex()))
        unitNo = int(re.search('(?<=Unit)[0-9]{2}', unitName).group())
        # get axes handle and children labels
        fig = self.ChanTab['WavesFigure'].figure
        ax = fig.axes[0]
        childrenLabels = [str(k.get_label()) for k in ax.get_children()]
        # get the number of spikes to plot (decimate if the unit has more)
        nspikes = self.NSpikesSpin.value()
        node = self.CurNode.__getattr__(self.CurUnitName)
        nrows = node.Indx.nrows
        if nrows > nspikes:
            unitIndx = node.Indx.read(start=0, stop=nrows, step=nrows / nspikes)
        else:
            unitIndx = node.Indx.read()
        # obtain the length of units to plot
        n = len(unitIndx)
        # create an array of Nones to append; the None breaks the line
        # between consecutive waveforms so one plot call draws them all
        nones = np.array(n * [None], ndmin=2).T
        # create the x indexes
        Ts = np.tile(np.arange(self.WfSize), (n, 1))
        Ts = np.append(Ts, nones, axis=1).reshape((n * (self.WfSize + 1),))
        # get the waveforms, append nones, and reshape it to a vector
        Wf = self.CurNode.Waveforms[unitIndx, :]
        Wf = np.append(Wf, nones, axis=1).reshape((n * (self.WfSize + 1),))
        # create the plot if it doesn't exists
        if unitName not in childrenLabels:
            ax.plot(Ts, Wf, color=self.UnitColors[unitNo, :], alpha=0.7,
                    label=unitName)
        # if exists update the data
        # NOTE(review): the lookup below compares against self.CurUnitName
        # while the membership test used unitName — if the two ever differ
        # the wrong line gets updated; confirm they are always equal here
        elif unitName in childrenLabels:
            for k in self.ChanTab['WavesFigure'].figure.axes[0].get_lines():
                if k.get_label() == self.CurUnitName:
                    break
            k.set_data(Ts, Wf)
            k.set_visible(True)
        fig.canvas.draw()
#__________________________________________________________________________
def AddUnit_proc(self):
''' starts a lasso instance to draw a line around a ROI'''
# check whether there is a channel ploted
if not self.ChanPlotted:
return
# check if what is plotted is all waveforms or unsorted
title = str(self.ChanTab['FeaturesFig'].figure.axes[0].get_title())
if not re.search('Waveforms|Unsorted', title):
return
# return if a tool is selected in the toolbar
if self.ChanTab['FeaturesFigNtb'].mode != '':
return
# create a new lasso instance
self.LassoCID = self.ChanTab['FeaturesFig'].figure.canvas.mpl_connect('button_press_event',
self.LassoAddUnit_proc)
#__________________________________________________________________________
def Keep_proc(self):
''' starts a lasso instance to draw a line around a ROI'''
# check whether there is a channel ploted
if not self.ChanPlotted:
return
# check if a unit is plotted
title = str(self.ChanTab['FeaturesFig'].figure.axes[0].get_title())
if not re.search('Unit', title):
return
self.What2Plot.count()
# return if a tool is selected in the toolbar
if self.ChanTab['FeaturesFigNtb'].mode != '':
return
# create a new lasso instance
self.LassoCID = self.ChanTab['FeaturesFig'].figure.canvas.mpl_connect('button_press_event',
self.LassoKeep_proc)
#__________________________________________________________________________
def AddRegion_proc(self):
''' starts a lasso instance to draw a line around a ROI'''
# check whether there is a channel ploted
if not self.ChanPlotted:
return
# check if what is plotted is all waveforms or unsorted
title = str(self.ChanTab['FeaturesFig'].figure.axes[0].get_title())
if not re.search('Waveforms|Unsorted', title):
return
# return if a tool is selected in the toolbar
if self.ChanTab['FeaturesFigNtb'].mode != '':
return
# create a new lasso instance
self.LassoCID = self.ChanTab['FeaturesFig'].figure.canvas.mpl_connect('button_press_event',
self.LassoAddRegion_proc)
#__________________________________________________________________________
def RemoveRegion_proc(self):
''' starts a lasso instance to draw a line around a ROI'''
# check whether there is a channel ploted
if not self.ChanPlotted:
return
# check if what is plotted is all waveforms or unsorted
title = str(self.ChanTab['FeaturesFig'].figure.axes[0].get_title())
if not re.search('Unit', title):
return
# return if a tool is selected in the toolbar
if self.ChanTab['FeaturesFigNtb'].mode != '':
return
# create a new lasso instance
self.LassoCID = self.ChanTab['FeaturesFig'].figure.canvas.mpl_connect('button_press_event',
self.LassoRemoveRegion_proc)
#__________________________________________________________________________
def LassoAddUnit_proc(self, event):
if self.ChanTab['FeaturesFig'].figure.canvas.widgetlock.locked():
if hasattr(self, 'LassoCID'):
self.ChanTab['FeaturesFig'].figure.canvas.mpl_disconnect(self.LassoCID)
del self.LassoCID
return
if event.inaxes is None or event.button != 1:
if hasattr(self, 'LassoCID'):
self.ChanTab['FeaturesFig'].figure.canvas.mpl_disconnect(self.LassoCID)
del self.LassoCID
return
# create a lasso instance
self.lasso = matplotlib_widgets.MyLasso(event.inaxes, (event.xdata, event.ydata),
self.LassoCallback_AddUnit,
color='gray', lw=1)
self.ChanTab['FeaturesFig'].figure.canvas.widgetlock(self.lasso)
#__________________________________________________________________________
def LassoKeep_proc(self, event):
if self.ChanTab['FeaturesFig'].figure.canvas.widgetlock.locked():
if hasattr(self, 'LassoCID'):
self.ChanTab['FeaturesFig'].figure.canvas.mpl_disconnect(self.LassoCID)
del self.LassoCID
return
if event.inaxes is None or event.button != 1:
if hasattr(self, 'LassoCID'):
self.ChanTab['FeaturesFig'].figure.canvas.mpl_disconnect(self.LassoCID)
del self.LassoCID
return
self.KeepBtn.setCheckable(True)
self.KeepBtn.setChecked(True)
self.lasso = matplotlib_widgets.MyLasso(event.inaxes, (event.xdata, event.ydata),
self.LassoCallback_Keep,
color='gray', lw=1)
self.ChanTab['FeaturesFig'].figure.canvas.widgetlock(self.lasso)
#__________________________________________________________________________
def LassoAddRegion_proc(self, event):
if self.ChanTab['FeaturesFig'].figure.canvas.widgetlock.locked():
if hasattr(self, 'LassoCID'):
self.ChanTab['FeaturesFig'].figure.canvas.mpl_disconnect(self.LassoCID)
del self.LassoCID
return
if event.inaxes is None or event.button != 1:
if hasattr(self, 'LassoCID'):
self.ChanTab['FeaturesFig'].figure.canvas.mpl_disconnect(self.LassoCID)
del self.LassoCID
return
self.lasso = matplotlib_widgets.MyLasso(event.inaxes, (event.xdata, event.ydata),
self.LassoCallback_AddRegion,
color='gray', lw=1)
self.ChanTab['FeaturesFig'].figure.canvas.widgetlock(self.lasso)
#__________________________________________________________________________
def LassoRemoveRegion_proc(self, event):
if self.ChanTab['FeaturesFig'].figure.canvas.widgetlock.locked():
if hasattr(self, 'LassoCID'):
self.ChanTab['FeaturesFig'].figure.canvas.mpl_disconnect(self.LassoCID)
del self.LassoCID
return
if event.inaxes is None or event.button != 1:
if hasattr(self, 'LassoCID'):
self.ChanTab['FeaturesFig'].figure.canvas.mpl_disconnect(self.LassoCID)
del self.LassoCID
return
self.lasso = matplotlib_widgets.MyLasso(event.inaxes, (event.xdata, event.ydata),
self.LassoCallback_RemoveRegion,
color='gray', lw=1)
self.ChanTab['FeaturesFig'].figure.canvas.widgetlock(self.lasso)
#__________________________________________________________________________
def LassoCallback_AddUnit(self, verts):
# disconnect Lasso callback
self.ChanTab['FeaturesFig'].figure.canvas.mpl_disconnect(self.LassoCID)
self.ChanTab['FeaturesFig'].figure.canvas.draw_idle()
del self.LassoCID
# release widget lock
self.ChanTab['FeaturesFig'].figure.canvas.widgetlock.release(self.lasso)
# delete lasso
del self.lasso
# copy the vertices of the polygon to the object and downsample them
n = len(verts)
self.verts = np.array(verts)
if n > 25:
self.verts = self.verts[range(0, n, n / 25)]
#pdb.set_trace()
# get the axes handle
ax = self.ChanTab['FeaturesFig'].figure.axes[0]
if re.search('Waveforms', ax.get_title()):
# test which points are inside the lasso
xypoints = self.XYData.data[self.Unsorted, :]
elif re.search('Unsorted', ax.get_title()):
xypoints = self.XYData.data
p = Path(self.verts).contains_points(xypoints)
# in case there were no points selected
if len(p) == 0:
self.MsgBox.setIcon(QtGui.QMessageBox.Warning)
self.MsgBox.setText('There were no selected points')
self.MsgBox.setwindowTitle('Warning')
self.MsgBox.show()
return
# set the unit name
self.NUnits = len(self.UnitsList)
self.CurUnitName = 'Unit%02d' % self.NUnits
# look for the unsorted plot handle in the axes
for k in self.ChanTab['FeaturesFig'].figure.axes[0].get_children():
if re.search('Unsorted', str(k.get_label())):
break
# obtain the unsorted points
unsortedData = xypoints[~p, :]
lunsort = len(unsortedData)
# select some indices to plot
if lunsort > self.nPtsSpin.value():
indx = range(0, lunsort, lunsort / self.nPtsSpin.value())
else:
indx = range(lunsort)
# replot the unsorted without the corresponding points to the new unit
k.set_data(unsortedData[:, 0][indx], unsortedData[:, 1][indx])
ax.draw_artist(k)
# select some indices to plot
unitData = xypoints[p, :]
lunit = len(unitData)
if lunit > self.nPtsSpin.value():
indx = range(0, lunit, lunit / self.nPtsSpin.value())
else:
indx = range(lunit)
ax.plot(unitData[:, 0][indx], unitData[:, 1][indx],
linestyle='',
marker=',',
mfc=self.UnitColors[self.NUnits],
mec=self.UnitColors[self.NUnits],
label='data_' + self.CurUnitName)
self.NUnits += 1
# if unit name not in combo box add it
if self.CurUnitName not in [str(self.What2Plot.itemText(k)) for k in range(self.What2Plot.count())]:
self.What2Plot.addItem(self.CurUnitName)
# add the indexes of the current unit to the h5file
if self.h5file.get_node(self.CurNodeName).__contains__(self.CurUnitName):
self.h5file.remove_node(self.CurNodeName, self.CurUnitName, recursive=True)
self.h5file.create_group(self.CurNodeName, self.CurUnitName)
self.h5file.create_array(self.CurNodeName + '/' + self.CurUnitName, 'Indx', self.Unsorted[p])
self.h5file.create_array(self.CurNodeName + '/' + self.CurUnitName, 'isMultiunit', False)
self.h5file.create_array(self.CurNodeName + '/' + self.CurUnitName, 'isBursting', False)
# update the list of unsorted indexes
self.Unsorted = self.Unsorted[~p]
# update the indexes of the unsorted units
if self.h5file.get_node(self.CurNodeName).__contains__('Unsorted'):
self.h5file.remove_node(self.CurNodeName, 'Unsorted')
self.h5file.create_array(self.CurNodeName, 'Unsorted', self.Unsorted)
# save changes to disk
self.h5file.flush()
# add log
self.AddLog('%s %s added' % (self.CurNodeName, self.CurUnitName))
# add unit to the units tab widget
self.UnitsTable_AddUnit(self.CurUnitName)
self.ChanTab['UnitFigures'][self.CurUnitName].figure.tight_layout()
self.ChanTab['UnitFigures'][self.CurUnitName].figure.canvas.draw()
# update the overview figure
for k in self.OverviewTab1['Figure'].figure.axes:
if str(self.CurChan) in k.get_title():
break
self.PlotChanOverview_proc(self.CurNode, axes2Plot=k)
for l in k.lines:
k.draw_artist(l)
#__________________________________________________________________________
def LassoCallback_Keep(self, verts):
# disconnect Lasso callback from figure
self.ChanTab['FeaturesFig'].figure.canvas.mpl_disconnect(self.LassoCID)
self.ChanTab['FeaturesFig'].figure.canvas.draw_idle()
del self.LassoCID
# release the lock from the lasso
self.ChanTab['FeaturesFig'].figure.canvas.widgetlock.release(self.lasso)
# erase lasso
del self.lasso
# copy the vertices of the polygon to the object and downsample them
n = len(verts)
self.verts = np.array(verts)
if n > 25:
self.verts = self.verts[range(0, n, n / 25)]
# test which points lay inside the polygon
p = Path(self.verts).contains_points(self.XYData.data)
# change to not checked
self.KeepBtn.setChecked(False)
self.KeepBtn.setCheckable(False)
# check how many points were selected
if len(np.flatnonzero(p)) <= self.WfSize:
print "Didn't doo anything: Too few points selected"
return
# create a KDTree object for efficient neighbor search
self.XYData = cKDTree(self.XYData.data[p, :])
# get unitname and number from the axes title
ax = self.ChanTab['FeaturesFig'].figure.axes[0]
self.CurUnitName = re.search('Unit[0-9]{2}', ax.get_title()).group()
self.CurUnit = int(re.search('(?<=Unit)[0-9]{2}', ax.get_title()).group())
# update plot:
for k in ax.get_children():
if re.search(str(k.get_label), self.CurUnitName):
k.set_data(self.XYData.data[:, 0], self.XYData.data[:, 1])
ax.draw_artist(k)
break
# return if no points selected
if len(p) < 1:
return
nodeName = self.CurNodeName + '/' + self.CurUnitName
# obtain the unit data
unitPts = self.h5file.get_node(nodeName, 'Indx').read()
# update the node containing the unit indices
self.h5file.remove_node(nodeName, 'Indx')
self.h5file.create_array(nodeName, 'Indx', unitPts[p])
# add the remaining points to the unsorted indexes
self.Unsorted = self.h5file.get_node(self.CurNodeName, 'Unsorted').read()
self.Unsorted = np.append(self.Unsorted, unitPts[~p])
self.Unsorted.sort()
# update the unsorted in the h5file
self.h5file.remove_node(self.CurNodeName, 'Unsorted')
self.h5file.create_array(self.CurNodeName, 'Unsorted', self.Unsorted)
# save changes to disk
self.h5file.flush()
# replot the unit avg waveform, histogram and autocorrelation
self.PlotUnitFigure_proc()
# replot the waveforms
self.plot_unit_waveforms()
# replot the features
self.PlotFeatures()
# update the information in the overview table
row = self.ChanSelector.currentIndex()
self.OverviewTab2['OverviewTable'].takeItem(row, self.CurUnit + 6)
lbl = QtGui.QTableWidgetItem(str(self.h5file.get_node(self.CurNodeName, self.CurUnitName).Indx.nrows))
self.OverviewTab2['OverviewTable'].setItem(row, self.CurUnit + 6, lbl)
# update the information on the unit label
self.ChanTab['UnitCountLabel'][self.CurUnitName].setText(str(self.h5file.get_node(self.CurNodeName, self.CurUnitName).Indx.nrows))
#__________________________________________________________________________
def LassoCallback_AddRegion(self, verts):
# disconnect the lasso from the canvas and redraw the figure
self.ChanTab['FeaturesFig'].figure.canvas.mpl_disconnect(self.LassoCID)
self.ChanTab['FeaturesFig'].figure.canvas.draw_idle()
del self.LassoCID
# release widget lock
self.ChanTab['FeaturesFig'].figure.canvas.widgetlock.release(self.lasso)
# delete lasso handle
del self.lasso
# get the vertices of the polygon to the object and downsample them if too many
n = len(verts)
self.verts = np.array(verts)
if n > 25:
self.verts = self.verts[range(0, n, n / 25)]
# check whether there is any unit
if not hasattr(self, 'CurUnitName') or not self.CurNode.__contains__(self.CurUnitName):
return
# get the axes handle
ax = self.ChanTab['FeaturesFig'].figure.axes[0]
# get the unsorted
self.Unsorted = self.h5file.get_node(self.CurNodeName, 'Unsorted').read()
# check what is plotted on the axes
if re.search('Waveforms', str(ax.get_title())):
# test which points are inside the lasso
p = Path(self.verts).contains_points(self.XYData.data[self.Unsorted, :])
self.XYData = cKDTree(self.XYData.data[self.Unsorted, :][p])
elif re.search('Unsorted', str(ax.get_title())):
# test which points are inside the lasso
p = Path(self.verts).contains_points(self.XYData.data)
self.XYData = cKDTree(self.XYData.data[p, :])
# update plot:
for k in ax.get_children():
if re.search(str(k.get_label), self.CurUnitName):
k.set_data(self.XYData.data[:, 0], self.XYData.data[:, 1])
ax.draw_artist(k)
break
# if more than 0 selected points
if len(p) > self.WfSize:
indx = self.Unsorted[p]
else:
print "Didn't add any unit: Too few points selected"
return
# update the unsorted
self.Unsorted = self.Unsorted[~p]
self.h5file.remove_node(self.CurNodeName, 'Unsorted')
self.h5file.create_array(self.CurNodeName, 'Unsorted', self.Unsorted)
# update the plots
for k in ax.get_children():
if re.search('Unsorted', str(k.get_label())):
pass
elif re.search(self.CurUnitName, str(k.get_label())):
pass
# update the unit information in the file
unit = self.h5file.get_node(self.CurNodeName + '/' + self.CurUnitName, 'Indx').read()
self.h5file.remove_node(self.CurNodeName + '/' + self.CurUnitName, 'Indx')
# append the new indexes to the waveform and sort
unit = np.append(unit, indx)
unit.sort()
# create a new array in the h5file to hold the updated unit information
self.h5file.create_array(self.CurNodeName + '/' + self.CurUnitName, 'Indx', unit)
# save changes to disk
self.h5file.flush()
# update the information in the overview table
row = self.ChanSelector.currentIndex()
self.OverviewTab2['OverviewTable'].takeItem(row, self.CurUnit + 6)
lbl = QtGui.QTableWidgetItem(str(self.h5file.get_node(self.CurNodeName, self.CurUnitName).Indx.nrows))
self.OverviewTab2['OverviewTable'].setItem(row, self.CurUnit + 6, lbl)
# update the information on the unit label
self.ChanTab['UnitCountLabel'][self.CurUnitName].setText(str(self.h5file.get_node(self.CurNodeName, self.CurUnitName).Indx.nrows))
# replot the unit avg waveform, histogram and autocorrelation
self.PlotUnitFigure_proc()
# replot the features
self.PlotFeatures()
#__________________________________________________________________________
def LassoCallback_RemoveRegion(self, verts):
# disconnect Lasso callback from figure
self.ChanTab['FeaturesFig'].figure.canvas.mpl_disconnect(self.LassoCID)
self.ChanTab['FeaturesFig'].figure.canvas.draw_idle()
del self.LassoCID
# release the lock from the lasso
self.ChanTab['FeaturesFig'].figure.canvas.widgetlock.release(self.lasso)
# copy the vertices of the polygon to the object and downsample them
n = len(verts)
self.verts = np.array(verts)
if n > 25:
self.verts = self.verts[range(0, n, n / 25)]
# test which points lay inside the polygon
p = Path(self.verts).contains_points(self.XYData.data)
# return if no points selected
if len(p) < 1:
return
# get unitname and number from the axes title
ax = self.ChanTab['FeaturesFig'].figure.axes[0]
self.CurUnitName = re.search('Unit[0-9]{2}', ax.get_title()).group()
self.CurUnit = int(re.search('(?<=Unit)[0-9]{2}', ax.get_title()).group())
# obtain the unit data
unitPts = self.h5file.get_node(self.CurNodeName, self.CurUnitName).Indx.read()
# update the node containing the unit indexes
self.h5file.remove_node(self.CurNodeName + '/' + self.CurUnitName, 'Indx')
self.h5file.create_array(self.CurNodeName + '/' + self.CurUnitName, 'Indx', unitPts[~p])
# add the remaining points to the unsorted indexes
self.Unsorted = self.h5file.get_node(self.CurNodeName, 'Unsorted').read()
self.Unsorted = np.append(self.Unsorted, unitPts[p])
self.Unsorted.sort()
# update the unsorted in the h5file
self.h5file.remove_node(self.CurNodeName, 'Unsorted')
self.h5file.create_array(self.CurNodeName, 'Unsorted', self.Unsorted)
# save changes to disk
self.h5file.flush()
# update the information in the overview table
row = self.ChanSelector.currentIndex()
self.OverviewTab2['OverviewTable'].takeItem(row, self.CurUnit + 6)
lbl = QtGui.QTableWidgetItem(str(self.h5file.get_node(self.CurNodeName, self.CurUnitName).Indx.nrows))
self.OverviewTab2['OverviewTable'].setItem(row, self.CurUnit + 6, lbl)
# update the information on the unit label
self.ChanTab['UnitCountLabel'][self.CurUnitName].setText(str(self.h5file.get_node(self.CurNodeName, self.CurUnitName).Indx.nrows))
# replot the features
self.PlotFeatures()
# replot the waveforms
self.plot_unit_waveforms()
# replot the unit avg waveform, histogram and autocorrelation
self.PlotUnitFigure_proc()
#PlotChanOverview_proc(self.CurNode, axes2Plot)
# erase lasso
del self.lasso
#__________________________________________________________________________
    def UnitsTable_AddUnit(self, unitName):
        '''Creates a new tab for *unitName* (color button, plot-raw and
        isMultiunit checkboxes, waveform-count label, delete button and the
        unit figure) and registers the unit in the overview table.

        Parameters:
            unitName: str of the form 'UnitXX', e.g. 'Unit03'.'''
        # check whether that tab already exists; if so, nothing to do
        for k in range(self.ChanTab['UnitTabsWidget'].count()):
            if unitName == self.ChanTab['UnitTabsWidget'].tabText(k):
                return
        self.CurUnitName = unitName
        # create a widget and a layout to hold the tab contents
        widget = QtGui.QWidget()
        vlay = QtGui.QVBoxLayout()
        vlay.setSpacing(2)
        vlay.setMargin(0)
        # extract the unit number from the 'UnitXX' name
        unitNo = int(re.search('(?<=Unit)[0-9]{2}', unitName).group())
        # add the unit number to the list of known units
        self.UnitsList.append(unitNo)
        # create a btn to change unit color
        hlay = QtGui.QHBoxLayout()
        hlay.setMargin(0)
        hlay.addStretch(1)
        self.ChanTab['UnitBtns'][unitName] = QtGui.QPushButton('Unit %02d' % unitNo)
        self.ChanTab['UnitBtns'][unitName].setMaximumHeight(20)
        self.ChanTab['UnitBtns'][unitName].clicked.connect(self.ChangeUnitColor_proc)
        # button background reflects the unit color (0-1 floats scaled to 0-255)
        self.ChanTab['UnitBtns'][unitName].setStyleSheet('QPushButton {background: rgb%s}' % str(tuple(np.int16(255 * self.UnitColors[unitNo]))))
        hlay.addWidget(self.ChanTab['UnitBtns'][unitName])
        hlay.addStretch(1)
        # plot-raw check button: toggles the raw waveforms of this unit
        self.ChanTab['PlotRawCheck'][unitName] = QtGui.QCheckBox()
        self.ChanTab['PlotRawCheck'][unitName].setObjectName(str(unitNo))
        self.ChanTab['PlotRawCheck'][unitName].setChecked(False)
        self.ChanTab['PlotRawCheck'][unitName].setMaximumHeight(20)
        self.ChanTab['PlotRawCheck'][unitName].stateChanged.connect(self.SetWaveformVisible_proc)
        lbl = QtGui.QLabel('Plot Raw ?')
        lbl.setMaximumHeight(20)
        hlay.addWidget(lbl)
        hlay.addWidget(self.ChanTab['PlotRawCheck'][unitName])
        hlay.addStretch(1)
        # is Multiunit check button: persisted to the h5 file on change
        self.ChanTab['isMultiunitCheck'][unitName] = QtGui.QCheckBox()
        self.ChanTab['isMultiunitCheck'][unitName].setObjectName(str(unitNo))
        self.ChanTab['isMultiunitCheck'][unitName].setChecked(False)
        self.ChanTab['isMultiunitCheck'][unitName].setMaximumHeight(20)
        self.ChanTab['isMultiunitCheck'][unitName].stateChanged.connect(self.SetisMultiunit_proc)
        lbl = QtGui.QLabel('isMultiunit ?')
        lbl.setMaximumHeight(20)
        hlay.addWidget(lbl)
        hlay.addWidget(self.ChanTab['isMultiunitCheck'][unitName])
        hlay.addStretch(1)
        # set the checkstate of the 'isMultiunit' check according to what is
        # saved in the h5file; create the array with a False default if missing
        if self.h5file.get_node('/Spikes/Chan_%03d/Unit%02d' % (self.CurChan, unitNo)).__contains__('isMultiunit'):
            isMultiunit = self.h5file.get_node('/Spikes/Chan_%03d/Unit%02d' % (self.CurChan, unitNo),
                                               'isMultiunit').read()
            if isMultiunit:
                self.ChanTab['isMultiunitCheck'][unitName].setChecked(True)
            else:
                self.ChanTab['isMultiunitCheck'][unitName].setChecked(False)
        else:
            self.h5file.create_array('/Spikes/Chan_%03d/Unit%02d' % (self.CurChan, unitNo), 'isMultiunit', False)
        # add a label with the waveform count (number of rows in Indx)
        lbl = QtGui.QLabel('Count')
        lbl.setMaximumHeight(20)
        hlay.addWidget(lbl)
        self.ChanTab['UnitCountLabel'][unitName] = QtGui.QLabel('%d' % self.h5file.get_node(self.CurNodeName, unitName).Indx.nrows)
        self.ChanTab['UnitCountLabel'][unitName].setMaximumHeight(20)
        hlay.addWidget(self.ChanTab['UnitCountLabel'][unitName])
        hlay.addStretch(1)
        # add delete-unit button
        self.ChanTab['DelUnitBtns'][unitName] = QtGui.QPushButton('Del Unit')
        self.ChanTab['DelUnitBtns'][unitName].setObjectName(unitName)
        self.ChanTab['DelUnitBtns'][unitName].setMaximumHeight(20)
        self.ChanTab['DelUnitBtns'][unitName].clicked.connect(self.DelUnit_proc)
        hlay.addWidget(self.ChanTab['DelUnitBtns'][unitName])
        hlay.addStretch(1)
        vlay.addLayout(hlay)
        # add the figure widget with its own navigation toolbar
        self.ChanTab['UnitFigures'][unitName] = matplotlib_widgets.MplWidget()
        self.ChanTab['UnitFigures'][unitName].setObjectName(unitName)  # set the name of the object
        self.ChanTab['UnitFigures'][unitName].figure.set_facecolor('k')
        n = matplotlib_widgets.NavToolbar(self.ChanTab['UnitFigures'][unitName].figure.canvas,
                                          widget, coordinates=False)
        n.setIconSize(QtCore.QSize(12, 12))
        n.setOrientation(QtCore.Qt.Vertical)
        vlay.addWidget(self.ChanTab['UnitFigures'][unitName])
        #vlay.addWidget(n)
        hlay = QtGui.QHBoxLayout()
        hlay.setSpacing(0)
        hlay.setMargin(2)
        hlay.addLayout(vlay)
        hlay.addWidget(n)
        widget.setLayout(hlay)
        # Plot the data (avg waveform, ISI histogram, autocorrelation)
        self.PlotUnitFigure_proc()
        #if unitName == 'Unit00':
        #    self.ChanTab['UnitTabsWidget'].removeTab(0)
        self.ChanTab['UnitTabsWidget'].addTab(widget, unitName)
        indx = self.ChanTab['UnitTabsWidget'].count() - 1
        # tab text color matches the unit color
        color = QtGui.QColor(*np.int32(self.UnitColors[indx] * 255))
        self.ChanTab['UnitTabsWidget'].tabBar().setTabTextColor(indx, color)
        # update the information in the overview table; unit columns start
        # at column 6 (unitNo + 6), grow the table as needed
        row = self.ChanSelector.currentIndex()
        if self.OverviewTab2['OverviewTable'].columnCount() <= (unitNo + 6):
            self.OverviewTab2['OverviewTable'].insertColumn(self.OverviewTab2['OverviewTable'].columnCount())
            nCols = self.OverviewTab2['OverviewTable'].columnCount()
            self.OverviewTab2['OverviewTable'].setColumnWidth(nCols - 1, 65)
            self.OverviewTab2['OverviewTable'].setHorizontalHeaderItem(nCols - 1,
                                                                       QtGui.QTableWidgetItem('Unit%02d' % unitNo))
        self.OverviewTab2['OverviewTable'].takeItem(row, unitNo + 6)
        lbl = QtGui.QTableWidgetItem(str(self.h5file.get_node(self.CurNodeName, unitName).Indx.nrows))
        self.OverviewTab2['OverviewTable'].setItem(row, unitNo + 6, lbl)
        # update the unsorted number in the overview table (column 4)
        self.OverviewTab2['OverviewTable'].takeItem(row, 4)
        lbl = QtGui.QTableWidgetItem(str(len(self.Unsorted)))
        self.OverviewTab2['OverviewTable'].setItem(row, 4, lbl)
#__________________________________________________________________________
    def PlotUnitFigure_proc(self):
        '''Redraws the three-panel figure of the current unit
        (self.CurUnitName): average waveform with spread, ISI histogram,
        and autocorrelogram.'''
        # get a unit name and number
        unitNo = int(re.search('(?<=Unit)[0-9]{2}', self.CurUnitName).group())
        # find the figure that has a particular name
        fig = self.ChanTab['UnitFigures'][self.CurUnitName].figure
        # check whether we have to create axes; otherwise clear the old ones
        if len(fig.axes) == 0:
            ax0 = fig.add_subplot(131)
            ax1 = fig.add_subplot(132)
            ax2 = fig.add_subplot(133)
        else:
            ax0 = fig.axes[0]
            ax0.cla()
            ax1 = fig.axes[1]
            ax1.cla()
            ax2 = fig.axes[2]
            ax2.cla()
        # set the axis background color
        # NOTE(review): set_axis_bgcolor was removed in newer matplotlib
        # (use set_facecolor) — confirm the pinned matplotlib version
        ax0.set_axis_bgcolor('k')
        ax1.set_axis_bgcolor('k')
        ax2.set_axis_bgcolor('k')
        # PLOT AVERAGE WAVEFORM #####
        x = range(self.WfSize)
        # p holds the spike indexes of the current unit
        p = self.h5file.get_node(self.CurNodeName, self.CurUnitName).Indx.read()
        m = self.CurWaveforms[p, :].mean(axis=0)
        s = self.CurWaveforms[p, :].std(axis=0)
        mn = self.CurWaveforms[p, :].min(axis=0)
        mx = self.CurWaveforms[p, :].max(axis=0)
        # plot average waveform
        ax0.plot(x, m, color=self.UnitColors[unitNo], lw=2, label=self.CurUnitName)
        # plot shaded area of 3 standard deviations around it
        ax0.fill_between(x, m + 3 * s, m - 3 * s, color=self.UnitColors[unitNo],
                         alpha=0.5, label=self.CurUnitName)
        # plot maximum and minimum boundaries
        ax0.fill_between(x, mx, mn, color=self.UnitColors[unitNo], alpha=0.35,
                         label=self.CurUnitName)
        ax0.set_xlim(0, self.WfSize - 1)
        ax0.set_yticklabels([])
        ax0.grid(color=[.5, .5, .5])
        ax0.tick_params(color=[.5, .5, .5], labelcolor=[.5, .5, .5])
        for k in ax0.spines.values():
            k.set_edgecolor([.5, .5, .5])
        # PLOT ISI HISTOGRAM #####
        # inter-spike intervals below 100 (timestamp units), at most 1000 of them
        dts = np.diff(self.CurTs[p])
        dts = dts[dts < 100]
        ld = len(dts)
        if ld > 1000:
            indx = range(0, 1000)
        else:
            indx = range(ld)
        if len(dts[indx]) > 0:
            ax1.hist(dts[indx], bins=100, range=[0, 100], ec='none',
                     color=self.UnitColors[unitNo], label=self.CurUnitName)
        ax1.tick_params(color=[.5, .5, .5], labelcolor=[.5, .5, .5])
        for k in ax1.spines.values():
            k.set_edgecolor([.5, .5, .5])
        # waveform width in the same units as the timestamps
        # (assumes self.Sf is the sampling frequency — TODO confirm units)
        WfWidth = self.WfSize * 1000 / self.Sf
        try:
            # fraction of ISIs shorter than 1.5 waveform widths
            collision = 100 * np.flatnonzero(dts < 1.5 * WfWidth).size / np.float(dts.size)
            # put a "percentage of collision" label
            ax1.text(0.5, 0.01, u'Collision = %0.2f %%' % collision,
                     transform=ax1.transAxes, color='w', size=10, ha='center')
        except:
            # NOTE(review): bare except silently ignores any failure here
            # (e.g. dts.size == 0 division) — best-effort label only
            pass
        ax1.set_xlim(0, 100)
        # PLOT AUTOCORRELATION #####
        # all pairwise timestamp differences of the first 25000 time units
        ts = self.CurTs[p]
        time = 25000
        ts = ts[np.flatnonzero(ts < time)]
        ts11 = np.tile(ts, (ts.size, 1))
        ts22 = np.tile(ts, (ts.size, 1)).transpose()
        x = ts11 - ts22
        # NOTE(review): np.histogram's "normed" keyword is deprecated in
        # newer numpy in favor of "density" — confirm the pinned version
        ac, lags = np.histogram(x.flatten(), bins=100, range=(-500, 500),
                                normed=True)
        # zero out the central (zero-lag) bin
        ac[np.flatnonzero(lags == 0)] = 0.0
        ax2.bar(lags[0:-1], ac, width=np.diff(lags)[0], edgecolor='none',
                color=self.UnitColors[unitNo])
        '''if ts.size > 1000: ts = ts[0:1000]
        ac, x = autocorr(ts, binSize = 20, Win = [0,10000],
                         mode = 'fft', Range = [-150, 150])
        ac[ac.argmax()] = 0
        ax2.plot(x, ac, color = self.UnitColors[unitNo], lw = 2)'''
        ax2.set_xlim(-500, 500)
        #ax2.set_ylim(0, ac.max())
        ax2.tick_params(color=[.5, .5, .5], labelcolor=[.5, .5, .5])
        for k in ax2.spines.values():
            k.set_edgecolor([.5, .5, .5])
        ax2.set_yticklabels([])
        self.ChanTab['UnitFigures'][self.CurUnitName].figure.tight_layout()
        self.ChanTab['UnitFigures'][self.CurUnitName].figure.canvas.draw()
#__________________________________________________________________________
    def SetWaveformVisible_proc(self):
        ''' makes the raw waveform of each unit visible or invisible

        Slot for the per-unit "Plot Raw ?" checkbox; the sender's
        objectName carries the unit number.'''
        sender = self.sender()
        state = sender.checkState()
        name = int(sender.objectName())
        # get unit name and number from the currently selected unit tab
        unitName = str(self.ChanTab['UnitTabsWidget'].tabText(self.ChanTab['UnitTabsWidget'].currentIndex()))
        unitNo = int(re.search('(?<=Unit)[0-9]{2}', unitName).group())
        # get axes handle and children labels
        ax = self.ChanTab['WavesFigure'].figure.axes[0]
        childrenLabels = [str(k.get_label()) for k in ax.get_children()]
        # get the node holding the unit's spike indexes
        node = self.h5file.get_node(self.CurNodeName + '/' + unitName, 'Indx')
        # get the number of spikes to plot
        nspikes = self.NSpikesSpin.value()
        if state == 2:  # if checked (Qt.Checked)
            # subsample the unit's indexes down to at most nspikes
            nrows = node.nrows
            if nrows > nspikes:
                unitIndx = node.read(start=0, stop=nrows, step=nrows / nspikes)
            else:
                unitIndx = node.read()
            # obtain the length of units to plot
            n = len(unitIndx)
            # create an array of Nones to append; a None between waveforms
            # makes matplotlib break the line, so all waveforms can be drawn
            # as one artist
            nones = np.array(n * [None], ndmin=2).T
            # create the x indexes
            Ts = np.tile(np.arange(self.WfSize), (n, 1))
            Ts = np.append(Ts, nones, axis=1).reshape((n * (self.WfSize + 1),))
            # get the waveforms, append nones, and reshape it to a vector
            Wf = self.CurNode.Waveforms[unitIndx, :]
            Wf = np.append(Wf, nones, axis=1).reshape((n * (self.WfSize + 1),))
            # create the plot if it doesn't exists
            if unitName not in childrenLabels:
                ax.plot(Ts, Wf, color=self.UnitColors[unitNo, :],
                        alpha=0.7, label=unitName)
            # if exists update the data
            elif unitName in childrenLabels:
                # NOTE(review): if no child matches 'Unit%02d' % name, k is
                # left as the last child after the loop and still gets
                # set_data — confirm a match is always present here
                for k in self.ChanTab['WavesFigure'].figure.axes[0].get_children():
                    if k.get_label() == 'Unit%02d' % name:
                        break
                k.set_data(Ts, Wf)
                k.set_visible(True)
        elif state == 0:  # if unchecked (Qt.Unchecked)
            # hide (do not remove) the unit's waveform artist
            for k in ax.get_children():
                if str(k.get_label()) == unitName:
                    k.set_visible(False)
        # set axes limit
        lim = self.WaveAxYLim_Spin.value()
        ax.set_ylim(-lim, lim)
        # finally redraw the figure
        self.ChanTab['WavesFigure'].figure.canvas.draw()
#__________________________________________________________________________
def SetisMultiunit_proc(self):
sender = self.sender()
#state = sender.checkState()
unitNo = int(sender.objectName())
# current node name
nodeName = '/Spikes/Chan_%03d/Unit%02d' % (self.CurChan, unitNo)
# eliminate 'isMultiunit' f already exists
if self.h5file.get_node(nodeName).__contains__('isMultiunit'):
self.h5file.remove_node(nodeName, 'isMultiunit')
# create a new "isMultiunt" array to hold the value of the cehckbox
self.h5file.create_array(nodeName, 'isMultiunit', bool(sender.checkState()))
# save changes to disk
self.h5file.flush()
#__________________________________________________________________________
def ExchangeUnitName_proc(self, initial, final):
tb = self.ChanTab['UnitTabBarWidget']
# get the names of the changed tabs
oldNameBgTab = tb.tabText(final)
newNameBgTab = tb.tabText(initial)
# change the name of the background tab
tb.setTabText(final, newNameBgTab)
# change the name of the front tab the oldname of the unit
tb.setTabText(tb.currentIndex(), oldNameBgTab)
# PROPAGATE CHANGES TO THE H5FILE #####
# first change the background moved unit name to "tmpUnitData"
self.h5file.renameNode(where='/Spikes/Chan_%03d' % self.CurChan,
name=oldNameBgTab, newname='tmpUnitData',
overwrite=True)
# second change the front moved unit name to the old name of the background unit
self.h5file.renameNode(where='/Spikes/Chan_%03d' % self.CurChan,
name=newNameBgTab, newname=oldNameBgTab,
overwrite=True)
# third change the background moved unit name to its new name
self.h5file.renameNode(where='/Spikes/Chan_%03d' % self.CurChan,
name='tmpUnitData', newname=newNameBgTab,
overwrite=True)
# CHANGE THE NAME OF THE FIGURES #####
# first change the figure name of the background unit to "tmpFigName"
for k in self.ChanTab['UnitFigures']:
if k.objectName() == oldNameBgTab:
k.setObjectName('tmpFigName')
break
# second, change the front tab figure name to the old background tab name
for k in self.ChanTab['UnitFigures']:
if k.objectName() == newNameBgTab:
k.setObjectName(oldNameBgTab)
break
# third, change the figname of the background tab to the new one
for k in self.ChanTab['UnitFigures']:
if k.objectName() == 'tmpFigName':
k.setObjectName(newNameBgTab)
break
# CHANGE UNIT COLOR #####
self.ChangeUnitColor_proc(unitName=newNameBgTab,
color=tuple(np.append(np.int32(self.UnitColors[final] * 255), 255)))
self.ChangeUnitColor_proc(unitName=oldNameBgTab,
color=tuple(np.append(np.int32(self.UnitColors[initial] * 255), 255)))
#__________________________________________________________________________
def RepairUnitNames_proc(self):
unitNames = [k for k in self.CurNode.__members__ if 'Unit' in k]
for j, k in enumerate(unitNames):
if k != 'Unit%02d' % j:
self.h5file.renameNode(self.CurChan, k, 'Unit%02d' % j)
self.h5file.flush()
#__________________________________________________________________________
def CallMergeUnits_proc(self):
if not self.H5FileLoaded:
return
self.MergeUnitsWidget.list1.clear()
self.MergeUnitsWidget.list2.clear()
unitsList = [k for k in self.CurNode.__members__ if 'Unit' in k]
unitsList.sort()
self.MergeUnitsWidget.list1.addItems(unitsList)
self.MergeUnitsWidget.show()
#__________________________________________________________________________
def MergeUnits_proc(self):
# get the list of units to merge
units2Merge = [str(self.MergeUnitsWidget.list2.item(k).text()) for k in range(self.MergeUnitsWidget.list2.count())]
# sort the names
units2Merge.sort()
# if fewer than 2 return
if len(units2Merge) < 2:
return
# store the unit indexes in a list, sort them, and trnasform it into an array
newUnit = []
for k in units2Merge:
newUnit.extend(self.CurNode.__getattr__(k).Indx.read())
newUnit.sort()
newUnit = np.array(newUnit)
# remove all the listed units from the h5file
for k in units2Merge:
self.h5file.remove_node(self.CurNodeName, k, recursive=True)
# create a group with the name of the first unit in the list, and
# add all the indices of that
self.h5file.create_group(self.CurNodeName, units2Merge[0])
self.h5file.create_array(self.CurNodeName + '/' + units2Merge[0], 'Indx', newUnit)
self.h5file.create_array(self.CurNodeName + '/' + units2Merge[0], 'isMultiunit', False)
self.h5file.create_array(self.CurNodeName + '/' + units2Merge[0], 'isBursting', False)
# save changes to disk
self.h5file.flush()
# add log
self.AddLog('%s %s merged' % (self.CurNodeName, str(units2Merge)))
# REMOVE ALL THE GRAPHICAL ELEMENTS #####
# get the axes to remove from
ax = self.ChanTab['WavesFigure'].figure.axes[0]
for k in units2Merge[1:]:
# remove the tabs
for tabIndx in range(self.ChanTab['UnitTabsWidget'].count()):
if str(self.ChanTab['UnitTabsWidget'].tabText(tabIndx)) == k:
self.ChanTab['UnitTabsWidget'].removeTab(tabIndx)
# remove unit figure
self.ChanTab['UnitFigures'][k].figure.clear()
self.ChanTab['UnitFigures'][k].close()
self.ChanTab['UnitFigures'].pop(k, 0)
# removes the unitname from the what2 plot list
for n in range(self.What2Plot.count()):
if self.What2Plot.itemText(n) == k:
self.What2Plot.removeItem(n)
# eliminate the raw waveforms from the plot
for line in ax.lines:
if k in line.get_label():
line.remove()
# remove the unit from the list
unitNo = int(re.search('[0-9]{2}', k).group())
self.UnitsList.remove(unitNo)
# update the information in the overview table
#self.OverviewTab2['OverviewTable'].takeItem(self.ChansList.index(self.CurChan),
# unitNo+4)
# redraw the waveforms figure
self.ChanTab['WavesFigure'].figure.canvas.draw()
# replot features
self.PlotFeatures()
# add the merged unit to the table
self.UnitsTable_AddUnit(units2Merge[0])
#__________________________________________________________________________
def CallMoveUnits_proc(self):
if not self.H5FileLoaded:
return
self.MoveUnitsWidget.list.clear()
unitsList = [k for k in self.CurNode.__members__ if 'Unit' in k]
unitsList.sort()
self.MoveUnitsWidget.list.addItems(unitsList)
self.MoveUnitsWidget.show()
#__________________________________________________________________________
def MoveUnits_proc(self):
# first get the needed changes
old = []
new = []
for k in range(self.MoveUnitsWidget.list.count()):
if 'Unit%02d' % k != str(self.MoveUnitsWidget.list.item(k).text()):
old.append(str(self.MoveUnitsWidget.list.item(k).text()))
new.append('Unit%02d' % k)
# in case no changes are needed
if len(old) == 0:
return
# RENAME ALL THE UNITS AND GRAPHICAL ELEMENTS TO "_tmp" #####
for k in self.CurNode.__members__:
if 'Unit' in k:
# rename the nodes
self.h5file.renameNode(self.CurNodeName, name=k, newname=k + '_tmp')
for key in ['UnitFigures', 'UnitCountLabel', 'DelUnitBtns',
'PlotRawCheck', 'UnitBtns', 'isMultiunitCheck']:
self.ChanTab[key][k + '_tmp'] = self.ChanTab[key][k]
self.ChanTab[key].pop(k, 0) # remove
for j, k in zip(old, new):
self.ChangeUnitName_proc(j + '_tmp', k)
# move everything back
for k in self.CurNode.__members__:
if '_tmp' in k:
if k.replace('_tmp', '') in self.CurNode.__members__:
self.h5file.remove_node(self.CurNodeName, name=k)
for key in ['UnitFigures', 'UnitCountLabel', 'DelUnitBtns',
'PlotRawCheck', 'UnitBtns', 'isMultiunitCheck']:
self.ChanTab[key].deleteLater()
self.ChanTab[key].pop(k, 0)
else:
self.h5file.renameNode(self.CurNodeName, name=k, newname=k.replace('_tmp', ''))
for key in ['UnitFigures', 'UnitCountLabel', 'DelUnitBtns',
'PlotRawCheck', 'UnitBtns', 'isMultiunitCheck']:
self.ChanTab[key][k.replace('_tmp', '')] = self.ChanTab[key][k]
self.ChanTab[key].pop(k, 0)
# save changes to disk
self.h5file.flush()
#__________________________________________________________________________
def ChangeUnitName_proc(self, oldName, newName):
# rename node
self.h5file.renameNode(self.CurNodeName, name=oldName, newname=newName,
overwrite=True)
# get the unit numbers from the names
oldUnitNo = int(re.search('[0-9]{2}', oldName).group())
newUnitNo = int(re.search('[0-9]{2}', newName).group())
# move the tab and change its name
self.ChanTab['UnitTabBarWidget'].setTabText(newUnitNo, newName)
self.ChanTab['UnitTabBarWidget'].moveTab(oldUnitNo, newUnitNo)
for key in ['UnitFigures', 'UnitCountLabel', 'DelUnitBtns',
'PlotRawCheck', 'UnitBtns', 'isMultiunitCheck']:
self.ChanTab[key][newName] = self.ChanTab[key][oldName]
self.ChanTab[key][newName].setObjectName(newName)
self.ChanTab[key].pop(oldName, 0)
# change color of the unit
self.ChangeUnitColor_proc(newName, color=255 * self.UnitColors[newUnitNo])
#__________________________________________________________________________
    def CleanWavesFigure_proc(self):
        # Force a repaint of the raw-waveforms canvas. NOTE(review): despite
        # the name, this does not clear the axes -- it only redraws them.
        self.ChanTab['WavesFigure'].figure.canvas.draw()
#__________________________________________________________________________
    def UnitsTable_AddRow(self):
        # Refresh the cached "current unit" index and name from the units tab
        # widget. NOTE(review): despite the name, no table row is added here;
        # confirm whether the row-adding logic was moved elsewhere.
        self.CurUnit = self.ChanTab['UnitTabsWidget'].currentIndex()
        self.CurUnitName = self.ChanTab['UnitTabsWidget'].tabText(self.CurUnit)
#__________________________________________________________________________
    def DelUnit_proc(self):
        """Delete the unit whose 'delete' button emitted this signal.

        The unit's spike indices are merged back into the channel's
        'Unsorted' array on disk, and every GUI element tied to the unit
        (tab, figure, combo entry, overview cell, raw-waveform line) is
        removed."""
        if not self.H5FileLoaded or not self.ChanPlotted:
            return
        # get sender (the per-unit delete button; its objectName is the unit name)
        sender = self.sender()
        # get unit name and number
        unitName = str(sender.objectName())
        unitNo = int(re.search('(?<=Unit)[0-9]{2}', unitName).group())
        # remove the unit from the list
        self.UnitsList.remove(unitNo)
        # get the indexes of the unit
        indx = self.h5file.get_node(self.CurNodeName, unitName).Indx.read()
        # get unsorted, append the indexes from the unit and update that
        # to the h5file (the array is recreated because PyTables arrays
        # cannot be resized in place)
        self.Unsorted = self.h5file.get_node(self.CurNodeName, 'Unsorted').read()
        self.Unsorted = np.append(self.Unsorted, indx)
        self.Unsorted.sort()
        self.h5file.remove_node(self.CurNodeName, 'Unsorted')
        self.h5file.remove_node(self.CurNodeName, unitName, recursive=True)
        self.h5file.create_array(self.CurNodeName, 'Unsorted', self.Unsorted)
        # add log
        self.AddLog('%s %s deleted' % (self.CurNodeName, unitName))
        # remove the tab
        # NOTE(review): if no tab text matches, tabIndx keeps its last value
        # and that tab is removed anyway -- confirm a match is guaranteed.
        for tabIndx in range(self.ChanTab['UnitTabsWidget'].count()):
            if str(self.ChanTab['UnitTabsWidget'].tabText(tabIndx)) == unitName:
                break
        self.ChanTab['UnitTabsWidget'].removeTab(tabIndx)
        # close and remove unit figure
        plt.close(self.ChanTab['UnitFigures'][unitName].figure)
        self.ChanTab['UnitFigures'].pop(unitName, 0)
        # removes the unitname from the what2 plot list
        for n in range(self.What2Plot.count()):
            if self.What2Plot.itemText(n) == unitName:
                self.What2Plot.removeItem(n)
        # update the information in the overview table
        self.OverviewTab2['OverviewTable'].takeItem(self.ChansList.index(self.CurChan),
                                                    unitNo + 4)
        # eliminate the raw waveforms from the plot
        # NOTE(review): only the first matching line is removed (break),
        # whereas the merge code removes all matching lines -- confirm
        # exactly one line carries this unit's label.
        ax = self.ChanTab['WavesFigure'].figure.axes[0]
        for line in ax.lines:
            if unitName in line.get_label():
                line.remove()
                break
        # redraw the waveforms figure
        self.ChanTab['WavesFigure'].figure.canvas.draw()
        # replot features
        self.PlotFeatures()
#__________________________________________________________________________
    def ChangeUnitColor_proc(self, unitName=None, color=None):
        '''Change unit color everywhere the unit is drawn.

        inputs:
            unitName : string containing the unit name; when None/False the
                       unit is taken from the widget that emitted the signal
                       and the user is asked to pick a color with a dialog
            color : must be a four element RGB tuple from 0 to 255, for example,
                    the getRgb() output from a Qt Color instance.
                    The fourth element is the alpha (usually = to 255)'''
        if unitName in [None, False]:
            sender = self.sender()
            unitName = str(sender.text()).replace(' ', '')
        unitNo = int(re.search('[0-9]{1,3}', unitName).group())
        if not np.any(color):
            # no color given: open a dialog preloaded with the button color
            c = QtGui.QColorDialog()
            color = c.getColor(sender.palette().color(1))
            if not color.isValid():
                return
        if isinstance(color, QtGui.QColor):
            qtColor = color
        else:
            qtColor = QtGui.QColor(*color)
        # matplotlib expects RGB floats in [0, 1]
        mplColor = np.array(qtColor.getRgb()[0:3]) / 255.0
        # when triggered from a unit button, repaint the button background
        if isinstance(self.sender(), QtGui.QPushButton) and \
                'Unit' in self.sender().text():
            self.sender().setStyleSheet('QPushButton {background: rgb%s}' % str(qtColor.getRgb()[0:3]))
        self.UnitColors[unitNo, :] = mplColor
        # get the figure with a name equal to the current unit
        ax = self.ChanTab['UnitFigures'][unitName].figure.axes
        # iterate over axes to change colors of all lines/collections/patches
        for k in ax:
            for j in k.lines:
                j.set_color(mplColor)
            for j in k.collections:
                j.set_color(mplColor)
            for j in k.patches:
                j.set_color(mplColor)
        # search a figure with a specific name
        self.ChanTab['UnitFigures'][unitName].figure.canvas.draw()
        # change the color of the raw waveforms
        for k in self.ChanTab['WavesFigure'].figure.axes[0].lines:
            if re.search('Unit%02d' % unitNo, str(k.get_label())):
                k.set_color(mplColor)
        self.ChanTab['WavesFigure'].figure.canvas.draw()
        # change the color in the features plot
        for k in self.ChanTab['FeaturesFig'].figure.axes[0].lines:
            if re.search('Unit%02d' % unitNo, str(k.get_label())):
                k.set_color(mplColor)
        self.ChanTab['FeaturesFig'].figure.canvas.draw()
        # finally, tint the unit's tab label with the same color
        self.ChanTab['UnitTabsWidget'].tabBar().setTabTextColor(unitNo, qtColor)
#__________________________________________________________________________
def ResetChannelTab_proc(self):
''' reset the units tab'''
self.NUnits = 0
# clear the unit figures
for k in self.ChanTab['UnitFigures']:
plt.close(self.ChanTab['UnitFigures'][k].figure)
self.ChanTab['UnitFigures'] = {}
# clean the button dictionaries
for key in ['DelUnitBtns', 'UnitCountLabel', 'UnitBtns', 'PlotRawCheck', 'isMultiunitCheck']:
for k in self.ChanTab[key].keys():
self.ChanTab[key][k].deleteLater()
self.ChanTab[key] = {}
# Reset WavesFigure canvas
ax = self.ChanTab['WavesFigure'].figure.axes[0]
ax.cla()
self.SampleWaveform, = ax.plot([], color=[.5, .5, .5], lw=2,
animated=True)
ax.set_ylim(-1000, 1000)
ax.set_xlim(-2, self.WfSize + 1)
ax.tick_params(color=[.5, .5, .5], labelcolor=[.5, .5, .5])
for k in ax.spines.values():
k.set_edgecolor([.5, .5, .5])
self.Slice1Ln = ax.axvline(0, color=[.5, .5, .5])
self.Slice2Ln = ax.axvline(0, color=[.5, .5, .5], linestyle='--')
ax.grid(color=[.5, .5, .5])
self.ChanTab['WavesFigure'].figure.tight_layout()
self.ChanTab['WavesFigure'].figure.canvas.draw()
# clean the 3d widget
for k in self.Widget3d.items:
self.Widget3d.removeItem(k)
#self.Fig3d.clf()
# set the current indexes of the X and Y variable-selecting comboboxes
self.XPlot.setCurrentIndex(0)
self.YPlot.setCurrentIndex(1)
self.ZPlot.setCurrentIndex(2)
# reset Units list
self.UnitsList = []
# reset the units tabbed widget
tabs = range(self.ChanTab['UnitTabsWidget'].count())
tabs.reverse()
if len(tabs) > 0:
for k in tabs:
self.ChanTab['UnitTabsWidget'].removeTab(k)
# reset the time scroll widget and axes
self.TimeScroll['VZoom'].setValue(1000)
self.TimeScroll['HZoom'].setValue(500)
self.TimeScroll['HScroll'].setValue(0)
self.TimeScroll['Figure'].figure.axes[0].cla()
self.TimeScroll['Ax'].set_xticklabels([])
self.TimeScroll['Ax'].set_yticklabels([])
self.TimeScroll['Figure'].figure.canvas.draw()
# reset label
self.nPtsLabel.setText('')
# reset the features figure:
self.ChanTab['FeaturesFig'].figure.clf()
self.ChanTab['FeaturesFig'].figure.canvas.draw()
# delete KDTree object
if hasattr(self, 'XYData'):
del self.XYData
if hasattr(self, 'CurWaveforms'):
del self.CurWaveforms
if hasattr(self, 'CurTs'):
del self.CurTs
# remove the PCA from the dictionarys
self.ChanTab.pop('PCA', 0)
# reset the channel tab name
self.MainFigTab.setTabText(2, 'Channel Tab')
#__________________________________________________________________________
    def SliceDraw(self):
        """Redraw the waveform axes after one of the two slice cursors moved.

        Redraws the artists by hand and flushes with canvas.update() /
        flush_events() (Qt-style partial repaint) instead of a full
        canvas.draw(), keeping the slider drag responsive."""
        sender = self.sender()
        fig = self.ChanTab['WavesFigure'].figure
        ax = fig.axes[0]
        # move the vertical cursor belonging to the slider that fired
        if sender.objectName() == 'Slice1':
            self.Slice1Ln.set_xdata(sender.value())
        elif sender.objectName() == 'Slice2':
            self.Slice2Ln.set_xdata(sender.value())
        # repaint background, lines, grid and two spines manually
        ax.draw_artist(ax.patch)
        for k in ax.get_lines():
            ax.draw_artist(k)
        for k in ax.get_xgridlines():
            ax.draw_artist(k)
        for k in ax.get_ygridlines():
            ax.draw_artist(k)
        ax.draw_artist(ax.spines['top'])
        ax.draw_artist(ax.spines['left'])
        fig.canvas.update()
        fig.canvas.flush_events()
        #self.ChanTab['WavesFigBG'] = fig.canvas.copy_from_bbox(ax.bbox)
#__________________________________________________________________________
def ChangeCurrentUnit_proc(self):
'''set the current unit'''
self.CurUnit = self.ChanTab['UnitTabsWidget'].currentIndex()
self.CurUnitName = str(self.ChanTab['UnitTabsWidget'].tabText(self.CurUnit))
for k in range(self.What2Plot.count()):
if str(self.What2Plot.itemText(k)) == self.CurUnitName:
self.What2Plot.setCurrentIndex(k)
break
#__________________________________________________________________________
def MainFigTabProc(self):
'''Change the toolbar tab acording to the selected view'''
curtab = self.MainFigTab.currentIndex()
curtabname = str(self.MainFigTab.tabText(curtab))
if curtabname == 'Channels Overview' or curtabname == 'Summary Table':
self.ToolsTab.setCurrentIndex(0)
elif re.search('Chan [0-9]{1,2}', curtabname):
self.ToolsTab.setCurrentIndex(1)
#__________________________________________________________________________
    def closeEvent(self, *event):
        '''Reimplementation of QWidget.closeEvent that closes the h5 file
        (when one is open) before scheduling the window for destruction.'''
        if self.H5FileLoaded:
            self.h5file.close()
        self.deleteLater()
#==============================================================================
if __name__ == '__main__':
    # Reuse an already-running QApplication if there is one (e.g. inside an
    # IPython session); Qt allows only one QApplication per process.
    if not QtGui.QApplication.instance():
        app = QtGui.QApplication(sys.argv)
    else:
        app = QtGui.QApplication.instance()
    spikesorter = SpikeSorter()
    # NOTE(review): app.exec_() is commented out, so when run as a plain
    # script the event loop never starts -- confirm this is intentional
    # (interactive-shell use only).
    #sys.exit(app.exec_())
| [
"[email protected]"
] | |
420dbb78c75611b7d1e8bd3addfb16fb0a70735f | c08b5c091f40eed4c6ded8a5ecfad5105f57d6d3 | /users/migrations/0001_initial.py | 8e01acce9a47f448025659bda39472937f1830c0 | [] | no_license | fofochi1/book_project | e0f5e1d1b390261d0853d7fc2fc6a5d4febca27e | 3a8cff45743fac1b0b5c90c04783d40b5c9bd8a9 | refs/heads/main | 2023-04-22T00:57:37.566751 | 2021-05-12T20:03:50 | 2021-05-12T20:14:27 | 366,843,151 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 778 | py | # Generated by Django 3.2 on 2021-05-03 21:01
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Auto-generated initial migration (Django 3.2): creates the Profile
    # model, linked one-to-one to the configured AUTH_USER_MODEL and holding
    # an avatar image field.
    initial = True
    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]
    operations = [
        migrations.CreateModel(
            name='Profile',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                # 'default.jpg' is used until the user uploads an image
                ('image', models.ImageField(default='default.jpg', upload_to='profile_pics')),
                # deleting the auth user cascades to the profile row
                ('user', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
        ),
    ]
| [
"[email protected]"
] | |
648959589e6ed0862ce63ccc8803fe68dd14dc8b | 6518c74441a68fc99b2b08423b5ea11480806499 | /mlflow/entities/model_registry/model_version.py | 51f05ad4b0f376817479b3c2724d0c4345121abb | [
"Apache-2.0"
] | permissive | criteo-forks/mlflow | da58e64d09700623810da63999a1aca81b435b90 | 499284d8dc9e9ec79d8d9dbd03c58d162a2b7eaa | refs/heads/master | 2023-04-14T17:59:29.997458 | 2022-01-11T09:50:26 | 2022-01-11T09:50:26 | 191,391,769 | 5 | 4 | Apache-2.0 | 2023-04-07T15:16:20 | 2019-06-11T14:44:00 | Python | UTF-8 | Python | false | false | 5,635 | py | from mlflow.entities.model_registry._model_registry_entity import _ModelRegistryEntity
from mlflow.entities.model_registry.model_version_tag import ModelVersionTag
from mlflow.entities.model_registry.model_version_status import ModelVersionStatus
from mlflow.protos.model_registry_pb2 import (
ModelVersion as ProtoModelVersion,
ModelVersionTag as ProtoModelVersionTag,
)
class ModelVersion(_ModelRegistryEntity):
    """
    MLflow entity for Model Version.
    """

    def __init__(
        self,
        name,
        version,
        creation_timestamp,
        last_updated_timestamp=None,
        description=None,
        user_id=None,
        current_stage=None,
        source=None,
        run_id=None,
        status=ModelVersionStatus.to_string(ModelVersionStatus.READY),
        status_message=None,
        tags=None,
        run_link=None,
    ):
        """Store all model-version attributes; ``tags`` is an optional list
        of ModelVersionTag entities, kept internally as a key->value dict."""
        super().__init__()
        self._name = name
        self._version = version
        self._creation_time = creation_timestamp
        self._last_updated_timestamp = last_updated_timestamp
        self._description = description
        self._user_id = user_id
        self._current_stage = current_stage
        self._source = source
        self._run_id = run_id
        self._run_link = run_link
        self._status = status
        self._status_message = status_message
        tag_entities = tags or []
        self._tags = {tag.key: tag.value for tag in tag_entities}

    @property
    def name(self):
        """String. Unique name within Model Registry."""
        return self._name

    @property
    def version(self):
        """Version identifier of this model version."""
        return self._version

    @property
    def creation_timestamp(self):
        """Integer. Creation timestamp in milliseconds since the Unix epoch."""
        return self._creation_time

    @property
    def last_updated_timestamp(self):
        """Integer. Last-update timestamp in milliseconds since the Unix
        epoch (may be None)."""
        return self._last_updated_timestamp

    @property
    def description(self):
        """String. Free-text description of this model version."""
        return self._description

    @property
    def user_id(self):
        """String. ID of the user that created this model version."""
        return self._user_id

    @property
    def current_stage(self):
        """String. Current stage of this model version."""
        return self._current_stage

    @property
    def source(self):
        """String. Source path for the model."""
        return self._source

    @property
    def run_id(self):
        """String. MLflow run ID that generated this model."""
        return self._run_id

    @property
    def run_link(self):
        """String. Link to the exact MLflow run that produced this version."""
        return self._run_link

    @property
    def status(self):
        """String. Current Model Registry status for this model."""
        return self._status

    @property
    def status_message(self):
        """String. Descriptive message for error status conditions."""
        return self._status_message

    @property
    def tags(self):
        """Dictionary of tag key (string) -> tag value for this version."""
        return self._tags

    @classmethod
    def _properties(cls):
        # aggregate with base class properties since cls.__dict__ does not do it automatically
        return sorted(cls._get_properties_helper())

    def _add_tag(self, tag):
        # register/overwrite a single ModelVersionTag entity
        self._tags[tag.key] = tag.value

    # proto mappers
    @classmethod
    def from_proto(cls, proto):
        """Build a ModelVersion entity from a
        mlflow.protos.model_registry_pb2.ModelVersion message."""
        entity = cls(
            name=proto.name,
            version=proto.version,
            creation_timestamp=proto.creation_timestamp,
            last_updated_timestamp=proto.last_updated_timestamp,
            description=proto.description,
            user_id=proto.user_id,
            current_stage=proto.current_stage,
            source=proto.source,
            run_id=proto.run_id,
            status=ModelVersionStatus.to_string(proto.status),
            status_message=proto.status_message,
            run_link=proto.run_link,
        )
        for tag in proto.tags:
            entity._add_tag(ModelVersionTag.from_proto(tag))
        return entity

    def to_proto(self):
        """Serialize this entity into a
        mlflow.protos.model_registry_pb2.ModelVersion message."""
        proto = ProtoModelVersion()
        proto.name = self.name
        proto.version = str(self.version)
        proto.creation_timestamp = self.creation_timestamp
        # optional fields are only set when present, preserving proto
        # "unset" semantics for absent values
        if self.last_updated_timestamp is not None:
            proto.last_updated_timestamp = self.last_updated_timestamp
        if self.description is not None:
            proto.description = self.description
        if self.user_id is not None:
            proto.user_id = self.user_id
        if self.current_stage is not None:
            proto.current_stage = self.current_stage
        # these three are stringified on the way out
        for attr in ("source", "run_id", "run_link"):
            value = getattr(self, attr)
            if value is not None:
                setattr(proto, attr, str(value))
        if self.status is not None:
            proto.status = ModelVersionStatus.from_string(self.status)
        if self.status_message:
            proto.status_message = self.status_message
        proto.tags.extend(
            ProtoModelVersionTag(key=key, value=value) for key, value in self._tags.items()
        )
        return proto
| [
"[email protected]"
] | |
06df3ab7ee3b32c6ad05329bec7d6a3373fe7fc5 | 7fa90c8825c16b07309e295b003fe3173eae3d73 | /(N^K).py | 4ed6c01e914dc71e51476737a07dbbc74e733a00 | [] | no_license | chanduvenkyteju/pythonprogramming | e3e86309af0819e70a3f2af1c7b8cf524274d978 | 7ee742a205d8ae25b1d96f9cf5d1416d5460672a | refs/heads/master | 2021-06-17T21:41:44.368019 | 2021-02-02T08:54:22 | 2021-02-02T08:54:22 | 145,967,045 | 0 | 3 | null | null | null | null | UTF-8 | Python | false | false | 47 | py | N,K=map(int,(raw_input()).split())
print(N**K)
| [
"[email protected]"
] | |
e09d9ca19abc53c9973aa8bba0419f984ea718e2 | b2c070e09bff49241fcff98bcde825cfa96e93ca | /Coding Club India/Asked Amazon Interview Questions/DeleteMidLL.py | 8a324900c83b3a9a801c9d8e8677d1381ac50b54 | [
"MIT"
] | permissive | Beryl2208/CI-2 | dcb1b923f9c4f1f8b167c36c8b22a80522322c53 | f671292dad2695e37458866442a6b951ba4e1a71 | refs/heads/master | 2022-12-26T19:11:28.559911 | 2020-10-06T06:27:51 | 2020-10-06T06:27:51 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 475 | py | '''
class Node:
def __init__(self, data):
self.data = data
self.next = None
'''
def deleteMid(head):
    '''
    Remove the middle node of a singly linked list in one pass.

    head: head of given linkedList
    return: head of resultant llist (None for lists of length 0 or 1)
    '''
    if head is None or head.next is None:
        return None
    # classic runner technique: 'probe' advances two nodes per step, so when
    # it falls off the end, 'mid' sits on the node to delete and 'before_mid'
    # on its predecessor.
    before_mid = None
    mid = head
    probe = head
    while probe is not None and probe.next is not None:
        probe = probe.next.next
        before_mid = mid
        mid = mid.next
    before_mid.next = mid.next
    return head
"[email protected]"
] | |
46533526bd0e768c9966f1d4fbb49fbbcd2c703f | 4fb0e63c0170aa6ddac587e8ad367be96702d682 | /bookmarks/account/views.py | 89b26c5387719ad63da597b9ea026082142b05df | [] | no_license | devvourer/bookmarks | 3984d21e42022284c0348a9350b05a12e0755a61 | 9b6dd1600a9813fe5c2e613e0e1de82d4861dcb2 | refs/heads/master | 2023-04-12T22:58:12.803811 | 2021-04-27T12:40:58 | 2021-04-27T12:40:58 | 360,501,787 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,063 | py | from django.shortcuts import render
from django.http import HttpResponse, JsonResponse
from django.contrib.auth import authenticate, login
from django.contrib.auth.decorators import login_required
from .forms import LoginForm, UserRegistrationForm, UserEditForm, ProfileEditForm
from .models import Profile, Contact
from django.contrib import messages
from django.shortcuts import get_object_or_404
from django.contrib.auth.models import User
from django.views.decorators.http import require_POST
from bookmarks.common.decorators import ajax_required
from actions.utils import create_action
from actions.models import Actions
from django.core.paginator import Paginator, PageNotAnInteger, EmptyPage
def user_login(request):
    """Authenticate a user from the login form.

    GET renders an empty form; POST validates the submitted credentials and
    answers with a short plain-text status."""
    if request.method != 'POST':
        return render(request, 'account/login.html', {'form': LoginForm()})
    form = LoginForm(request.POST)
    if form.is_valid():
        cd = form.cleaned_data
        user = authenticate(request,
                            username=cd['username'],
                            password=cd['password'])
        if user is None:
            return HttpResponse('Invalid login')
        if not user.is_active:
            return HttpResponse('Disabled account')
        login(request, user)
        return HttpResponse('Authenticate successfully')
    # invalid form: re-render with the bound form so errors are shown
    return render(request, 'account/login.html', {'form': form})
def register(request):
    """Create a new user account.

    On a valid POST, saves the user with a hashed password, creates the
    associated Profile row, logs a 'has created an account' action and
    renders the done page; otherwise (GET or invalid form) renders the
    registration form."""
    if request.method == 'POST':
        user_form = UserRegistrationForm(request.POST)
        # (removed leftover debug print(request))
        if user_form.is_valid():
            new_user = user_form.save(commit=False)
            # set_password stores a hash instead of the raw password
            new_user.set_password(user_form.cleaned_data['password'])
            new_user.save()
            Profile.objects.create(user=new_user)
            create_action(new_user, 'has created an account')
            return render(request, 'account/register_done.html', {'new_user': new_user})
    else:
        user_form = UserRegistrationForm()
    return render(request, 'account/register.html', {'user_form': user_form})
@login_required
def edit(request):
    """Let the logged-in user edit their basic account data and profile."""
    if request.method != 'POST':
        return render(request, 'account/edit.html',
                      {'user_form': UserEditForm(instance=request.user),
                       'profile_form': ProfileEditForm(instance=request.user.profile)})
    user_form = UserEditForm(instance=request.user, data=request.POST)
    profile_form = ProfileEditForm(instance=request.user.profile,
                                   data=request.POST, files=request.FILES)
    if user_form.is_valid() and profile_form.is_valid():
        user_form.save()
        profile_form.save()
        messages.success(request, 'Profile updated successfully')
    else:
        messages.error(request, 'Error updating your profile')
    return render(request, 'account/edit.html',
                  {'user_form': user_form, 'profile_form': profile_form})
@login_required
def user_list(request):
    """List every active user account."""
    active_users = User.objects.filter(is_active=True)
    context = {'section': 'people', 'users': active_users}
    return render(request, 'account/user/list.html', context)
@login_required
def user_detail(request, username):
    """Show one active user's public profile page (404 if absent/inactive)."""
    profile_user = get_object_or_404(User, username=username, is_active=True)
    context = {'section': 'people', 'user': profile_user}
    return render(request, 'account/user/detail.html', context)
@ajax_required
@require_POST
@login_required
def user_follow(request):
    """AJAX view to follow/unfollow a user.

    POST params: ``id`` (target user pk) and ``action`` ('follow' creates the
    Contact row and logs an action; any other value unfollows). Returns
    {'status': 'ok'} on success and {'status': 'error'} when parameters are
    missing or the user does not exist -- previously those error paths also
    answered 'ok', making failures invisible to the client."""
    user_id = request.POST.get('id')
    action = request.POST.get('action')
    if user_id and action:
        try:
            user = User.objects.get(id=user_id)
            if action == 'follow':
                Contact.objects.get_or_create(user_from=request.user, user_to=user)
                create_action(request.user, 'is following', user)
            else:
                Contact.objects.filter(user_from=request.user, user_to=user).delete()
            return JsonResponse({'status': 'ok'})
        except User.DoesNotExist:
            return JsonResponse({'status': 'error'})
    return JsonResponse({'status': 'error'})
@login_required
def dashboard(request):
    """Activity-stream dashboard.

    Shows recent actions of users the current user follows (all other
    users' actions when following nobody), 5 per page, with AJAX-driven
    infinite scrolling."""
    actions = Actions.objects.exclude(user=request.user)
    following_ids = request.user.following.values_list('id', flat=True)
    if following_ids:
        # restrict the stream to the followed users
        actions = actions.filter(user_id__in=following_ids)
    # fetch related rows up front to avoid N+1 queries in the template
    actions = actions.select_related('user', 'user__profile').prefetch_related('target')
    paginator = Paginator(actions, 5)
    page = request.GET.get('page')
    try:
        actions = paginator.page(page)
    except PageNotAnInteger:
        actions = paginator.page(1)
    except EmptyPage:
        if request.is_ajax():
            # past the last page: empty body tells the JS to stop fetching
            return HttpResponse('')
        actions = paginator.page(paginator.num_pages)
    if request.is_ajax():
        # AJAX requests get only the list fragment for appending
        return render(request, 'actions/action/action_list_ajax.html', {'section': 'dashboard', 'actions': actions})
    return render(request, 'account/dashboard.html', {'section': 'dashboard', 'actions': actions})
#
# @login_required
# def action_list(request):
# actions = Actions.objects.exclude(user=request.user)
# following_ids = request.user.following.values_list('id', flat=True)
# | [
"[email protected]"
] | |
310d15eb8903a5656daba033737ea2e55507c9f0 | df20a12d6ab69a4e35b4aa3150650cb499a30125 | /python/pcs_api/oauth/session_managers.py | 1dfe4a2c8221ef48d9a4294194b06d290af6604c | [
"Apache-2.0"
] | permissive | clinthidinger/pcs_api | a46077a473ae49dbbf8889afe4022f2b2ae059c7 | 20691e52e144014f99ca75cb7dedc7ba0c18586c | refs/heads/master | 2021-05-28T19:15:32.512734 | 2015-06-14T06:48:44 | 2015-06-14T06:48:44 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 10,075 | py | # -*- coding: utf-8 -*-
#
# Copyright (c) 2014 Netheos (http://www.netheos.net)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from __future__ import absolute_import, unicode_literals, print_function
import time
import urlparse
import requests
from oauthlib.oauth2 import TokenExpiredError
from requests_oauthlib import OAuth2Session
import logging
import threading
from ..credentials.app_info import AppInfo
from ..credentials.user_credentials import UserCredentials
from ..cexceptions import CStorageError
logger = logging.getLogger(__name__)
class AbstractSessionManager(object):
    """Base class of all session managers: holds the user credentials and
    defines the session-factory contract."""

    def __init__(self, user_credentials):
        self._user_credentials = user_credentials

    def get_session(self):
        """Return a requests session-like object suitable to issue
        authenticated http requests to provider."""
        raise NotImplementedError
class BasicAuthSessionManager(AbstractSessionManager):
    """Add an http basic authentication header: yandex, ...

    Note: despite living in the oauth package, this is actually NOT an
    oauth manager!"""

    def __init__(self, user_credentials):
        super(BasicAuthSessionManager, self).__init__(user_credentials)
        # Sanity checks: basic auth needs both a user id and a password.
        if user_credentials.user_id is None:
            raise ValueError("Undefined user_id in user_credentials")
        if 'password' not in self._user_credentials.credentials():
            raise ValueError("User credentials do not contain user password")

    def get_session(self):
        """Return a requests session that authenticates with basic auth."""
        password = self._user_credentials.credentials().get('password')
        session = requests.Session()
        session.auth = requests.auth.HTTPBasicAuth(self._user_credentials.user_id,
                                                   password)
        return session
class DigestAuthSessionManager(AbstractSessionManager):
    """Handle http digest authentication: CloudMe, ..."""

    def __init__(self, user_credentials):
        super(DigestAuthSessionManager, self).__init__(user_credentials)
        # Sanity checks: digest auth needs both a user id and a password.
        if user_credentials.user_id is None:
            raise ValueError("Undefined user_id in user_credentials")
        if 'password' not in self._user_credentials.credentials():
            raise ValueError("User credentials do not contain user password")
        # HTTPDigestAuth objects must survive between requests (this avoids
        # a double request each time), but they can not be shared between
        # threads, so a thread-local keeps one instance per thread:
        self._digests_auth = threading.local()

    def get_session(self):
        """Return a requests session wired to this thread's cached
        HTTPDigestAuth object (created lazily on first use)."""
        session = requests.Session()
        digest_auth = getattr(self._digests_auth, 'digest_auth', None)
        if digest_auth is None:
            digest_auth = requests.auth.HTTPDigestAuth(
                self._user_credentials.user_id,
                self._user_credentials.credentials().get('password'))
            self._digests_auth.digest_auth = digest_auth
        session.auth = digest_auth
        return session
class OAuth2SessionManager(AbstractSessionManager):
    """OAuth2 authorization manager (used by many providers).

    Drives the authorization-code flow bootstrap (get_authorize_url /
    fetch_user_credentials) and transparently refreshes expired access
    tokens when requests are issued through do_request().
    """
    def __init__(self, oauth2_provider_params, app_info,
                 user_credentials_repository=None,
                 user_credentials=None):
        # user_credentials_repository persists refreshed tokens;
        # user_credentials may be None until fetch_user_credentials() runs.
        super(OAuth2SessionManager, self).__init__(user_credentials)
        self._oauth2_provider_params = oauth2_provider_params
        self._app_info = app_info
        self._user_credentials_repository = user_credentials_repository
        # serializes token refreshes between threads (see refresh_token)
        self._refresh_lock = threading.RLock()
        # Some checks if we already have user_credentials:
        if user_credentials is not None:
            creds = self._user_credentials.credentials()
            if not 'access_token' in creds:
                raise ValueError("User credentials do not contain any access token")
    def get_authorize_url(self):
        """Return (authorization_url, state) to start the OAuth2
        authorization-code flow in the user's browser."""
        oauth = OAuth2Session(client_id=self._app_info.app_id,
                              redirect_uri=self._app_info.redirect_url,
                              scope=self._oauth2_provider_params.scope_for_authorization(self._app_info.scope))
        url, state = oauth.authorization_url(self._oauth2_provider_params.authorize_url)
        return url, state
    def fetch_user_credentials(self, code_or_url, state):
        """This is for bootstrapping Oauth2 and getting an initial refresh token.

        code_or_url: either the raw authorization code, or the full redirect
        URL received from the provider (detected by its http(s) prefix)."""
        oauth = OAuth2Session(client_id=self._app_info.app_id,
                              redirect_uri=self._app_info.redirect_url,
                              scope=self._oauth2_provider_params.scope_for_authorization(self._app_info.scope),
                              state=state)
        if code_or_url.startswith('http://') or code_or_url.startswith('https://'):
            # It is an URL:
            url = code_or_url
            code = None
            # URL may contain granted scope:
            query = urlparse.urlparse(url).query
            params = dict(urlparse.parse_qsl(query))
            granted_scope_str = params.get('scope', None)
            if granted_scope_str is not None:
                #logger.debug("granted scope str= %s", granted_scope_str)
                # granted scope is only logged here, not stored
                granted_scope = self._oauth2_provider_params.granted_scope(granted_scope_str)
                logger.debug("granted scope = %s", granted_scope)
        else:
            # It is a code:
            url = None
            code = code_or_url
        token = oauth.fetch_token(self._oauth2_provider_params.access_token_url,
                                  authorization_response=url,
                                  code=code,
                                  client_secret=self._app_info.app_secret)
        if self._user_credentials is None:
            self._user_credentials = UserCredentials(self._app_info, None, token)
        else:
            self._user_credentials.set_new_credentials(token)
        return self._user_credentials
    def do_request(self, *args, **kwargs):
        """Issue an authenticated request, refreshing the access token once
        (at most) if the provider reports it expired."""
        already_refreshed_token = False
        while True:
            # We always take a new session: required to get fresh access_token
            session = OAuth2Session(client_id=self._app_info.app_id,
                                    token=self._user_credentials.credentials())
            try:
                return session.request(*args, **kwargs)
            except TokenExpiredError as tee:
                # If we didn't try already, get a new access_token, we'll refresh it.
                # this may be a no-op if another thread has just done the same thing:
                if not already_refreshed_token:
                    logger.debug('Expired access_token: will refresh')
                    self.refresh_token()
                    already_refreshed_token = True
                    # And we'll request again
                else:
                    # We have refreshed already: this is strange
                    raise CStorageError('Expired token after refresh ? Giving up', tee)
    def refresh_token(self):
        """Access tokens are refreshed after expiration (before sending request).
        This method refreshes token from the given session and stores new token
        in this session manager object.
        Method is synchronized so that no two threads will attempt to refresh
        at the same time. If a locked thread sees that token has already been
        refreshed, no refresh is attempted either.
        Not all providers support tokens refresh (ex: Dropbox)."""
        if not self._oauth2_provider_params.refresh_token_url:
            # Provider does not support token refresh: we are dead
            raise CStorageError('Invalid or expired token ; provider does not support token refresh')
        # snapshot current credentials before acquiring the lock, so we can
        # detect whether another thread refreshed them while we waited
        current_creds = self._user_credentials.credentials()
        with self._refresh_lock:
            after_lock_creds = self._user_credentials.credentials()
            if after_lock_creds == current_creds:
                logger.debug('This thread will actually refresh token: %r', threading.current_thread())
                session = OAuth2Session(client_id=self._app_info.app_id,
                                        token=self._user_credentials.credentials())
                extra = {'client_id': self._app_info.app_id,
                         'client_secret': self._app_info.app_secret}
                new_token = session.refresh_token(self._oauth2_provider_params.refresh_token_url,
                                                  **extra)
                self._token_saver(new_token)
            else:
                logger.debug('Not refreshed token in this thread, already done')
    def _token_saver(self, new_token):
        """callback of requests-oauthlib: called when token has been refreshed.
        In case no refresh_token has been given by provider, the old one has been kept
        by the framework so this method only needs to update and persist given token.
        :param new_token: json dictionary containing access_token, etc."""
        logger.debug("Will persist refreshed token: %s", new_token)
        if 'expires_in' in new_token:
            # If token contains expiration time,
            # convert relative time to absolute timestamp.
            # Used by oauthlib when token will be read again in the future
            new_token['expires_at'] = time.time() + int(new_token['expires_in'])
        # Update current user credentials:
        self._user_credentials.set_new_credentials(new_token)
        # And save this information:
        self._user_credentials_repository.save(self._user_credentials)
| [
"[email protected]"
] | |
242f93be836d60b8ec00df8a2ef34c03e9bf5c74 | 9f28ddc579e52a0491e599cfcd29c044ef230a2f | /sudoku/classifier/classifier.py | 9bbb53cdca8220b054449f5123634291ccb5c52c | [] | no_license | Blitzliner/sudokuSolver | d596005f13b45af27c8fe66554c73d684920bd99 | f2ef0c01d62b3968a6fd8c7ad6e48442c7d70a50 | refs/heads/master | 2022-04-28T17:19:06.906979 | 2020-04-28T15:11:31 | 2020-04-28T15:11:31 | 259,655,030 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,955 | py | import os
from keras.models import model_from_json
import cv2
from keras.models import Sequential
from keras.layers import Dense
from keras.layers import Dropout
from keras.layers import Flatten
from keras.layers.convolutional import Conv2D
from keras.layers.convolutional import MaxPooling2D
from keras.utils import np_utils
from keras import backend as K
class DigitClassifier:
    """Trains, persists, loads and serves a small CNN that classifies
    28x28 single-channel digit images (MNIST-style, channels-first)."""

    def __init__(self):
        self._model = None

    def load(self, model_dir, model_structure="model.json", model_data="model.h5"):
        """Load the model architecture (JSON) and weights (HDF5) from *model_dir*.

        :param model_dir: directory containing both files
        :param model_structure: architecture file name
        :param model_data: weights file name
        :raises FileNotFoundError: if either file is missing
        """
        model_structure_path = os.path.join(model_dir, model_structure)
        # Bug fix: the original joined model_structure here as well, so the
        # existence check below never actually looked at the weights file.
        model_data_path = os.path.join(model_dir, model_data)
        if os.path.isfile(model_structure_path) and os.path.isfile(model_data_path):
            with open(model_structure_path, 'r') as file:
                json_content = file.read()
            self._model = model_from_json(json_content)
            self._model.load_weights(model_data_path)
        else:
            raise FileNotFoundError(F"Model files not found: {model_structure_path} and {model_data_path}")

    def train(self, x_train, y_train, x_test, y_test):
        """Build and fit the CNN; the trained model replaces self._model.

        Labels are one-hot encoded in place; inputs are expected as
        (N, 1, 28, 28) channels-first arrays.
        """
        K.image_data_format()
        # One Hot encode outputs
        y_train = np_utils.to_categorical(y_train)
        y_test = np_utils.to_categorical(y_test)
        num_classes = y_test.shape[1]
        # Create model: two conv/pool stages, then two dense layers + softmax
        model = Sequential()
        model.add(Conv2D(32, (5, 5), input_shape=(1, 28, 28), activation='relu', data_format='channels_first'))
        model.add(MaxPooling2D(pool_size=(2, 2)))
        model.add(Conv2D(16, (3, 3), activation='relu'))
        model.add(MaxPooling2D(pool_size=(2, 2)))
        model.add(Dropout(0.2))
        model.add(Flatten())
        model.add(Dense(128, activation='relu'))
        model.add(Dense(64, activation='relu'))
        model.add(Dense(num_classes, activation='softmax'))
        model.compile(loss='categorical_crossentropy', optimizer='adam', metrics=['accuracy'])
        model.fit(x_train, y_train, validation_data=(x_test, y_test), epochs=10, batch_size=200)
        self._model = model

    def save(self, model_dir="model", model_structure="model.json", model_data="model.h5"):
        """Serialize the architecture to JSON and the weights to HDF5."""
        model_structure_path = os.path.join(model_dir, model_structure)
        model_data_path = os.path.join(model_dir, model_data)
        model_json = self._model.to_json()
        with open(model_structure_path, "w") as json_file:
            json_file.write(model_json)  # serialize model to JSON
        self._model.save_weights(model_data_path)  # serialize weights to HDF5

    def get_error(self, x_test, y_test):
        """Return the error rate in percent on the given (already encoded) test set.

        :raises Exception: if no model has been trained or loaded yet
        """
        if self._model is None:
            raise Exception("Model does not exist. Please load a model first.")
        scores = self._model.evaluate(x_test, y_test, verbose=0)
        return 100 - scores[1] * 100

    def predict(self, image):
        """Classify a single grayscale image and return the predicted digit.

        The image is resized to 28x28 and reshaped to (1, 1, 28, 28).

        :raises Exception: if no model has been trained or loaded yet
        """
        if self._model is None:
            raise Exception("Model does not exist. Please load a model first.")
        resized = cv2.resize(image, (28, 28))
        reshaped = resized.reshape(1, 1, 28, 28)
        prediction = self._model.predict_classes(reshaped, verbose=0)
        return prediction[0]
if __name__ == '__main__':
    import data
    model_dir = "models"
    # data.get_all_data() is a project-local loader; presumably returns
    # channels-first (N, 1, 28, 28) arrays — TODO confirm against data.py
    x_train, y_train, x_test, y_test = data.get_all_data()
    #x_train, y_train, x_test, y_test = get_all_data()
    print(F"train/test shape: {x_train.shape}/{x_test.shape}")
    classifier = DigitClassifier()
    # Toggle: True trains a fresh model and saves it, False loads a saved one
    train = True
    if train:
        classifier.train(x_train, y_train, x_test, y_test)
        classifier.save(model_dir)
    else:
        classifier.load(model_dir)
    # Show predictions for the first ten test images, one window at a time
    for idx in range(10):
        image = x_test[idx][0]
        result = classifier.predict(image)
        print(F"Prediction is: {result}")
        cv2.imshow("test image", image)
        cv2.waitKey(0)
| [
"[email protected]"
] | |
9e5c388fb096b78adf854e6c99a848f76cb54e7e | 580e92fe97ccf6fac70e483230429d6aaf31edc4 | /constellation.py | cc419d942b3ef0f2123de7b1f24fba61606918bd | [] | no_license | mitooos/orion-constellation | c9aa4fd751eb38e9893669075a485ce91bd80c09 | b0bed824db676f041435a517210b8ae61f44d475 | refs/heads/master | 2020-03-21T06:29:15.311245 | 2018-06-21T21:38:58 | 2018-06-21T21:38:58 | 138,223,704 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,474 | py |
# coding: utf-8
# ## Project: Visualizing the Orion Constellation
#
# In this project you are Dr. Jillian Bellovary, a real-life astronomer for the Hayden Planetarium at the American Museum of Natural History. As an astronomer, part of your job is to study the stars. You've recently become interested in the constellation Orion, a collection of stars that appear in our night sky and form the shape of [Orion](https://en.wikipedia.org/wiki/Orion_(constellation)), a warrior God from ancient Greek mythology.
#
# As a researcher on the Hayden Planetarium team, you are in charge of visualizing the Orion constellation in 3D using the Matplotlib function `.scatter()`. To learn more about the `.scatter()` you can see the Matplotlib documentation [here](https://matplotlib.org/api/_as_gen/matplotlib.pyplot.scatter.html).
#
# You will create a rotate-able visualization of the position of the Orion's stars and get a better sense of their actual positions. To achieve this, you will be mapping real data from outer space that maps the position of the stars in the sky
#
# The goal of the project is to understand spatial perspective. Once you visualize Orion in both 2D and 3D, you will be able to see the difference in the constellation shape humans see from earth versus the actual position of the stars that make up this constellation.
#
# <img src="https://upload.wikimedia.org/wikipedia/commons/9/91/Orion_constellation_with_star_labels.jpg" alt="Orion" style="width: 400px;"/>
#
#
# ## 1. Set-Up
# The following set-up is new and specific to the project. It is very similar to the way you have imported Matplotlib in previous lessons.
#
# + Add `%matplotlib notebook` in the cell below. This is a new statement that you may not have seen before. It will allow you to be able to rotate your visualization in this jupyter notebook.
#
# + We will be using a subset of Matplotlib: `matplotlib.pyplot`. Import the subset as you have been importing it in previous lessons: `from matplotlib import pyplot as plt`
#
#
# + In order to see our 3D visualization, we also need to add this new line after we import Matplotlib:
# `from mpl_toolkits.mplot3d import Axes3D`
#
# In[22]:
get_ipython().run_line_magic('matplotlib', 'notebook')
from matplotlib import pyplot as plt
from mpl_toolkits.mplot3d import Axes3D
# ## 2. Get familiar with real data
#
# Astronomers describe a star's position in the sky by using a pair of angles: declination and right ascension. Declination is similar to latitude, but it is projected on the celestial sphere. Right ascension is known as the "hour angle" because it accounts for time of day and earth's rotation. Both angles are relative to the celestial equator. You can learn more about star position [here](https://en.wikipedia.org/wiki/Star_position).
#
# The `x`, `y`, and `z` lists below are composed of the x, y, z coordinates for each star in the collection of stars that make up the Orion constellation as documented in a paper by Nottingham Trent University on "The Orion constellation as an installation" found [here](https://arxiv.org/ftp/arxiv/papers/1110/1110.3469.pdf).
#
# Spend some time looking at `x`, `y`, and `z`, does each fall within a range?
# In[23]:
# Orion
# x/y/z coordinates of the constellation's stars, taken from the Nottingham
# Trent paper referenced above — units/frame are as given there (TODO confirm)
x = [-0.41, 0.57, 0.07, 0.00, -0.29, -0.32,-0.50,-0.23, -0.23]
y = [4.12, 7.71, 2.36, 9.10, 13.35, 8.13, 7.19, 13.25,13.43]
z = [2.06, 0.84, 1.56, 2.07, 2.36, 1.72, 0.66, 1.25,1.38]
# ## 3. Create a 2D Visualization
#
# Before we visualize the stars in 3D, let's get a sense of what they look like in 2D.
#
# Create a figure for the 2d plot and save it to a variable name `fig`. (hint: `plt.figure()`)
#
# Add your subplot `.add_subplot()` as the single subplot, with `1,1,1`.(hint: `add_subplot(1,1,1)`)
#
# Use the scatter [function](https://matplotlib.org/api/_as_gen/matplotlib.pyplot.scatter.html) to visualize your `x` and `y` coordinates. (hint: `.scatter(x,y)`)
#
# Render your visualization. (hint: `plt.show()`)
#
# Does the 2D visualization look like the Orion constellation we see in the night sky? Do you recognize its shape in 2D? There is a curve to the sky, and this is a flat visualization, but we will visualize it in 3D in the next step to get a better sense of the actual star positions.
# In[24]:
# 2D view: plot x against y only, ignoring depth (z)
fig = plt.figure()
fig.add_subplot(1,1,1)
plt.scatter(x,y, color = 'black')
plt.title('2D Representation Of The Orion Constellation')
plt.xlabel('x Coordinates')
plt.ylabel('y Coordinates')
plt.show()
# ## 4. Create a 3D Visualization
#
# Create a figure for the 3D plot and save it to a variable name `fig_3d`. (hint: `plt.figure()`)
#
#
# Since this will be a 3D projection, we want to make to tell Matplotlib this will be a 3D plot.
#
# To add a 3D projection, you must include a the projection argument. It would look like this:
# ```py
# projection="3d"
# ```
#
# Add your subplot with `.add_subplot()` as the single subplot `1,1,1` and specify your `projection` as `3d`:
#
# `fig_3d.add_subplot(1,1,1,projection="3d")`)
#
# Since this visualization will be in 3D, we will need our third dimension. In this case, our `z` coordinate.
#
# Create a new variable `constellation3d` and call the scatter [function](https://matplotlib.org/api/_as_gen/matplotlib.pyplot.scatter.html) with your `x`, `y` and `z` coordinates.
#
# Include `z` just as you have been including the other two axes. (hint: `.scatter(x,y,z)`)
#
# Render your visualization. (hint `plt.show()`.)
#
# In[25]:
# 3D view: scatter the stars with their actual depth coordinate
fig_3d = plt.figure()
ax = fig_3d.add_subplot(1, 1, 1, projection='3d')
# Bug fix: call scatter on the 3D axes. pyplot.scatter(x, y, z) binds the
# third positional argument to `s` (marker sizes), so the original rendered
# a flat plot at z=0 with varying dot sizes instead of true 3D positions.
constellation3d = ax.scatter(x, y, z)
plt.title('Orion Constellation 3D Representation')
plt.show()
# ## 5. Rotate and explore
#
# Use your mouse to click and drag the 3D visualization in the previous step. This will rotate the scatter plot. As you rotate, can you see Orion from different angles?
#
# Note: The on and off button that appears above the 3D scatter plot allows you to toggle rotation of your 3D visualization in your notebook.
#
# Take your time, rotate around! Remember, this will never look exactly like the Orion we see from Earth. The visualization does not curve as the night sky does.
# There is beauty in the new understanding of Earthly perspective! We see the shape of the warrior Orion because of Earth's location in the universe and the location of the stars in that constellation.
#
# Feel free to map more stars by looking up other celestial x, y, z coordinates [here](http://www.stellar-database.com/).
#
| [
"[email protected]"
] | |
417726b5fa7e74d09c1f9cb39aada7b694389d42 | 0170a7ddf23644ab0eb071356609cd894b5d6c80 | /structure.py | 226ce3d0b708331160c524dfcb4a1eb8b6e6b29c | [] | no_license | jakobgager/Exp_Struct | 07d2a8935cb50aaeb23e7f759539ae2797058a1e | 7a8e6ba1d0f57b9f6398a4a2d788f13fdb91a672 | refs/heads/master | 2021-01-10T21:01:50.306877 | 2013-05-17T22:31:18 | 2013-05-17T22:31:18 | 10,111,090 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,240 | py | #! /usr/bin/env python
from math import sqrt, ceil
def fbd_template(tempdict):
    """Return the CalculiX/cgx .fbd input deck as a string, filled from *tempdict*.

    *tempdict* must supply every ``{placeholder}`` used in the template:
    the geometry helpers (``l2``, ``w2``, ``h2``, ``PC1y`` ...) and the
    element-division counts (``L1div``, ``ladiv`` ...) computed by
    :func:`comp_variables`.
    """
    return """
# Crosssection
Pnt P1 -{l2} -{w2} -{h2}
Pnt P2 -{l2} -{w2} {PC1z}
Pnt P3 -{l2} -{PC1y} {h2}
Pnt PC1 -{l2} -{PC1y} {PC1z}
Pnt P4 -{l2} 0 {h2}
Line L1 P1 P2 {L1div}
Line L2 P2 P3 PC1 {L2div}
Line L3 P3 P4 {L3div}
Seta CS1 l L1 L2 L3
# Flaeche 1
Seto Struct1
Swep CS1 CS2 tra {la} 0 0 {ladiv}
Setc Struct1
# Flaeche 2
Seto Struct2
Swep CS2 CS3 tra {lcl} 0 0 {lcldiv}
Setc Struct2
# Flaeche 3
Seto Struct3
Pnt P5 0 -{w2} {P5z}
Pnt PC2 0 -{w2} {PC2z}
Seta CS3a l L00B L00A
Swep CS3a CS4a tra {wc} 0 0 {wcdiv}
Line L4 D006 P5 PC2 {wcdiv}
Line L5 P5 D00B {L5div}
Surf S1 L4 L5 L00J L009
Setc Struct3
Seta struct_all se Struct1 Struct2 Struct3
# Load block
Seto block1
Pnt P8 -{l2} 0 -{h2}
Pnt P11 -{l2} 0 {PC1z}
Line L6 P1 P8 {L6div}
Line L7 P8 P11 {L1div}
Line L9 P11 P4 {L2div}
Line L11 P2 P11 {L6div}
Surf S2 L1 L11 L7 L6
Surf S3 L11 L2 L3 L9
Swep block1 temp tra {la} 0 0 {ladiv}
Setc block1
# Load block 2
Seto block2
Pnt P9 -{P9x} 0 {h2}
Pnt P10 -{P9x} 0 {P10z}
Line L8 P9 P10 2
Swep block2 temp1 tra 0 -{w2} 0 {L6div}
Swep block2 temp2 tra {wd} 0 0 {wddiv}
Setc block2
# support
Seto support
Pnt P12 {P12x} -{w2e} {P12z}
Pnt P12c -{ls2} -{w2e} -{h2}
Pnt P13 {P13x} -{w2e} {P12z}
Pnt PC3 -{ls2} -{w2e} {P12z}
Pnt P14 {P13x} -{w2e} {P14z}
Pnt P15 {P12x} -{w2e} {P14z}
Line L12 P12 P12c PC3 {sdiv}
Line L13 P12c P13 PC3 {sdiv}
Line L14 P13 P14 {sdiv}
Line L15 P14 P15 {sdiv}
Line L16 P15 P12 {sdiv}
Surf S4 L12 L13 L14 L15 L16
Swep support temp tra 0 {w2e} 0 {L6div}
Setc support
Node 1 -{ls2} -{w2} {P12z}
Node 2 -{ls2} -{w2} {P12z}
# peen
Seto peen
Pnt P16 {P16x} -{w2e} {P16z}
Pnt P16c -{lf2} -{w2e} {P10z}
Pnt P17 {P17x} -{w2e} {P16z}
Pnt PC4 -{lf2} -{w2e} {P16z}
Pnt P18 {P17x} -{w2e} {P18z}
Pnt P19 {P16x} -{w2e} {P18z}
Line L17 P16 P16c PC4 {pdiv}
Line L18 P16c P17 PC4 {pdiv}
Line L19 P17 P18 {pdiv}
Line L20 P18 P19 {pdiv}
Line L21 P19 P16 {pdiv}
Surf S5 L17 L18 L19 L20 L21
Swep peen temp tra 0 {w2e} 0 {L6div}
Setc peen
Node 3 -{lf2} -{w2} {P16z}
Node 4 -{lf2} -{w2} {P16z}
# Element types
Elty struct_all qu8
Elty block1 he20
Elty block2 he20
Elty support he8
Elty peen he8
Mesh struct_all
Mesh block1
Merg n all
Mesh block2
Mesh support
Mesh peen
Seta Sym_X l L00I L00H L5
Seta Sym_Y l L00G L00M
Seta Sym_Y s A00F A00G A00K
Comp Sym_X d
Comp Sym_Y d
# Change mesh order to quadratic
#Mids all gen
plot e all
view elem
plus e block1 m
plus e block2 b
plus e struct_all g
plus e support k
plus e peen k
# write mesh
send struct_all abq
send block1 abq
send block2 abq
send support abq
send peen abq
send Sym_X abq names
send Sym_Y abq names
""".format(**tempdict)
def comp_variables():
    """Load the ``structure`` dict from dimensions.py and derive every helper
    coordinate and mesh line-division count used by :func:`fbd_template`.

    NOTE: relies on ``execfile``, so this module targets Python 2.
    """
    # compute helpers
    globs = {}
    execfile('dimensions.py', globs)
    # crosssection 1
    st = globs['structure']
    st['l2'] = st['l']/2.
    st['w2'] = st['w']/2.
    st['h2'] = st['h']/2.
    st['lf2'] = st['lf']/2.
    st['PC1z'] = st['h2'] - st['r']
    st['PC1y'] = st['w2'] - st['r']
    # cut out
    st['wc'] = sqrt(st['rc']**2 - st['lc']**2)
    st['lcl'] = st['l2'] - st['la'] - st['wc']
    st['PC2z'] = -st['h2'] - st['lc']
    st['P5z'] = st['PC2z'] + st['rc']
    # load block 2
    st['P9x'] = st['lf2'] + st['wd']/2.
    st['P10z'] = st['h2'] + st['td']
    # support
    st['ls2'] = st['ls']/2.
    st['P12x'] = -st['ls2'] - st['rs']
    st['P12z'] = -st['h2'] - st['rs']
    st['P13x'] = -st['ls2'] + st['rs']
    st['P14z'] = -st['h2'] - 2*st['rs']
    st['w2e'] = st['w2']*1.1
    # peen
    # NOTE(review): 'lf2' is recomputed here with the same formula as in the
    # crosssection block above — harmless duplication.
    st['lf2'] = st['lf']/2.
    st['P16x'] = -st['lf2'] - st['rf']
    st['P16z'] = st['h2'] + st['td'] + st['rf']
    st['P17x'] = -st['lf2'] + st['rf']
    st['P18z'] = st['P16z'] + st['rf']
    # linedivisions: element counts per line, rounded up to even by evenint()
    st['L1div'] = evenint((st['h']-st['r'])/st['es'])
    st['L2div'] = evenint(ceil((st['r']*1.5)/st['es']))
    st['L3div'] = evenint((st['w2']-st['r'])/st['es'])
    st['ladiv'] = evenint(st['la']/st['es'])
    st['lcldiv'] = evenint(st['lcl']/st['es'])
    st['wcdiv'] = evenint(st['wc']/st['es'])
    st['L5div'] = evenint((st['h2'] - st['P5z'])/st['es'])
    st['L6div'] = evenint(st['w2']/st['es'])
    st['L7div'] = evenint(st['h']/st['es'])
    st['wddiv'] = evenint(st['wd']/st['es'])
    st['tddiv'] = evenint(ceil(st['td']/st['es']))
    st['sdiv'] = evenint(ceil((st['rs']*1.5)/st['es']))
    st['pdiv'] = evenint(ceil((st['rf']*1.5)/st['es']))
    return st
def evenint(x):
    """Truncate *x* to an int and round odd results up to the next even number."""
    n = int(x)
    return n if n % 2 == 0 else n + 1
def main():
    """Generate the exp_struct.fbd mesh deck from the dimensions in dimensions.py."""
    struct = comp_variables()
    fbd_data = fbd_template(struct)
    with open('exp_struct.fbd', 'w') as fil:
        # fbd_data is a single string: write() emits it in one call, whereas
        # the original writelines() iterated it character by character
        # (same output, needlessly slow).
        fil.write(fbd_data)


if __name__ == '__main__':
    main()
| [
"[email protected]"
] | |
baf1cf3480c7a29e01545d0f677b8586e5dcb3ef | d7a68c636e6128533b17975655bd6b46ed222916 | /adapter-transformers-adapters3.1.0/src/transformers/adapters/models/distilbert/adapter_model.py | 4f8c9fa7becdd8743a176268a5067ccb1cabde93 | [
"Apache-2.0"
] | permissive | cambridgeltl/autopeft | 69179f8faf2cc4d2164ff78e544dc3fe2d39c331 | d8ad6bea93aa413a54d0e09fe25bdd62b46cfcf5 | refs/heads/main | 2023-05-23T09:21:59.912941 | 2023-04-25T14:35:31 | 2023-04-25T14:35:31 | 594,316,585 | 26 | 4 | Apache-2.0 | 2023-04-25T14:35:32 | 2023-01-28T06:39:25 | Python | UTF-8 | Python | false | false | 10,915 | py | import warnings
import torch.nn as nn
from ....models.distilbert.modeling_distilbert import (
DISTILBERT_INPUTS_DOCSTRING,
DISTILBERT_START_DOCSTRING,
DistilBertModel,
DistilBertPreTrainedModel,
)
from ....utils import add_start_docstrings, add_start_docstrings_to_model_forward
from ...heads import (
BertStyleMaskedLMHead,
BiaffineParsingHead,
CausalLMHead,
ClassificationHead,
ModelWithFlexibleHeadsAdaptersMixin,
MultiLabelClassificationHead,
MultipleChoiceHead,
QuestionAnsweringHead,
TaggingHead,
)
from ...model_mixin import EmbeddingAdaptersWrapperMixin
@add_start_docstrings(
    """DistilBert Model transformer with the option to add multiple flexible heads on top.""",
    DISTILBERT_START_DOCSTRING,
)
class DistilBertAdapterModel(
    EmbeddingAdaptersWrapperMixin, ModelWithFlexibleHeadsAdaptersMixin, DistilBertPreTrainedModel
):
    def __init__(self, config):
        """Wrap a bare DistilBertModel and initialize the flexible-heads registry."""
        super().__init__(config)
        self.distilbert = DistilBertModel(config)
        self._init_head_modules()
        self.init_weights()

    def get_position_embeddings(self) -> nn.Embedding:
        """
        Returns the position embeddings
        """
        return self.distilbert.get_position_embeddings()

    def resize_position_embeddings(self, new_num_position_embeddings: int):
        """
        Resizes position embeddings of the model if :obj:`new_num_position_embeddings !=
        config.max_position_embeddings`.

        Arguments:
            new_num_position_embeddings (:obj:`int`):
                The number of new position embedding matrix. If position embeddings are learned, increasing the size
                will add newly initialized vectors at the end, whereas reducing the size will remove vectors from the
                end. If position embeddings are not learned (*e.g.* sinusoidal position embeddings), increasing the
                size will add correct vectors at the end following the position encoding algorithm, whereas reducing
                the size will remove vectors from the end.
        """
        self.distilbert.resize_position_embeddings(new_num_position_embeddings)

    @add_start_docstrings_to_model_forward(DISTILBERT_INPUTS_DOCSTRING.format("batch_size, num_choices"))
    def forward(
        self,
        input_ids=None,
        attention_mask=None,
        head_mask=None,
        inputs_embeds=None,
        output_attentions=None,
        output_hidden_states=None,
        return_dict=None,
        head=None,
        output_adapter_gating_scores=False,
        output_adapter_fusion_attentions=False,
        **kwargs
    ):
        """Run the encoder, then dispatch to the active (or given) prediction head."""
        return_dict = return_dict if return_dict is not None else self.config.use_return_dict

        # Flatten a possible (batch, num_choices, seq) layout to (batch*num_choices, seq)
        # so multiple-choice inputs pass through the encoder unchanged.
        input_ids = input_ids.view(-1, input_ids.size(-1)) if input_ids is not None else None
        attention_mask = attention_mask.view(-1, attention_mask.size(-1)) if attention_mask is not None else None
        inputs_embeds = (
            inputs_embeds.view(-1, inputs_embeds.size(-2), inputs_embeds.size(-1))
            if inputs_embeds is not None
            else None
        )

        distilbert_output = self.distilbert(
            input_ids=input_ids,
            attention_mask=attention_mask,
            head_mask=head_mask,
            inputs_embeds=inputs_embeds,
            output_attentions=output_attentions,
            output_hidden_states=output_hidden_states,
            return_dict=return_dict,
            output_adapter_gating_scores=output_adapter_gating_scores,
            output_adapter_fusion_attentions=output_adapter_fusion_attentions,
        )

        outputs = self.forward_head(
            distilbert_output, head_name=head, attention_mask=attention_mask, return_dict=return_dict, **kwargs
        )

        return outputs

    # Registry mapping head-type names (as stored in head configs) to classes;
    # used by the flexible-heads mixin when loading/adding heads.
    head_types = {
        "classification": ClassificationHead,
        "multilabel_classification": MultiLabelClassificationHead,
        "tagging": TaggingHead,
        "multiple_choice": MultipleChoiceHead,
        "question_answering": QuestionAnsweringHead,
        "dependency_parsing": BiaffineParsingHead,
        "masked_lm": BertStyleMaskedLMHead,
        "causal_lm": CausalLMHead,
    }

    def add_classification_head(
        self,
        head_name,
        num_labels=2,
        layers=2,
        activation_function="tanh",
        overwrite_ok=False,
        multilabel=False,
        id2label=None,
        use_pooler=False,
    ):
        """
        Adds a sequence classification head on top of the model.

        Args:
            head_name (str): The name of the head.
            num_labels (int, optional): Number of classification labels. Defaults to 2.
            layers (int, optional): Number of layers. Defaults to 2.
            activation_function (str, optional): Activation function. Defaults to 'tanh'.
            overwrite_ok (bool, optional): Force overwrite if a head with the same name exists. Defaults to False.
            multilabel (bool, optional): Enable multilabel classification setup. Defaults to False.
        """

        if multilabel:
            head = MultiLabelClassificationHead(
                self, head_name, num_labels, layers, activation_function, id2label, use_pooler
            )
        else:
            head = ClassificationHead(self, head_name, num_labels, layers, activation_function, id2label, use_pooler)
        self.add_prediction_head(head, overwrite_ok)

    def add_multiple_choice_head(
        self,
        head_name,
        num_choices=2,
        layers=2,
        activation_function="tanh",
        overwrite_ok=False,
        id2label=None,
        use_pooler=False,
    ):
        """
        Adds a multiple choice head on top of the model.

        Args:
            head_name (str): The name of the head.
            num_choices (int, optional): Number of choices. Defaults to 2.
            layers (int, optional): Number of layers. Defaults to 2.
            activation_function (str, optional): Activation function. Defaults to 'tanh'.
            overwrite_ok (bool, optional): Force overwrite if a head with the same name exists. Defaults to False.
        """
        head = MultipleChoiceHead(self, head_name, num_choices, layers, activation_function, id2label, use_pooler)
        self.add_prediction_head(head, overwrite_ok)

    def add_tagging_head(
        self, head_name, num_labels=2, layers=1, activation_function="tanh", overwrite_ok=False, id2label=None
    ):
        """
        Adds a token classification head on top of the model.

        Args:
            head_name (str): The name of the head.
            num_labels (int, optional): Number of classification labels. Defaults to 2.
            layers (int, optional): Number of layers. Defaults to 1.
            activation_function (str, optional): Activation function. Defaults to 'tanh'.
            overwrite_ok (bool, optional): Force overwrite if a head with the same name exists. Defaults to False.
        """
        head = TaggingHead(self, head_name, num_labels, layers, activation_function, id2label)
        self.add_prediction_head(head, overwrite_ok)

    def add_qa_head(
        self, head_name, num_labels=2, layers=1, activation_function="tanh", overwrite_ok=False, id2label=None
    ):
        """
        Adds an extractive question answering (span prediction) head on top of the model.

        Args:
            head_name (str): The name of the head.
            num_labels (int, optional): Number of labels (start/end logits). Defaults to 2.
            layers (int, optional): Number of layers. Defaults to 1.
            activation_function (str, optional): Activation function. Defaults to 'tanh'.
            overwrite_ok (bool, optional): Force overwrite if a head with the same name exists. Defaults to False.
        """
        head = QuestionAnsweringHead(self, head_name, num_labels, layers, activation_function, id2label)
        self.add_prediction_head(head, overwrite_ok)

    def add_dependency_parsing_head(self, head_name, num_labels=2, overwrite_ok=False, id2label=None):
        """
        Adds a biaffine dependency parsing head on top of the model. The parsing head uses the architecture described
        in "Is Supervised Syntactic Parsing Beneficial for Language Understanding? An Empirical Investigation" (Glavaš
        & Vulić, 2021) (https://arxiv.org/pdf/2008.06788.pdf).

        Args:
            head_name (str): The name of the head.
            num_labels (int, optional): Number of labels. Defaults to 2.
            overwrite_ok (bool, optional): Force overwrite if a head with the same name exists. Defaults to False.
            id2label (dict, optional): Mapping from label ids to labels. Defaults to None.
        """
        head = BiaffineParsingHead(self, head_name, num_labels, id2label)
        self.add_prediction_head(head, overwrite_ok)

    def add_masked_lm_head(self, head_name, activation_function="gelu", overwrite_ok=False):
        """
        Adds a masked language modeling head on top of the model.

        Args:
            head_name (str): The name of the head.
            activation_function (str, optional): Activation function. Defaults to 'gelu'.
            overwrite_ok (bool, optional): Force overwrite if a head with the same name exists. Defaults to False.
        """
        head = BertStyleMaskedLMHead(self, head_name, activation_function=activation_function)
        self.add_prediction_head(head, overwrite_ok=overwrite_ok)

    def add_causal_lm_head(self, head_name, activation_function="gelu", overwrite_ok=False):
        """
        Adds a causal language modeling head on top of the model.

        Args:
            head_name (str): The name of the head.
            activation_function (str, optional): Activation function. Defaults to 'gelu'.
            overwrite_ok (bool, optional): Force overwrite if a head with the same name exists. Defaults to False.
        """
        head = CausalLMHead(
            self, head_name, layers=2, activation_function=activation_function, layer_norm=True, bias=True
        )
        self.add_prediction_head(head, overwrite_ok=overwrite_ok)
class DistilBertModelWithHeads(DistilBertAdapterModel):
    """Deprecated alias of :class:`DistilBertAdapterModel`.

    Kept for backwards compatibility; every construction entry point emits a
    ``FutureWarning`` pointing at the renamed base class.
    """

    def __init__(self, *args, **kwargs):
        warnings.warn(
            "This class has been renamed to `{}` in v3. "
            "Please use the new class instead as this class might be removed in a future version.".format(
                self.__class__.__bases__[0].__name__
            ),
            FutureWarning,
        )
        super().__init__(*args, **kwargs)

    @classmethod
    def from_config(cls, config):
        warnings.warn(
            "This class has been renamed to `{}` in v3. "
            "Please use the new class instead as this class might be removed in a future version.".format(
                cls.__bases__[0].__name__
            ),
            FutureWarning,
        )
        return super().from_config(config)

    @classmethod
    def from_pretrained(cls, pretrained_model_name_or_path, *model_args, **kwargs):
        warnings.warn(
            "This class has been renamed to `{}` in v3. "
            "Please use the new class instead as this class might be removed in a future version.".format(
                cls.__bases__[0].__name__
            ),
            FutureWarning,
        )
        return super().from_pretrained(pretrained_model_name_or_path, *model_args, **kwargs)
| [
"[email protected]"
] | |
0e8e1d1fa3c2116e867073461282f3f321b8439b | ac1b2a94702f2a46435d11c529dc70145093320d | /snake python/snake.py | 59e5c487cca36390398b0b168d2c1d107441aac3 | [] | no_license | Savagekenya/snake-game---python | b788524bb850ab0be086acb52f3193b70dcaf216 | b411b38030abe8a757cb276b17b6b6b6380741df | refs/heads/main | 2023-08-14T03:43:16.641891 | 2021-09-27T18:02:55 | 2021-09-27T18:02:55 | 410,990,618 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,700 | py | import curses
from random import randint
# setup window
curses.initscr()
win = curses.newwin(20, 60, 0, 0)  # height, width, begin_y, begin_x
win.keypad(1)
curses.noecho()
# Bug fix: the original referenced curses.curs_set without calling it (a
# no-op attribute access); curs_set(0) actually hides the cursor.
curses.curs_set(0)
win.border(0)
win.nodelay(1)  # non-blocking getch(): returns -1 when no key is pressed

# snake and food: the snake is a list of (y, x) cells, head first
snake = [(4, 10), (4, 9), (4, 8)]
food = (10, 20)
win.addch(food[0], food[1], '#')

# game logic
score = 0
ESC = 27
key = curses.KEY_RIGHT
while key != ESC:
    win.addstr(0, 2, 'Score' + str(score) + ' ')
    # shrink the input timeout as the snake grows, speeding the game up
    # NOTE(review): '% 120' binds only to len(snake)//10 — confirm intended
    win.timeout(150 - (len(snake)) // 5 + len(snake) // 10 % 120)
    prev_key = key
    event = win.getch()
    key = event if event != -1 else prev_key
    if key not in [curses.KEY_LEFT, curses.KEY_RIGHT, curses.KEY_UP, curses.KEY_DOWN, ESC]:
        key = prev_key
    # advance the head one cell in the current direction
    y = snake[0][0]
    x = snake[0][1]
    if key == curses.KEY_DOWN:
        y += 1
    if key == curses.KEY_UP:
        y -= 1
    if key == curses.KEY_LEFT:
        x -= 1
    if key == curses.KEY_RIGHT:
        x += 1
    snake.insert(0, (y, x))  # new head; tail popped below unless food eaten
    # check if we hit the border
    if y == 0: break
    if y == 19: break
    if x == 0: break
    if x == 59: break
    # if snake runs over itself
    if snake[0] in snake[1:]: break
    if snake[0] == food:
        # eat the food and respawn it on a free cell
        score += 1
        food = ()
        while food == ():
            food = (randint(1, 18), randint(1, 58))
            if food in snake:
                food = ()
        win.addch(food[0], food[1], '#')
    else:
        # no food: drop the tail so the snake keeps its length
        last = snake.pop()
        win.addch(last[0], last[1], ' ')
    win.addch(snake[0][0], snake[0][1], '*')
curses.endwin()
print(f"Final score = {score}")
print(f"Final score = {score}") | [
"[email protected]"
] | |
34c6b5a3e9929c4caefdb3d38794d5be3e66dadb | 7671e76cc3abb76f910eacb9b8782e0cc19b871e | /app/paciente/admin.py | 42170f575e46a591ef8f460741450a64dcf45013 | [] | no_license | bruno-ralmeida/PI_2020.1 | 28b2378efa203945987ae95605efa31b75fcb9a3 | c97e3579c20d5022ff6bc89b617e19d73a78b5b0 | refs/heads/master | 2022-09-22T07:13:09.427756 | 2020-06-01T02:01:00 | 2020-06-01T02:01:00 | 246,153,264 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 311 | py | from django.contrib import admin
from paciente.models import *
class lista_pacientes(admin.ModelAdmin):
    """Changelist configuration for Paciente in the Django admin."""
    # columns shown in the patient list view
    list_display = ('id', 'nome', 'sexo', 'data_nascimento', 'cpf', 'rg', 'carteira_convenio', 'peso', 'altura')
    # columns that link through to the edit form
    list_display_links = ('id', 'nome')


admin.site.register(Paciente, lista_pacientes)
"[email protected]"
] | |
7906f052dd2f4b5c78b0ad9a075bbf3e537a9015 | 0657bf55957f17eed000e41d08c04e3a5d29c637 | /user/views.py | a788ac703d5deba12e28964256055118bb7d3c95 | [] | no_license | ckant96/UserRegistrationApp | feb11439bf87d1b8c6ae6076c95aba07425fb501 | 2d377b09c46ddd90190341e3f1f8fefcf17930d4 | refs/heads/master | 2022-12-24T14:45:56.149559 | 2020-10-03T12:41:17 | 2020-10-03T12:41:17 | 300,821,509 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,969 | py | from django.shortcuts import render
from django.http import HttpResponse
from django.http import HttpResponseRedirect
from django.template import loader
from django.db import connection
from django.contrib import auth
from django.contrib.auth import authenticate,login,logout
from django.contrib.auth.models import User
from django.contrib.auth.decorators import login_required
def UserLogin(request):
    """Log a user in, checking Django auth AND a legacy raw-SQL `User` table.

    NOTE(review): the raw query matches a plaintext password column
    (`User.password`) — a security risk; and its columns (user_id/password)
    differ from the (username, password) columns UserRegister inserts.
    Confirm the intended schema before touching this logic.
    """
    if request.method == 'POST':
        username = request.POST.get('username')
        password = request.POST.get('password')
        # First gate: Django's own credential check
        authent = authenticate(username=username, password=password)
        if authent:
            # Second gate: the legacy table must also contain the pair
            with connection.cursor() as cursor:
                cursor.execute("SELECT name FROM User where user_id=%s and password=%s", [username, password])
                row = cursor.fetchall()
            if row:
                login(request, authent)
                # NOTE(review): 'userId' is 'Invalid Credentials' even on
                # success — presumably leftover; verify against the template.
                return render(request=request, template_name="UserPage.html", context={'Status': 'successful','userId':'Invalid Credentials'})
            else:
                return render(request=request, template_name="Login.html",context={'Status': 'Unsuccessful','userId':'Invalid Credentials'})
        else:
            return render(request, 'Login.html', {'Status': 'Unsuccessful','userId':'Invalid Credentials'})
    # Non-POST requests (and the POST fall-through) re-render the login form
    return render(request=request, template_name="Login.html", context={'Status': 'Unsuccessful','userId':'Invalid Credentials'})
def UserRegister(request):
    """Register a new account: validate the form, reject duplicate usernames,
    then store the user both in the legacy `User` table and in Django auth.

    NOTE(review): the legacy table stores the password in plaintext, and its
    columns (username, password) differ from the (user_id, password) columns
    that UserLogin queries — confirm the intended schema.
    """
    if request.method == 'POST':
        uname = request.POST.get('username')
        upass = request.POST.get('password')
        cname = request.POST.get('confirm_password')
        if not uname or not upass or not cname:
            return render(request=request, template_name="Registration.html", context={'Status': 'Invalid entries'})
        # Bug fix: compare the password with its confirmation. The original
        # compared the *username* to the confirmation field, so registration
        # only proceeded when username == confirm_password.
        if upass != cname:
            return render(request=request, template_name="Registration.html", context={'Status': 'Password Mismatch.'})
        with connection.cursor() as cursor:
            cursor.execute("SELECT username FROM auth_user where username = %s ", [uname])
            already = cursor.fetchall()
        if already:
            return render(request=request, template_name="Registration.html", context={'Status': 'UserName Exists.'})
        else:
            with connection.cursor() as cursor:
                cursor.execute("INSERT into User (username,password) VALUES(%s,%s)", [uname, upass])
            User.objects.create_user(uname, None, upass)
            return render(request=request, template_name="Registration.html", context={'Status': 'account Created'})
    return render(request=request, template_name="Registration.html", context={'Status': 'account Creation failed'})
@login_required()
def SaveUserNote(request, user):
    """Append a note for *user*, creating the UserNotes row on first save.

    Notes are stored as one '+'-separated string, matching the split('+')
    that ShowUserNote performs when rendering them.

    NOTE(review): the SELECT filters on `username` while the INSERT writes a
    `userId` column — one of the two names is presumably wrong; confirm the
    UserNotes schema.
    """
    if request.method == 'POST':
        notes = request.POST.get('user_notes')
        with connection.cursor() as cursor:
            # Fetch only the notes column so row[0] is the text itself
            # (the original SELECT * made row[0] a whole row tuple, breaking
            # the string concatenation below).
            cursor.execute("SELECT notes from UserNotes where username= %s ", [user])
            row = cursor.fetchone()
            if row:
                # append new note to old note string, '+'-separated
                newnote = row[0] + '+' + notes
                # Bug fix: the original UPDATE had no WHERE clause and
                # overwrote every user's notes.
                cursor.execute(" UPDATE UserNotes set notes=%s where username=%s", [newnote, user])
            else:
                cursor.execute(" INSERT into UserNotes(userId,notes) VALUES(%s,%s)", [user, notes])
        return render(request=request, template_name="UserNotes.html", context={'Status': 'Success'})
    return render(request=request, template_name="UserNotes.html", context={'Status': 'UnSuccess'})
@login_required()
def ShowUserNote(request, user):
    """Render the user's notes as a list (stored as one '+'-separated string).

    NOTE(review): non-GET requests fall through and return None, which Django
    rejects — presumably the URL route only allows GET; confirm.
    """
    if request.method == 'GET':
        with connection.cursor() as cursor:
            # Fetch the notes column itself: the original SELECT * with
            # fetchall() made row[0] a row *tuple*, so row[0].split('+')
            # raised AttributeError.
            cursor.execute("SELECT notes from UserNotes where username= %s ", [user])
            row = cursor.fetchone()
        note_list = []
        if row:
            note_list = row[0].split('+')
        return render(request=request, template_name="UserNotes.html", context={'Notes': note_list})
| [
"[email protected]"
] | |
8f36b2278c2882473204ef1853a2761c11c5d2c3 | a05fc652bbc854b39767d7fd56116bf775b471c9 | /modules/entitysets/_genericblock.py | 233662b0c7299cc210cf71befdef18cc6623b435 | [
"Zlib"
] | permissive | Occuliner/ThisHackishMess | 20235a2fe23f5a54f7eff495242d6c58dd3e2eda | a801d6a5988705b0d77a4c57b0737012f244fa1d | refs/heads/master | 2020-11-26T20:55:56.391909 | 2013-12-27T23:35:59 | 2013-12-27T23:35:59 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,188 | py | # Copyright (c) 2013 Connor Sherson
#
# This software is provided 'as-is', without any express or implied
# warranty. In no event will the authors be held liable for any damages
# arising from the use of this software.
#
# Permission is granted to anyone to use this software for any purpose,
# including commercial applications, and to alter it and redistribute it
# freely, subject to the following restrictions:
#
# 1. The origin of this software must not be misrepresented; you must not
# claim that you wrote the original software. If you use this software
# in a product, an acknowledgment in the product documentation would be
# appreciated but is not required.
#
# 2. Altered source versions must be plainly marked as such, and must not be
# misrepresented as being the original software.
#
# 3. This notice may not be removed or altered from any source
# distribution.
from entity import Entity
import pygame
from imageload import loadImage, loadImageNoAlpha
from masterentityset import *
import math
class GenericBlock( Entity ):
    """A solid, collidable 32x32 block entity drawn from block.png."""
    # Sprite sheet scaling factor and pixel size of the sprite.
    scale = 2
    width = 32
    height = 32
    # Collision box: same size as the sprite, no offset.
    bWidth = width
    bHeight = height
    bdx = 0
    bdy = 0
    # "Walk" bounding box: bottom half of the sprite (offset 16px down).
    wbWidth = 32
    wbHeight = 16
    wbdx = 0
    wbdy = 16
    # Group/set registration used by the play state.
    playStateGroup = "genericStuffGroup"
    setName = "genericstuff"
    sheetFileName = "block.png"
    # Loaded once at class-definition time and shared by all instances.
    sheet = loadImage( sheetFileName, scale )
    specialCollision = None
    collidable = True
    solid = True
    mass = 20
    # Populated lazily on first instantiation; see __init__.
    instanceSpecificVars = None
    def __init__( self, pos = [0,0], vel = [0,0], group=None, **kwargs ):
        # NOTE(review): pos/vel use mutable default arguments, which are shared
        # across calls — confirm callers never mutate them in place.
        Entity.__init__( self, pos, [0,0], None, group, pygame.Rect( 0, 0, self.width, self.height ), animated=False, **kwargs )
        if GenericBlock.instanceSpecificVars is None:
            attrList = list( self.__dict__.keys() )
        # NOTE(review): attrList is captured *after* Entity.__init__, so this
        # comprehension filters out every attribute just set and the dict is
        # seemingly always empty; the second identical check is also redundant.
        # Presumably mirrors a pattern from sibling entity classes — confirm.
        if GenericBlock.instanceSpecificVars is None:
            GenericBlock.instanceSpecificVars = dict( [ ( eachKey, eachVal ) for eachKey, eachVal in self.__dict__.items() if eachKey not in attrList ] )
    def update( self, dt ):
        # Delegate per-frame behaviour entirely to the base Entity.
        Entity.update( self, dt )
entities = { "GenericBlock":GenericBlock }
| [
"[email protected]"
] | |
cfbc87a6930bb46dff53098c47e8ea101e0f2f22 | 9931cff13b996620f7f486fa38e441ca5096aa16 | /starter.py | 81f2ca11aa8a198e249d5f2e246cf3441d7352d6 | [] | no_license | andregossip/D7048E | 429e3cdb5d41e9339371a4ea708302b4960cd058 | 35f98652c5aa7323abdf0cfd5c9a8d574b3f76ae | refs/heads/main | 2023-03-12T22:12:59.275467 | 2021-03-05T13:31:50 | 2021-03-05T13:31:50 | 337,079,827 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 133 | py | import os
import subprocess
if __name__ == '__main__':
    # Launch both scripts as independent child processes.
    # Fix: pass the command as an argument list. A single string without
    # shell=True only works on Windows; a list works on every platform
    # and avoids shell injection concerns.
    subprocess.Popen(["python", "main.py"])
    subprocess.Popen(["python", "menu.py"])
| [
"[email protected]"
] | |
bcd3a9bfd5974d395977712da8223f748238c14e | 9da620b96436873caa4d0739d0ea1d062d4f1869 | /Código/P1.py | ab19a55de6693687b58eb5e5f44562f62922fb2e | [
"MIT"
] | permissive | RodrigoHevia/03Tarea | 744dd1ce855354f95a4d6f023575ce35aef75a77 | 50a20b5864946092573ef91dc0b375cad3aff7f6 | refs/heads/master | 2021-01-15T12:10:40.788090 | 2015-10-11T02:45:58 | 2015-10-11T02:45:58 | 43,832,852 | 0 | 0 | null | 2015-10-07T17:30:44 | 2015-10-07T17:30:44 | null | UTF-8 | Python | false | false | 1,886 | py | #! /usr/bin/env python
'''
El siguiente script resuelve la ecuacion diferencial del oscilador de Van der Pol
luego de un cambio de variable, mediante el metodo de Runge Kutta de orden 3.
'''
import matplotlib.pyplot as plt
import numpy as np
fig = plt.figure(1)
plt.clf()
def rk3_2(f, s0, sf, y0, v0, n):
    '''
    Integrate the second-order ODE y'' = f(s, y, y') with Kutta's classical
    third-order Runge-Kutta method on n equally spaced points of [s0, sf],
    starting from y(s0) = y0 and y'(s0) = v0.

    Returns (s, y, v): the grid, the solution, and its derivative.

    Fixes over the previous version:
    * the step h now matches the np.linspace spacing, (sf-s0)/(n-1) rather
      than (sf-s0)/n, so the returned grid and the integration agree;
    * the third stage is actually used (l3 was computed but ignored) and the
      stages/weights follow the standard third-order tableau
      y_{i+1} = y_i + (k1 + 4*k2 + k3)/6.
    '''
    s = np.linspace(s0, sf, n, endpoint=True)
    y = np.zeros(n)
    v = np.zeros(n)
    h = (sf - s0) / (n - 1)
    y[0] = y0
    v[0] = v0
    for i in range(1, n):
        si, yi, vi = s[i - 1], y[i - 1], v[i - 1]
        # Stage 1: left endpoint.
        k1 = h * vi
        l1 = h * f(si, yi, vi)
        # Stage 2: midpoint.
        k2 = h * (vi + 0.5 * l1)
        l2 = h * f(si + 0.5 * h, yi + 0.5 * k1, vi + 0.5 * l1)
        # Stage 3: right endpoint (Kutta's tableau uses y - k1 + 2*k2).
        k3 = h * (vi - l1 + 2 * l2)
        l3 = h * f(si + h, yi - k1 + 2 * k2, vi - l1 + 2 * l2)
        y[i] = yi + (k1 + 4 * k2 + k3) / 6
        v[i] = vi + (l1 + 4 * l2 + l3) / 6
    return s, y, v
def f(s, y, v):
    """Right-hand side of the Van der Pol equation in first-order form.

    Given position y and velocity v = dy/ds, returns dv/ds for the
    oscillator with damping parameter mu^2 = 1.699 (s itself is unused).
    """
    mu_sq = 1.699
    return -y - mu_sq * (y ** 2 - 1) * v
# Integrate the Van der Pol oscillator for two initial conditions.
s1, y1, v1 = rk3_2(f, 0, 20*np.pi, 0.1, 0, 1000)
# Fix: the second run previously unpacked into v1, clobbering the first
# run's velocities; subplot 223 then plotted y1 against the *second* run's
# derivative and v2 never existed. Use v2 for the second run.
s2, y2, v2 = rk3_2(f, 0, 20*np.pi, 4.0, 0, 1000)
# Solution y(s) for the small-amplitude start.
plt.subplot("221")
plt.plot(s1, y1, color = 'b')
plt.xlabel('s')
plt.ylabel('y')
plt.title('dy/ds=0.0;y=0.1')
plt.axhline(0, color = 'k')
plt.legend()
# Solution y(s) for the large-amplitude start.
plt.subplot("222")
plt.plot(s2, y2, color = 'b')
plt.xlabel('s')
plt.ylabel('y')
plt.title('dy/ds=0.0;y=4.0')
plt.axhline(0, color = 'k')
plt.legend()
# Phase portrait (y, dy/ds) for the first run.
plt.subplot("223")
plt.plot(y1, v1, color = 'r')
plt.xlabel('y')
plt.ylabel('dy/ds')
plt.axhline(0, color = 'k')
plt.legend()
# Phase portrait for the second run (now uses the correct v2).
plt.subplot("224")
plt.plot(y2, v2, color = 'r')
plt.xlabel('y')
plt.ylabel('dy/ds')
plt.axhline(0, color = 'k')
plt.draw()
plt.show()
fig.savefig('Oscilador de Van der Pol u*=1.699.jpg')
| [
"[email protected]"
] | |
6737c474b191d70378ee488dbc6534c3ce98e435 | cefda53c9bc29fbb3a1994126c859dd26f884ebe | /quantumcomputation.py | 849fd1a8d74ebe6f530b88b02f6710e396e6f45a | [] | no_license | trammell/qc-sim | 588620462041654e034582889e4e009f93891e61 | 86d648795ffcc01c971602ed1168c8a869ef122c | refs/heads/master | 2020-04-26T02:11:50.176485 | 2016-10-14T13:08:44 | 2016-10-14T13:08:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,200 | py | from google.appengine.ext import db
import math
import random
class QC:
    """A tiny state-vector simulator for quantum circuits.

    The state is a dense list of 2**n complex amplitudes. ``status`` tracks
    simulation progress as a percentage for external polling via ``guid``.
    """
    # Progress percentage (0..100) of the current simulation.
    status = 0.0
    guid = ''
    def __init__(self, guid):
        self.status = 0.0
        self.guid = guid
    def getStatus(self):
        # Progress is exposed as a string for the caller (presumably a web
        # endpoint keyed by guid — confirm with the calling code).
        return str(self.status)
    '''
    ' Creates input state
    '
    '''
    def createState(self, schema):
        # The part of the schema before '}' lists the initial qubit values,
        # e.g. "{0,1,0}". The bit string is reversed so qubit 0 is the
        # least-significant bit of the basis-state index x.
        x = int(schema.split('}')[0][1:].replace(',', '')[::-1], base = 2)
        n = len(schema.split('}')[0][1:].replace(',', ''))
        N = 2 ** n
        # Start with the zero vector, then set amplitude 1 on basis state x.
        state = []
        for i in range(N):
            state.append(complex(0, 0))
        state[x] = complex(1, 0)
        self.status = 0
        return state, n, N
    '''
    ' Applies cirsuit scheme to the input
    '
    '''
    def applyScheme(self, state, schema, n):
        # The part of the schema after '}' is a comma-separated gate list;
        # each consecutive group of n entries is one "tact" (circuit column).
        gates = schema.split('}')[1][1:].split(',')
        for t in range(0, len(gates), n):
            tact = gates[t : t + n]
            # find the control bit if exists
            control = []
            for i in range(len(tact)):
                if tact[i] == '*':
                    control.append(i)
            # Apply gates in tact
            qbit = 0
            for gate in tact:
                state = self.applyGate(state, qbit, n, gate, control)
                qbit += 1
            # Advance progress by one tact's worth of gates.
            self.status += 100.0 / len(gates)
        return state
    '''
    ' Applies the gate
    '
    '''
    def qubitGate(self, c_i, result, i, n, qbit, A, B, C, D):
        # Accumulate the contribution of amplitude c_i (basis state i) through
        # the single-qubit matrix [[A, B], [C, D]] acting on qubit `qbit`.
        # i2 is the partner basis state that differs only in that qubit.
        base = 2 ** qbit
        if i & base: # qubit is |1> so B,D column is applied
            result[i] = result[i] + c_i * D
            i2 = i - base
            result[i2] = result[i2] + c_i * B
        else: # qubit is |0> so A,C column is applied
            result[i] = result[i] + c_i * A
            i2 = i + base
            result[i2] = result[i2] + c_i * C
        return result
    def applyGate(self, state, qbit, n, gate, control):
        # skip on identity and control symbol
        if gate == 'I' or gate == '*':
            return state
        # main cycle
        N = 2 ** n
        result = []
        for i in range(N):
            result.append(complex(0,0))
        for i in range(N):
            # check if basis vector i is in superposition
            if state[i] == complex(0, 0):
                continue
            # check if gate is controlled and if any of control bits is 0, then do nothing
            res_control = True
            for c in control:
                if not (2 ** c & i):
                    res_control = False
            if not res_control:
                # Controlled gate inactive for this basis state: copy through.
                result[i] = state[i]
                continue
            # at last, do the computation
            if gate == 'H':
                # Hadamard: (1/sqrt(2)) * [[1, 1], [1, -1]].
                sqrt2 = complex(1 / math.sqrt(2), 0)
                result = self.qubitGate(state[i], result, i, n, qbit, sqrt2, sqrt2, sqrt2, -sqrt2)
            if gate[0] == 'R':
                # Phase rotation R_k: diag(1, exp(2*pi*i / 2**k)).
                angle = 2 * math.pi / 2 ** int(gate[1])
                phase = complex(math.cos(angle), math.sin(angle))
                result = self.qubitGate(state[i], result, i, n, qbit, complex(1,0), complex(0,0), complex(0,0), phase)
            if gate[0] == 'Z':
                # Pauli-Z: diag(1, -1).
                result = self.qubitGate(state[i], result, i, n, qbit, complex(1,0), complex(0,0), complex(0,0), complex(-1,0))
            if gate[0] == 'X':
                # Pauli-X (NOT): [[0, 1], [1, 0]].
                result = self.qubitGate(state[i], result, i, n, qbit, complex(0,0), complex(1,0), complex(1,0), complex(0,0))
        return result
    def measure(self, state):
        # Collapse the state: convert amplitudes to probabilities in place,
        # then sample one basis state by inverse-CDF on a uniform draw.
        # NOTE(review): this mutates the caller's state list — confirm callers
        # do not reuse it afterwards.
        for i in range(len(state)):
            state[i] = state[i].real * state[i].real + state[i].imag * state[i].imag
        r = random.random()
        temp = 0
        for i in range(len(state)):
            temp += state[i]
            if temp >= r:
                return bin(i)
        # Fallback for accumulated floating-point round-off: return the last
        # basis state.
        return bin(len(state) - 1)
'''
' Measurement
'
'''
def bin(n):
    """Render the low 20 bits of *n*, most-significant bit first.

    NOTE: intentionally keeps the name of (and shadows) the built-in
    ``bin`` because existing callers use this fixed-width form.
    """
    return format(n & 0xFFFFF, '020b')
| [
"[email protected]"
] | |
35cbb93289fa49079d18580c55167b424379240f | 163889b6dae2de23d34b11c0f75874365b672ee1 | /Gaming Laptop Battery Life.py | 58d86fb63c571735b8787194c88a05971e213ad9 | [] | no_license | LawrenceAD/HackerRank-Certification | fbb40821bd0c9abbcfab01febf4778329f87566e | 2ba84eaa11c9782544a599b510549087b05af166 | refs/heads/master | 2023-06-19T21:25:17.858046 | 2021-06-13T07:22:35 | 2021-06-13T07:22:35 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 205 | py | def getBattery(events):
c=50
for i in events:
if (i<0):
c+=i
else:
c+=i
if c>100:
c=100
print(c)
return c
| [
"[email protected]"
] | |
7533be52505a6d76dd6507b4fea541ecd3e82986 | b803dd98721dcf0af04bf6f322c82a90254d7bf1 | /test HW/calcu.py | aed15807bbe856e38ac6b6e86a63af00604a2172 | [] | no_license | DanTeegan/python_tdd_pytest | f418ac6d5cf0dd771caaa7df1054c689c1caf0d7 | f966269e552dbc7a5ffd4e5950c67de5de749af1 | refs/heads/master | 2022-11-13T10:37:57.645505 | 2020-07-07T19:21:10 | 2020-07-07T19:21:10 | 277,508,969 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 663 | py | # Here we are importing math into our file
import math
# Here we are creating a class called Calcu
class Calcu:
    """A thin convenience wrapper around a few ``math`` helpers."""

    def find_sqrt(self, num2):
        """Return the square root of *num2*."""
        return math.sqrt(num2)

    def find_floor(self, num2):
        """Round *num2* down to the nearest integer."""
        return math.floor(num2)

    def find_ceil(self, num2):
        """Round *num2* up to the nearest integer."""
        return math.ceil(num2)
# Here we are creating an object from the Calcu class
simple_calcu = Calcu()
# Here we are just printing using the methods created to see if they work.
print(simple_calcu.find_ceil(102.8))
print(simple_calcu.find_floor(1001.4)) | [
"[email protected]"
] | |
a5dad3f329e90d477f7e4c2267e433ba19d67069 | 1bf7be4ba56a2f08ceb2c3f28612e597e9948ef2 | /python/8kyu/Returning Strings.py | 565514860e0cd68a904534739b7e1970d82b8386 | [] | no_license | SaraKenig/codewars-solutions | c5fd499118ff006d5c420e292f33f3f0a05f8681 | 5c63a2387e2d86debce5f78e3d7fb387c87f998e | refs/heads/master | 2022-12-19T21:33:48.056749 | 2020-10-01T09:08:43 | 2020-10-01T09:08:43 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 306 | py | # Make a function that will return a greeting statement that uses an input; your program should return, "Hello, <name> how are you doing today?".
# [Make sure you type the exact thing I wrote or the program may not execute properly]
def greet(name):
    """Return the required greeting sentence for *name*."""
    return "Hello, " + name + " how are you doing today?"
| [
"[email protected]"
] | |
6604e29b58bc7c32c533d2d6036d69e717c399fb | c22e094912582f99a96f2a63cfd1f266111239db | /config.py | bd8b59c17e54b6d7817e939698a2cddfac46d9f4 | [] | no_license | syned13/ProjectManagementAPI | 42cab4660227f9afb48bcca410ecc858c0f702ad | f30ba766efc2c1a29bf17f03d2f5dc7f3cd97b35 | refs/heads/master | 2021-05-17T17:20:57.674142 | 2020-04-05T23:08:47 | 2020-04-05T23:08:47 | 250,892,968 | 1 | 0 | null | 2021-05-06T20:01:40 | 2020-03-28T21:03:02 | Python | UTF-8 | Python | false | false | 168 | py | class Config():
pass
class DevelopmentConfig(Config):
DEBUG = True
#dict para las diferentes configuraciones
config = {
'development': DevelopmentConfig
} | [
"[email protected]"
] | |
0ffc66b60d96357a82eedef5d3aebf8cd7c2ad4a | 0e1b7780cfc63579dd1f0a12cfc0d92317b7227b | /test.py | 9a3a6c7a4d9eab9a44dfae1588676f36a698bc9c | [] | no_license | psicktrick/weights_optimizer | 3d30e0224d1a9dc79d641a3784397d435de9d81b | 821202698a96af25b8bf1ea0c65c06684169016d | refs/heads/master | 2023-05-03T21:12:18.507593 | 2021-05-24T06:08:32 | 2021-05-24T06:08:32 | 370,034,424 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 12,194 | py | import pandas as pd
import numpy as np
from ga_weights_optimizer import WeightsOptimizer
from sklearn.linear_model import LinearRegression, LassoCV, Ridge, LassoLarsCV,ElasticNetCV
from sklearn.model_selection import cross_val_score
import warnings
warnings.filterwarnings('ignore')
from sklearn.model_selection import train_test_split
from copy import deepcopy
class CreateModels:
    def __init__(self):
        # Load the Kaggle house-prices CSVs, build train/test splits, and
        # pre-compute each base model's test-set predictions for the blender.
        # NOTE(review): hard-coded, user-specific Windows paths — consider
        # parameterising them.
        self.train = pd.read_csv(r"C:\Users\sickt\Downloads\data\train.csv")
        self.test = pd.read_csv(r"C:\Users\sickt\Downloads\data\test.csv")
        self.X_train, self.X_test, self.y_train, self.y_test = self.preprocess_data()
        self.predictions = self.get_predictions()
def get_predictions(self):
lassocv = LassoCV(eps=1e-7)
ridge = Ridge(alpha=1e-6)
lassolarscv = LassoLarsCV()
elasticnetcv = ElasticNetCV(eps=1e-15)
lassocv.fit(self.X_train, self.y_train)
ridge.fit(self.X_train, self.y_train)
lassolarscv.fit(self.X_train, self.y_train)
elasticnetcv.fit(self.X_train, self.y_train)
lassocv_pred = lassocv.predict(self.X_test)
ridge_pred = ridge.predict(self.X_test)
lassolarscv_pred = lassolarscv.predict(self.X_test)
elasticnetcv_pred = elasticnetcv.predict(self.X_test)
df=pd.DataFrame()
df["Lasso"] = lassocv_pred
df["Ridge"] = ridge_pred
df["LassoLars"] = lassolarscv_pred
df["Elasticnetcv"] = elasticnetcv_pred
df["Y"] = self.y_test.reset_index(drop=True)
return df
    def preprocess_data(self):
        """Clean the Ames housing training data and return a train/test split.

        Steps: drop Id and two well-known outliers, impute missing values
        (mostly "absent feature" semantics), encode ordinal categories as
        integer codes, one-hot encode nominal categories, log-transform
        skewed numeric features and the target, then split 67/33.
        Returns (X_train, X_test, y_train, y_test).
        """
        train = self.train.drop(labels=["Id"], axis=1)
        # test = self.test.drop(labels=["Id"], axis=1)
        # Remove the two large-but-cheap houses (standard Ames outliers).
        train = train.drop(train[(train['GrLivArea'] > 4000) & (train['SalePrice'] < 300000)].index).reset_index(drop=True)
        train_len = len(train)
        # dataset = pd.concat(objs=[train, test], axis=0).reset_index(drop=True)
        dataset = train.fillna(np.nan)
        # For most categorical columns, NaN means the feature is absent ("No").
        dataset["Alley"] = dataset["Alley"].fillna("No")
        dataset["MiscFeature"] = dataset["MiscFeature"].fillna("No")
        dataset["Fence"] = dataset["Fence"].fillna("No")
        dataset["PoolQC"] = dataset["PoolQC"].fillna("No")
        dataset["FireplaceQu"] = dataset["FireplaceQu"].fillna("No")
        dataset["Utilities"] = dataset["Utilities"].fillna("AllPub")
        dataset["BsmtCond"] = dataset["BsmtCond"].fillna("No")
        dataset["BsmtQual"] = dataset["BsmtQual"].fillna("No")
        dataset["BsmtFinType2"] = dataset["BsmtFinType2"].fillna("No")
        dataset["BsmtFinType1"] = dataset["BsmtFinType1"].fillna("No")
        # Houses without a basement get zero for all basement measurements.
        dataset.loc[dataset["BsmtCond"] == "No", "BsmtUnfSF"] = 0
        dataset.loc[dataset["BsmtFinType1"] == "No", "BsmtFinSF1"] = 0
        dataset.loc[dataset["BsmtFinType2"] == "No", "BsmtFinSF2"] = 0
        dataset.loc[dataset["BsmtQual"] == "No", "TotalBsmtSF"] = 0
        dataset.loc[dataset["BsmtCond"] == "No", "BsmtHalfBath"] = 0
        dataset.loc[dataset["BsmtCond"] == "No", "BsmtFullBath"] = 0
        dataset["BsmtExposure"] = dataset["BsmtExposure"].fillna("No")
        # Remaining categoricals: impute with the mode of the column.
        dataset["SaleType"] = dataset["SaleType"].fillna("WD")
        dataset["MSZoning"] = dataset["MSZoning"].fillna("RL")
        dataset["KitchenQual"] = dataset["KitchenQual"].fillna("TA")
        dataset["GarageType"] = dataset["GarageType"].fillna("No")
        dataset["GarageFinish"] = dataset["GarageFinish"].fillna("No")
        dataset["GarageQual"] = dataset["GarageQual"].fillna("No")
        dataset["GarageCond"] = dataset["GarageCond"].fillna("No")
        # Houses without a garage: year defaults to the house's build year,
        # size measurements default to zero.
        dataset.loc[dataset["GarageType"] == "No", "GarageYrBlt"] = dataset["YearBuilt"][dataset["GarageType"] == "No"]
        dataset.loc[dataset["GarageType"] == "No", "GarageCars"] = 0
        dataset.loc[dataset["GarageType"] == "No", "GarageArea"] = 0
        dataset["GarageArea"] = dataset["GarageArea"].fillna(dataset["GarageArea"].median())
        dataset["GarageCars"] = dataset["GarageCars"].fillna(dataset["GarageCars"].median())
        dataset["GarageYrBlt"] = dataset["GarageYrBlt"].fillna(dataset["GarageYrBlt"].median())
        dataset["Functional"] = dataset["Functional"].fillna("Typ")
        dataset["Exterior2nd"] = dataset["Exterior2nd"].fillna("VinylSd")
        dataset["Exterior1st"] = dataset["Exterior1st"].fillna("VinylSd")
        dataset["Electrical"] = dataset["Electrical"].fillna("SBrkr")
        dataset["MasVnrType"] = dataset["MasVnrType"].fillna("None")
        dataset.loc[dataset["MasVnrType"] == "None", "MasVnrArea"] = 0
        # MSSubClass and MoSold are codes, not quantities: relabel as strings
        # so get_dummies treats them as categories.
        dataset = dataset.replace({'MSSubClass': {20: 'SubClass_20', 30: 'SubClass_30', 40: 'SubClass_40',
                                                  45: 'SubClass_45', 50: 'SubClass_50', 60: 'SubClass_60',
                                                  70: 'SubClass_70',
                                                  75: 'SubClass_75', 80: 'SubClass_80', 85: 'SubClass_85',
                                                  90: 'SubClass_90',
                                                  120: 'SubClass_120', 150: 'SubClass_150', 160: 'SubClass_160',
                                                  180: 'SubClass_180',
                                                  190: 'SubClass_190'}})
        dataset = dataset.replace({'MoSold': {1: 'Jan', 2: 'Feb', 3: 'Mar',
                                              4: 'Apr', 5: 'May', 6: 'Jun', 7: 'Jul', 8: 'Aug', 9: 'Sep', 10: 'Oct',
                                              11: 'Nov', 12: 'Dec'}})
        dataset['YrSold'] = dataset['YrSold'].astype(str)
        # Ordinal quality/condition columns: encode as ordered category codes.
        # NOTE(review): astype("category", categories=..., ordered=True) was
        # removed in newer pandas; this requires an old pandas version.
        dataset["BsmtCond"] = dataset["BsmtCond"].astype("category", categories=['No', 'Po', 'Fa', 'TA', 'Gd', 'Ex'],
                                                         ordered=True).cat.codes
        dataset["BsmtExposure"] = dataset["BsmtExposure"].astype("category", categories=['No', 'Mn', 'Av', 'Gd'],
                                                                 ordered=True).cat.codes
        dataset["BsmtFinType1"] = dataset["BsmtFinType1"].astype("category",
                                                                 categories=['No', 'Unf', 'LwQ', 'Rec', 'BLQ', 'ALQ',
                                                                             'GLQ'], ordered=True).cat.codes
        dataset["BsmtFinType2"] = dataset["BsmtFinType2"].astype("category",
                                                                 categories=['No', 'Unf', 'LwQ', 'Rec', 'BLQ', 'ALQ',
                                                                             'GLQ'], ordered=True).cat.codes
        dataset["BsmtQual"] = dataset["BsmtQual"].astype("category", categories=['No', 'Po', 'Fa', 'TA', 'Gd', 'Ex'],
                                                         ordered=True).cat.codes
        dataset["ExterCond"] = dataset["ExterCond"].astype("category", categories=['Po', 'Fa', 'TA', 'Gd', 'Ex'],
                                                           ordered=True).cat.codes
        dataset["ExterQual"] = dataset["ExterQual"].astype("category", categories=['Po', 'Fa', 'TA', 'Gd', 'Ex'],
                                                           ordered=True).cat.codes
        dataset["Fence"] = dataset["Fence"].astype("category", categories=['No', 'MnWw', 'GdWo', 'MnPrv', 'GdPrv'],
                                                   ordered=True).cat.codes
        dataset["FireplaceQu"] = dataset["FireplaceQu"].astype("category", categories=['No', 'Po', 'Fa', 'TA', 'Gd', 'Ex'],
                                                               ordered=True).cat.codes
        dataset["Functional"] = dataset["Functional"].astype("category",
                                                             categories=['Sal', 'Sev', 'Maj2', 'Maj1', 'Mod', 'Min2',
                                                                         'Min1', 'Typ'], ordered=True).cat.codes
        dataset["GarageCond"] = dataset["GarageCond"].astype("category", categories=['No', 'Po', 'Fa', 'TA', 'Gd', 'Ex'],
                                                             ordered=True).cat.codes
        dataset["GarageFinish"] = dataset["GarageFinish"].astype("category", categories=['No', 'Unf', 'RFn', 'Fin'],
                                                                 ordered=True).cat.codes
        dataset["GarageQual"] = dataset["GarageQual"].astype("category", categories=['No', 'Po', 'Fa', 'TA', 'Gd', 'Ex'],
                                                             ordered=True).cat.codes
        dataset["HeatingQC"] = dataset["HeatingQC"].astype("category", categories=['Po', 'Fa', 'TA', 'Gd', 'Ex'],
                                                           ordered=True).cat.codes
        dataset["KitchenQual"] = dataset["KitchenQual"].astype("category", categories=['Po', 'Fa', 'TA', 'Gd', 'Ex'],
                                                               ordered=True).cat.codes
        dataset["PavedDrive"] = dataset["PavedDrive"].astype("category", categories=['N', 'P', 'Y'], ordered=True).cat.codes
        dataset["PoolQC"] = dataset["PoolQC"].astype("category", categories=['No', 'Fa', 'TA', 'Gd', 'Ex'],
                                                     ordered=True).cat.codes
        dataset["Utilities"] = dataset["Utilities"].astype("category", categories=['ELO', 'NoSeWa', 'NoSewr', 'AllPub'],
                                                           ordered=True).cat.codes
        # One-hot encode the remaining nominal categoricals.
        dataset = pd.get_dummies(dataset, columns=["Alley", "BldgType", "CentralAir",
                                                   "Condition1", "Condition2", "Electrical", "Exterior1st", "Exterior2nd",
                                                   "Foundation",
                                                   "GarageType", "Heating", "HouseStyle", "LandContour", "LandSlope",
                                                   "LotConfig", "LotShape",
                                                   "MSZoning", "MasVnrType", "MiscFeature", "Neighborhood", "RoofMatl",
                                                   "RoofStyle",
                                                   "SaleCondition", "SaleType", "Street", "MSSubClass", 'MoSold', 'YrSold'],
                                 drop_first=True)
        # Drop two dummies that are near-constant in the training data.
        dataset = dataset.drop(labels=[ 'Condition2_PosN',
                                        'MSSubClass_SubClass_160'], axis=1)
        # log1p-transform the strongly right-skewed numeric features.
        skewed_features = ["BsmtFinSF1", "BsmtFinSF2", "BsmtUnfSF", "GarageArea", "MasVnrArea"
            , "TotalBsmtSF", "1stFlrSF", "2ndFlrSF", "3SsnPorch", "EnclosedPorch",
                           "GrLivArea", "LotArea", "LowQualFinSF", "OpenPorchSF", "PoolArea",
                           "ScreenPorch", "WoodDeckSF"]
        for feature in skewed_features:
            dataset[feature] = np.log1p(dataset[feature])
        # The models predict log(1 + SalePrice).
        dataset["SalePrice"] = np.log1p(dataset["SalePrice"])
        y = dataset["SalePrice"]
        X = dataset.drop(labels="SalePrice", axis=1)
        X = X.drop(labels="LotFrontage", axis=1)
        X_train, X_test, y_train, y_test = train_test_split(X, y, test_size = 0.33, random_state = 42)
        return X_train, X_test, y_train, y_test
def RMSE(self, estimator,X_train, Y_train, cv=5,n_jobs=4):
cv_results = cross_val_score(estimator,X_train,Y_train,cv=cv,scoring="neg_mean_squared_error",n_jobs=n_jobs)
return (np.sqrt(-cv_results)).mean()
def objective_function(self, weights):
pred = deepcopy(self.predictions)
pred["Lasso"] = weights[0]*pred["Lasso"]
pred["Ridge"] = weights[0] * pred["Ridge"]
pred["LassoLars"] = weights[0] * pred["LassoLars"]
pred["Elasticnetcv"] = weights[0] * pred["Elasticnetcv"]
pred["Y_pred"] = pred.iloc[:, 0:4].sum(axis=1)
fitness = -(sum((pred["Y_pred"] - pred["Y"])**2))**0.5
return fitness,
if __name__ == "__main__":
n=4
model = CreateModels()
simple_avg = model.objective_function([0.25, 0.25, 0.25, 0.25])
print("Simple Average Result :", [0.25, 0.25, 0.25, 0.25], simple_avg)
wo = WeightsOptimizer(n, model)
optimized_weights = wo.ga()
print("Optimized Average Result")
print(optimized_weights)
| [
"[email protected]"
] | |
a193d412aaca282768f56eb475eb1ea80e4e01b8 | 27e9b46526fcebdcb4de34b8e5bade7708f1ce35 | /examples/bigquery_country_codes.py | ad76e3e0c044a4a1ee2fe18a83b304f903e0bd1c | [
"MIT"
] | permissive | openknowledge-archive/datapackage-storage-py | 66b75f1eae6292348e479b4d51dd1aea5a09fc8d | 379acadf49b8382c9acc6ed54b3f9a2dbf221e05 | refs/heads/master | 2021-05-31T00:37:12.330573 | 2016-02-28T17:18:09 | 2016-02-28T18:52:36 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 621 | py | # -*- coding: utf-8 -*-
from __future__ import division
from __future__ import print_function
from __future__ import absolute_import
from __future__ import unicode_literals
import sys
from pprint import pprint
sys.path.insert(0, '.')
from examples import storages
# Fixtures: BigQuery dataset/table-prefix names and the source/target
# Data Package descriptors for the country-codes example. The prefix embeds
# the Python version so parallel CI runs do not collide.
dataset = 'datapackage'
prefix = 'country_codes_%s_%s_' % (sys.version_info.major, sys.version_info.minor)
source = 'examples/packages/country-codes/datapackage.json'
target = 'tmp/bigquery/packages/country-codes/datapackage.json'
# Execution: round-trip the package through the BigQuery storage backend.
if __name__ == '__main__':
    storages.bigquery.run(dataset, prefix, source, target, 'package')
| [
"[email protected]"
] | |
60a1f5d492ff853f1d32e4a985007dd7221a56f2 | 7f95850837db3baf1d85d857b57fb35130fc94a3 | /venv/Scripts/easy_install-script.py | 5458acc5b655a87ac5b112bed4bc7ad68cfa7550 | [] | no_license | SNGxx/tencent-ai-demo | 94c0616fdf6b5ec84a8bb4cbd0791de5e94e7ade | dea999e7642d999bff143e87a3c33bbec48c356d | refs/heads/master | 2020-03-22T14:23:29.889868 | 2018-07-08T14:36:14 | 2018-07-08T14:36:14 | 140,175,094 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 436 | py | #!E:\PyCharm-wokspace\test\venv\Scripts\python.exe
# EASY-INSTALL-ENTRY-SCRIPT: 'setuptools==39.1.0','console_scripts','easy_install'
__requires__ = 'setuptools==39.1.0'
import re
import sys
from pkg_resources import load_entry_point
if __name__ == '__main__':
    # Auto-generated setuptools console-script wrapper — do not edit by hand.
    # Strip the platform-specific wrapper suffix ("-script.py" / ".exe") from
    # argv[0] so the tool sees its canonical name, then dispatch to the
    # registered 'easy_install' entry point and exit with its return code.
    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
    sys.exit(
        load_entry_point('setuptools==39.1.0', 'console_scripts', 'easy_install')()
    )
| [
"[email protected]"
] | |
30f6f989789276a07e60d2fe5e33cb7751012f7a | 0599162ae41c54db4514a6929cbf8a9a410beb43 | /src/ophys_etl/schemas/fields.py | 3de11173fad0b623967a857fecdda984835f548c | [
"BSD-2-Clause"
] | permissive | Matyasz/ophys_etl_pipelines | 26ca0e42781109a839e25497d3e73c3be30cfc72 | e8473fff85cfba816ac17a661783b52f9de8ac8b | refs/heads/main | 2023-02-17T20:29:45.102503 | 2020-11-05T23:54:56 | 2020-11-05T23:54:56 | 327,538,144 | 0 | 0 | null | 2021-01-07T07:37:47 | 2021-01-07T07:37:46 | null | UTF-8 | Python | false | false | 832 | py | from marshmallow import fields, ValidationError
import h5py
class H5InputFile(fields.Str):
    """
    Marshmallow string field holding the path of an HDF5 input file.

    Validation requires the path to end in '.h5' or '.hdf5' and the file to
    be openable with `h5py.File` in read mode.
    """
    def _validate(self, value):
        path = str(value)
        if not path.endswith((".h5", ".hdf5")):
            raise ValidationError("H5 input file must have extension '.h5' "
                                  f"or '.hdf5'. Input file = {value}")
        try:
            # Open (and immediately close) the file to prove it is valid HDF5.
            with h5py.File(value, "r"):
                pass
        except OSError as e:
            raise ValidationError(f"Error occurred loading file {value}. "
                                  f"Underlying error: \nOSError: - {e}")
| [
"[email protected]"
] | |
498011cf5a5816384528de6b8fdeb73f62a829c6 | 5c13c04df863cb13f9fa28a815c75865a464978c | /rosout_clean | 2c099a79fb386804484d6a0de8c70695b6be50aa | [] | no_license | COD3BOY/probablyscripts | 116532efdd8059ecdad1879130519f0d4a3d3cc6 | 451bc079ea1f55f8594f085285d0457bff2be233 | refs/heads/master | 2020-03-18T04:19:11.991363 | 2018-01-25T20:50:30 | 2018-01-25T20:50:30 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 301 | #!/usr/bin/python
import re
# Match a rosconsole INFO line such as "[ INFO] [123.456]: message<ESC>...",
# capturing the message text up to the first ESC byte (chr(27)) — presumably
# the start of a trailing ANSI escape (colour) sequence; confirm against the
# actual rosout output format.
pattern = '.*\\[ INFO\\] \\[.*?\\]: (.*)' + chr(27)
rg = re.compile(pattern, re.IGNORECASE | re.DOTALL)
# Filter stdin line by line (Python 2 script: raw_input / print statement).
while(True):
    try:
        s = raw_input()
    except EOFError:
        # Upstream pipe closed: leave the filter loop cleanly.
        break
    m = rg.search(s)
    if m:
        # INFO line: emit only the captured message payload.
        print m.groups()[0]
    else:
        # Non-matching lines pass through unchanged.
        print s
| [
"[email protected]"
] | ||
94344abb60e3bf99a831886f6f3d88662ef74e31 | 47d4c7ab61991390ca2aea36f61c456aeddaada0 | /data.py | 69f2ec977b3b2f87aeae7bb686348758ee373ad1 | [] | no_license | RuslanPopov98/Flask_Project_2 | 768ac2a888251b43ba363de4688d0ea5dff5d3e2 | 201486f0482192b78a878c87b4527724bbf2e4de | refs/heads/master | 2023-07-26T15:02:35.859905 | 2021-09-08T20:54:37 | 2021-09-08T20:54:37 | 404,457,040 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 26,098 | py | goals = {"travel": "Для путешествий", "study": "Для учебы", "work": "Для работы", "relocate": "Для переезда"}
# Weekday keys (as used in each teacher's "free" availability table below)
# mapped to their Russian display names.
days_week = {"mon": "Понедельник", "tue": "Вторник", "wed": "Среда", "thu": "Четверг", "fri": "Пятница",
             "sat": "Суббота", "sun": "Воскресенье"}
# Lesson purposes: internal key -> Russian display label.
# NOTE(review): "Для раобты" looks like a typo for "Для работы" — confirm with
# the UI owners before changing this user-visible string.
purpose_lesson = {"travel": "Для путешествия", "learn": "Для школы", "work": "Для раобты", "move": "Для переезда"}
teachers = [
{
"id": 0,
"name": "Morris Simmmons",
"about": "Репетитор американского английского языка. Структурированная система обучения. Всем привет! Я "
"предпочитаю называть себя «тренером» английского языка. Мои занятия похожи на тренировки",
"rating": 4.2,
"picture": "https://i.pravatar.cc/300?img=20",
"price": 900,
"goals": ["travel", "relocate", "study"],
"free": {
"mon": {"8:00": False, "10:00": True, "12:00": True, "14:00": False, "16:00": False, "18:00": False,
"20:00": False, "22:00": False},
"tue": {"8:00": True, "10:00": True, "12:00": False, "14:00": False, "16:00": False, "18:00": False,
"20:00": False, "22:00": False},
"wed": {"8:00": True, "10:00": True, "12:00": False, "14:00": False, "16:00": False, "18:00": False,
"20:00": False, "22:00": False},
"thu": {"8:00": True, "10:00": True, "12:00": False, "14:00": False, "16:00": False, "18:00": False,
"20:00": False, "22:00": False},
"fri": {"8:00": True, "10:00": True, "12:00": False, "14:00": False, "16:00": False, "18:00": False,
"20:00": False, "22:00": False},
"sat": {"8:00": False, "10:00": False, "12:00": False, "14:00": False, "16:00": True, "18:00": True,
"20:00": True, "22:00": False},
"sun": {"8:00": False, "10:00": False, "12:00": False, "14:00": False, "16:00": True, "18:00": True,
"20:00": True, "22:00": False},
}
},
{
"id": 1,
"name": "Lee P",
"about": "I am a native speaker and conversation tutor, providing private English conversation lessons using "
"something called Life Learning.This method allows students to take complete control of how and what "
"they learn. It is student-led learning focusing on interests, life goals, enjoyment and effective "
"learning for you, as an individual.Stop wasting time with textbooks, tests and unneccesary "
"pressure. Find a love for learning and speaking English with creativity and freedom. The lessons "
"are completely chosen by you to keep you motivated and driven to achieve your goals.",
"rating": 4.8,
"picture": "https://i.pravatar.cc/300?img=19",
"price": 1200,
"goals": ["relocate", "study"],
"free": {
"mon": {"8:00": False, "10:00": False, "12:00": False, "14:00": False, "16:00": True, "18:00": True,
"20:00": True, "22:00": True},
"tue": {"8:00": False, "10:00": False, "12:00": False, "14:00": False, "16:00": True, "18:00": True,
"20:00": True, "22:00": True},
"wed": {"8:00": False, "10:00": False, "12:00": False, "14:00": False, "16:00": True, "18:00": True,
"20:00": True, "22:00": True},
"thu": {"8:00": False, "10:00": False, "12:00": False, "14:00": False, "16:00": True, "18:00": True,
"20:00": True, "22:00": True},
"fri": {"8:00": False, "10:00": False, "12:00": False, "14:00": False, "16:00": False, "18:00": False,
"20:00": False, "22:00": False},
"sat": {"8:00": False, "10:00": False, "12:00": False, "14:00": False, "16:00": False, "18:00": False,
"20:00": False, "22:00": False},
"sun": {"8:00": False, "10:00": False, "12:00": False, "14:00": False, "16:00": False, "18:00": False,
"20:00": False, "22:00": False},
}
},
{
"id": 2,
"name": "Felix A",
"about": "The English language has become the language of the world, thus, it is considered a world language. "
"Today English seems to evolve to a future global tongue, as its spreading on the Internet in recent "
"years shows (almost 80% of the worldwide web's pages are now written in English). scientific "
"researchers have found out that in fact many small languages have already vanished.But to teach it "
"in a satisfactory manner a good teacher of English is required.xA good teacher of English must "
"possess some qualities.Business, General and conversational English",
"picture": "https://i.pravatar.cc/300?img=27",
"rating": 4.7,
"price": 1300,
"goals": ["work"],
"free": {
"mon": {"8:00": False, "10:00": False, "12:00": False, "14:00": False, "16:00": False, "18:00": False,
"20:00": False, "22:00": False},
"tue": {"8:00": True, "10:00": True, "12:00": True, "14:00": True, "16:00": True, "18:00": True,
"20:00": True, "22:00": True},
"wed": {"8:00": True, "10:00": True, "12:00": True, "14:00": True, "16:00": True, "18:00": True,
"20:00": True, "22:00": True},
"thu": {"8:00": True, "10:00": True, "12:00": True, "14:00": True, "16:00": True, "18:00": True,
"20:00": True, "22:00": True},
"fri": {"8:00": True, "10:00": True, "12:00": True, "14:00": True, "16:00": True, "18:00": True,
"20:00": True, "22:00": True},
"sat": {"8:00": False, "10:00": False, "12:00": False, "14:00": False, "16:00": False, "18:00": False,
"20:00": False, "22:00": False},
"sun": {"8:00": False, "10:00": False, "12:00": False, "14:00": False, "16:00": False, "18:00": False,
"20:00": False, "22:00": False},
}
},
{
"id": 3,
"name": "Milan S",
"about": "I have a wide range of interests, believe me, you will never be bored during our lesson. I worked "
"with lawyers, doctors, biologists and many others to help them improve their English in their "
"respective fields. Since I spent my whole life satisfying my curiosity, I acquired a huge "
"vocabulary that I can pass on to you.",
"picture": "https://i.pravatar.cc/300?img=28",
"rating": 4.9,
"price": 1300,
"goals": ["travel", "study"],
"free": {
"mon": {"8:00": False, "10:00": False, "12:00": False, "14:00": False, "16:00": False, "18:00": False,
"20:00": False, "22:00": False},
"tue": {"8:00": False, "10:00": False, "12:00": False, "14:00": False, "16:00": False, "18:00": False,
"20:00": False, "22:00": False},
"wed": {"8:00": False, "10:00": False, "12:00": False, "14:00": False, "16:00": False, "18:00": False,
"20:00": False, "22:00": False},
"thu": {"8:00": False, "10:00": False, "12:00": False, "14:00": False, "16:00": False, "18:00": False,
"20:00": False, "22:00": False},
"fri": {"8:00": False, "10:00": False, "12:00": False, "14:00": False, "16:00": False, "18:00": False,
"20:00": False, "22:00": False},
"sat": {"8:00": False, "10:00": False, "12:00": False, "14:00": False, "16:00": False, "18:00": False,
"20:00": False, "22:00": False},
"sun": {"8:00": False, "10:00": False, "12:00": False, "14:00": False, "16:00": False, "18:00": False,
"20:00": False, "22:00": False},
}
},
{
"id": 4,
"name": "Gulya S",
"about": "Hello! My name is Gulya :) I am a native speaker of the Russian language and I am fluent in "
"English. I have been teaching online for 3 years now. I have an individual program, having studied "
"your requirements, I am preparing a special program. plan, and therefore deal) Books Cambridge, "
"Oxford, etc. I train and develop colloquial speech. We study words, stable combinations and put "
"them into practice. We speak and try to speak :) on different topics. We listen to audio lessons, "
"watch films with subtitles. We analyze everything on the shelves :) In parallel, of course, "
"we study the basics of grammar and the correct delivery of sentences :) All the materials are "
"provided by me. I promise you that you will talk from the first first lesson :)",
"picture": "https://i.pravatar.cc/300?img=29",
"rating": 4.3,
"price": 900,
"goals": ["travel"],
"free": {
"mon": {"8:00": False, "10:00": False, "12:00": False, "14:00": False, "16:00": True, "18:00": True,
"20:00": True, "22:00": True},
"tue": {"8:00": False, "10:00": False, "12:00": False, "14:00": False, "16:00": True, "18:00": True,
"20:00": True, "22:00": True},
"wed": {"8:00": False, "10:00": False, "12:00": False, "14:00": False, "16:00": True, "18:00": True,
"20:00": True, "22:00": True},
"thu": {"8:00": False, "10:00": False, "12:00": False, "14:00": False, "16:00": True, "18:00": True,
"20:00": True, "22:00": True},
"fri": {"8:00": False, "10:00": False, "12:00": False, "14:00": False, "16:00": False, "18:00": False,
"20:00": False, "22:00": False},
"sat": {"8:00": False, "10:00": False, "12:00": False, "14:00": False, "16:00": False, "18:00": False,
"20:00": False, "22:00": False},
"sun": {"8:00": False, "10:00": False, "12:00": False, "14:00": False, "16:00": False, "18:00": False,
"20:00": False, "22:00": False},
}
},
{
"id": 5,
"name": "Yan M",
"about": "Hello! My name is Yang and for more than five years I have been teaching English. I spent part of "
"this time in China, where I worked with students from 3 to 40 years old. I deal with both adults "
"and children. But for all ages, I try to make my classes fun and interactive. Teaching English to "
"me is not just a language lesson. I always try to attract a wider cultural and historical context "
"that helps my students understand more about the language and its features. A degree in history "
"helps me a lot to create such an intellectual environment in the classroom.For each student, "
"I develop an individual curriculum that depends on its goals and needs.",
"picture": "https://i.pravatar.cc/300?img=30",
"rating": 3.9,
"price": 800,
"goals": ["travel", "study"],
"free": {
"mon": {"8:00": False, "10:00": False, "12:00": False, "14:00": False, "16:00": True, "18:00": True,
"20:00": True, "22:00": True},
"tue": {"8:00": False, "10:00": False, "12:00": False, "14:00": False, "16:00": True, "18:00": True,
"20:00": True, "22:00": True},
"wed": {"8:00": False, "10:00": False, "12:00": False, "14:00": False, "16:00": True, "18:00": True,
"20:00": True, "22:00": True},
"thu": {"8:00": False, "10:00": False, "12:00": False, "14:00": False, "16:00": True, "18:00": True,
"20:00": True, "22:00": True},
"fri": {"8:00": False, "10:00": False, "12:00": False, "14:00": False, "16:00": False, "18:00": False,
"20:00": False, "22:00": False},
"sat": {"8:00": False, "10:00": False, "12:00": False, "14:00": False, "16:00": False, "18:00": False,
"20:00": False, "22:00": False},
"sun": {"8:00": False, "10:00": False, "12:00": False, "14:00": False, "16:00": False, "18:00": False,
"20:00": False, "22:00": False},
}
},
{
"id": 6,
"name": "Eran E",
"about": "Hello, my name is Eran & I am a friendly native English speaker. I am an experienced English "
"teacher with a neutral accent that is easily understood. This is because I have grown up living in "
"13 countries across 4 continents. They include England, America, Australia & Japan. Presently, "
"I live in Lisbon, Portugal. While I am primarily focused on 1 to 1 tuition, I’ve previously taught "
"classes with as many as 50 students at a time. My students have ranged from 12 to 70 years old. "
"From Israeli middle schoolers all the way through to Thai Government officials. As a result, "
"I’ve learned a wide variety of teaching methods. Currently, I'm taking students with English level: "
"B2 onwards, as well as those who are interested in long-term growth and multiple lessons.I "
"understand how hard it can be to learn another language. That's why my teaching style is fun, "
"constructive & easy-going. Lessons will be tailored to meet your needs & goals. Through my lessons, "
"you will gain the confidence to speak English in your daily life.",
"picture": "https://i.pravatar.cc/300?img=32",
"rating": 4.5,
"price": 1200,
"goals": ["travel", "relocate", "study"],
"free": {
"mon": {"8:00": False, "10:00": False, "12:00": False, "14:00": False, "16:00": True, "18:00": True,
"20:00": True, "22:00": True},
"tue": {"8:00": False, "10:00": False, "12:00": False, "14:00": False, "16:00": True, "18:00": True,
"20:00": True, "22:00": True},
"wed": {"8:00": False, "10:00": False, "12:00": False, "14:00": False, "16:00": True, "18:00": True,
"20:00": True, "22:00": True},
"thu": {"8:00": False, "10:00": False, "12:00": False, "14:00": False, "16:00": True, "18:00": True,
"20:00": True, "22:00": True},
"fri": {"8:00": False, "10:00": False, "12:00": False, "14:00": False, "16:00": False, "18:00": False,
"20:00": False, "22:00": False},
"sat": {"8:00": False, "10:00": False, "12:00": False, "14:00": False, "16:00": False, "18:00": False,
"20:00": False, "22:00": False},
"sun": {"8:00": False, "10:00": False, "12:00": False, "14:00": False, "16:00": False, "18:00": False,
"20:00": False, "22:00": False},
}
},
{
"id": 7,
"name": "Mr. Mark",
"about": "My lessons are fun and practical, but most importantly, we are going to be extremely productive. I "
"believe that the best way to master English is through EXECUTION. Less theory, more practice. Lots "
"of practice. Our goal is to achieve maximum involvement and focus on the subject. Schools trained "
"us to be very passive. Sit quietly by yourself, be lectured to, just consume information. THAT is "
"not how we are going to learn English.",
"picture": "https://i.pravatar.cc/300?img=33",
"rating": 4.5,
"price": 1100,
"goals": ["study"],
"free": {
"mon": {"8:00": False, "10:00": False, "12:00": False, "14:00": False, "16:00": True, "18:00": True,
"20:00": True, "22:00": True},
"tue": {"8:00": False, "10:00": False, "12:00": False, "14:00": False, "16:00": True, "18:00": True,
"20:00": True, "22:00": True},
"wed": {"8:00": False, "10:00": False, "12:00": False, "14:00": False, "16:00": True, "18:00": True,
"20:00": True, "22:00": True},
"thu": {"8:00": False, "10:00": False, "12:00": False, "14:00": False, "16:00": True, "18:00": True,
"20:00": True, "22:00": True},
"fri": {"8:00": False, "10:00": False, "12:00": False, "14:00": False, "16:00": False, "18:00": False,
"20:00": False, "22:00": False},
"sat": {"8:00": False, "10:00": False, "12:00": False, "14:00": False, "16:00": False, "18:00": False,
"20:00": False, "22:00": False},
"sun": {"8:00": False, "10:00": False, "12:00": False, "14:00": False, "16:00": False, "18:00": False,
"20:00": False, "22:00": False},
}
},
{
"id": 8,
"name": "Skye L.",
"about": "Hello, My name is Skye. I’m from London in the United Kingdom but I am currently living in Japan. I "
"have a TEFL certificate which I acquired last year. Since moving to Japan I have been teaching some "
"of my Japanese friends English. I think learning should be fun and engaging and even though English "
"can be difficult to learn I aim to make it enjoyable.I enjoy watching football and travelling. I do "
"a lot of Yoga in my spare time and I can't wait to meet you!",
"picture": "https://i.pravatar.cc/300?img=35",
"rating": 5,
"price": 1700,
"goals": ["relocate", "work"],
"free": {
"mon": {"8:00": False, "10:00": False, "12:00": False, "14:00": False, "16:00": True, "18:00": True,
"20:00": True, "22:00": True},
"tue": {"8:00": False, "10:00": False, "12:00": False, "14:00": False, "16:00": True, "18:00": True,
"20:00": True, "22:00": True},
"wed": {"8:00": False, "10:00": False, "12:00": False, "14:00": False, "16:00": True, "18:00": True,
"20:00": True, "22:00": True},
"thu": {"8:00": False, "10:00": False, "12:00": False, "14:00": False, "16:00": True, "18:00": True,
"20:00": True, "22:00": True},
"fri": {"8:00": False, "10:00": False, "12:00": False, "14:00": False, "16:00": False, "18:00": False,
"20:00": False, "22:00": False},
"sat": {"8:00": False, "10:00": False, "12:00": False, "14:00": False, "16:00": False, "18:00": False,
"20:00": False, "22:00": False},
"sun": {"8:00": False, "10:00": False, "12:00": False, "14:00": False, "16:00": False, "18:00": False,
"20:00": False, "22:00": False},
}
},
{
"id": 9,
"name": "Syeta Y.",
"about": "Hello and welcome to my profile learners of the wonderful world of the English language! I am a "
"certified native English teacher with an A in TEFL. Learning something new should be fun and "
"exciting and not something you’re dragging your feet into doing which is why I believe a little fun "
"and humour plays a huge part in the learning process and also the development of a healthy and "
"enjoyable relationship between us.I am also currently trying to learn a new language and so I know "
"from my own experience how daunting or sometimes challenging it can be but please remember I’m here "
"to work with you and not against you. We can work together on pronunciation, reading, "
"conversational English, homework you may have from school or college, slang, in fact on any subject "
"area you enjoy or want to develop as when you’re enjoying the learning process you’re learning "
"without even realising.",
"picture": "https://i.pravatar.cc/300?img=36",
"rating": 4.1,
"price": 1200,
"goals": ["work"],
"free": {
"mon": {"8:00": False, "10:00": False, "12:00": False, "14:00": False, "16:00": True, "18:00": True,
"20:00": True, "22:00": True},
"tue": {"8:00": False, "10:00": False, "12:00": False, "14:00": False, "16:00": True, "18:00": True,
"20:00": True, "22:00": True},
"wed": {"8:00": False, "10:00": False, "12:00": False, "14:00": False, "16:00": True, "18:00": True,
"20:00": True, "22:00": True},
"thu": {"8:00": False, "10:00": False, "12:00": False, "14:00": False, "16:00": True, "18:00": True,
"20:00": True, "22:00": True},
"fri": {"8:00": False, "10:00": False, "12:00": False, "14:00": False, "16:00": False, "18:00": False,
"20:00": False, "22:00": False},
"sat": {"8:00": False, "10:00": False, "12:00": False, "14:00": False, "16:00": False, "18:00": False,
"20:00": False, "22:00": False},
"sun": {"8:00": False, "10:00": False, "12:00": False, "14:00": False, "16:00": False, "18:00": False,
"20:00": False, "22:00": False},
}
},
{
"id": 10,
"name": "Salman S",
"about": "I motivate and guide students to achieve their goals. It depends on what is the problem they are "
"facing. Sometimes they just want to practice speaking to improve their fluency. Sometimes it's more "
"complicated which is a language barrier and I need to boost their confidence. Some are coming with "
"a specific task to pass an exam like IELTS and TOEFL. Moreover some are seeking to improve their "
"business skills and business conversation. Sometimes they need to pass the interview in English. "
"According to their requirements I have materials and programs to help them to achieve their desired "
"goals. My vast experience of teaching plays a vital role as well.",
"picture": "https://i.pravatar.cc/300?img=37",
"rating": 4.7,
"price": 1100,
"goals": ["travel", "study", "work"],
"free": {
"mon": {"8:00": False, "10:00": False, "12:00": False, "14:00": False, "16:00": True, "18:00": True,
"20:00": True, "22:00": True},
"tue": {"8:00": False, "10:00": False, "12:00": False, "14:00": False, "16:00": True, "18:00": True,
"20:00": True, "22:00": True},
"wed": {"8:00": False, "10:00": False, "12:00": False, "14:00": False, "16:00": True, "18:00": True,
"20:00": True, "22:00": True},
"thu": {"8:00": False, "10:00": False, "12:00": False, "14:00": False, "16:00": True, "18:00": True,
"20:00": True, "22:00": True},
"fri": {"8:00": False, "10:00": False, "12:00": False, "14:00": False, "16:00": False, "18:00": False,
"20:00": False, "22:00": False},
"sat": {"8:00": False, "10:00": False, "12:00": False, "14:00": False, "16:00": False, "18:00": False,
"20:00": False, "22:00": False},
"sun": {"8:00": False, "10:00": False, "12:00": False, "14:00": False, "16:00": False, "18:00": False,
"20:00": False, "22:00": False},
}
},
{
"id": 11,
"name": "Andrew G",
"about": "Hi guys, My name is Andrew and I am an English teacher from the USA currently living now in "
"Atlanta, Georgia.My teaching experience ranges from 1 on 1 to groups, children to adults, "
"in-person or online. IMPORTANT*** Although I have experience teaching Children, right now I'm only "
"teaching Adults through Conversational English. This is my specialty and I do this through focusing "
"mainly on Accent Reduction, Pronunciation, Speech Therapy, and improving one's Vocabulary.I have "
"been traveling and teaching since 2008 and my travels have really helped me be more culturally "
"aware, and relevant. I am fun and unique when it comes to teaching English, you won't that find my "
"classes anywhere else.",
"picture": "https://i.pravatar.cc/300?img=38",
"rating": 4.2,
"price": 900,
"goals": ["travel", "work"],
"free": {
"mon": {"8:00": False, "10:00": False, "12:00": False, "14:00": False, "16:00": True, "18:00": True,
"20:00": True, "22:00": True},
"tue": {"8:00": False, "10:00": False, "12:00": False, "14:00": False, "16:00": True, "18:00": True,
"20:00": True, "22:00": True},
"wed": {"8:00": False, "10:00": False, "12:00": False, "14:00": False, "16:00": True, "18:00": True,
"20:00": True, "22:00": True},
"thu": {"8:00": False, "10:00": False, "12:00": False, "14:00": False, "16:00": True, "18:00": True,
"20:00": True, "22:00": True},
"fri": {"8:00": False, "10:00": False, "12:00": False, "14:00": False, "16:00": False, "18:00": False,
"20:00": False, "22:00": False},
"sat": {"8:00": False, "10:00": False, "12:00": False, "14:00": False, "16:00": False, "18:00": False,
"20:00": False, "22:00": False},
"sun": {"8:00": False, "10:00": False, "12:00": False, "14:00": False, "16:00": False, "18:00": False,
"20:00": False, "22:00": False},
}
},
]
| [
"[email protected]"
] | |
c7dacfdb2f24e04f9fe94c315e29c112de6d8f4c | c18fc2aa58818788713f6865ff1c3c7b041a738d | /project2-master/Functionals/__init__.py | abd5bfa7d0ef0fba1c2f58bd7f87d704234722e8 | [] | no_license | maor90b/Bartov | 5b3df4049132ee233da7d58fff4cdc5d110e0ad7 | 50708bd3d9b85e1aaa5470bd8a80907057d0b9ae | refs/heads/master | 2022-11-26T03:14:27.653421 | 2020-07-29T14:40:43 | 2020-07-29T14:40:43 | 282,659,055 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 48 | py | str=input("enter: ")
# [::-1] is the idiomatic full-reverse slice (behaviourally identical to the
# original [-1::-1], including for the empty string).
# NOTE(review): `str` shadows the builtin here — rename to e.g. `text`
# together with the input() assignment above when convenient.
str = str[::-1]
print(str) | [
"[email protected]"
] | |
4495b2520174b3b77e0475b1ece2025a5553d02f | ba8fd32194162e373e6669a51d9d67edce251e10 | /Python/instances/gotic_3_3_10_ex1.py | cbf8fd1664f6956a29b7fabc6a916590811db45b | [] | no_license | avidhya06/SiteDepTRPTW | 805277445e55a408e96dcdece8292f2552ca428f | 0f400d8717fe7da48188cb133e119a69c3ccc1cd | refs/heads/master | 2023-03-16T09:58:35.938261 | 2019-01-05T01:04:56 | 2019-01-05T01:04:56 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,413 | py | data={
"name" : "gotic_3_3_10_ex1",
"nbTIC" : 3,
"nbJOB" : 10,
"nbCMP" : 3,
"speed" : 50,
"tics" : [
{ "id":"TIC_1", "x":11, "y":21, "t_start":480, "t_end":1080, "cmp_list":[1,2,3,] },
{ "id":"TIC_2", "x":26, "y":79, "t_start":480, "t_end":1080, "cmp_list":[1,2,3,] },
{ "id":"TIC_3", "x":55, "y":67, "t_start":480, "t_end":1080, "cmp_list":[1,2,3,] },
],
"jobs" : [
{ "id":"JOB_1", "x":58, "y":55, "t_min":990, "t_max":1079, "cmp":2, "dur":30, "day":1, "penal":100000},
{ "id":"JOB_2", "x":30, "y":19, "t_min":750, "t_max":839, "cmp":1, "dur":60, "day":1, "penal":100000},
{ "id":"JOB_3", "x":41, "y":45, "t_min":0, "t_max":1469, "cmp":1, "dur":30, "day":0, "penal":100000},
{ "id":"JOB_4", "x":58, "y":24, "t_min":840, "t_max":1019, "cmp":3, "dur":60, "day":1, "penal":100000},
{ "id":"JOB_5", "x":16, "y":9, "t_min":570, "t_max":689, "cmp":2, "dur":60, "day":1, "penal":100000},
{ "id":"JOB_6", "x":18, "y":32, "t_min":690, "t_max":809, "cmp":1, "dur":30, "day":1, "penal":100000},
{ "id":"JOB_7", "x":89, "y":100, "t_min":0, "t_max":1469, "cmp":2, "dur":60, "day":0, "penal":100000},
{ "id":"JOB_8", "x":57, "y":75, "t_min":0, "t_max":1469, "cmp":2, "dur":60, "day":0, "penal":100000},
{ "id":"JOB_9", "x":89, "y":60, "t_min":600, "t_max":779, "cmp":1, "dur":90, "day":1, "penal":100000},
{ "id":"JOB_10", "x":89, "y":3, "t_min":570, "t_max":629, "cmp":1, "dur":30, "day":1, "penal":100000},
]
}
| [
"[email protected]"
] | |
9f667c2714d1c53a42620dcf4ff00a13d613e2ae | 887f4beeb3ba480f3bcc7dff1e9eb3d61d445f9b | /console/common/captcha/models.py | 1ada08d10622a7562624b6259d9a90bf2fc2be17 | [] | no_license | wang-shun/console | ccaa8a1716e2ab6bf5ed6d1c4240cecd4e59f155 | 50425ff068c4795bf13bd178891da126f8677383 | refs/heads/master | 2020-04-04T18:15:14.382006 | 2018-07-09T14:42:42 | 2018-07-09T14:42:42 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,120 | py | # coding=utf-8
import datetime
import hashlib
import random
import time
from django.conf import settings
from django.db import models
from django.utils.encoding import smart_text
from console.common.captcha.conf import settings as captcha_settings
# Heavily based on session key generation in Django
# Use the system (hardware-based) random number generator if it exists.
if hasattr(random, 'SystemRandom'):
    # SystemRandom draws from os.urandom, so key material is not
    # reproducible from Mersenne Twister seed state.
    randrange = random.SystemRandom().randrange
else:
    randrange = random.randrange
# Exclusive upper bound for random key material:
# 18446744073709551616 == 2 << 63 == 2**64, i.e. the full 64-bit range.
MAX_RANDOM_KEY = 18446744073709551616  # 2 << 63
def get_safe_now():
    """Return the current datetime, timezone-aware when Django wants it.

    If ``django.utils.timezone.utc`` is importable and ``settings.USE_TZ``
    is true, returns an aware UTC datetime; otherwise falls back to a naive
    local ``datetime.datetime.now()``.
    """
    try:
        from django.utils.timezone import utc

        if settings.USE_TZ:
            return datetime.datetime.utcnow().replace(tzinfo=utc)
    except Exception:
        # Was a bare `except:`, which also swallowed SystemExit and
        # KeyboardInterrupt. `Exception` keeps the best-effort fallback
        # (expected failures: ImportError on old Django, configuration
        # errors while touching settings) but lets process-control
        # exceptions propagate.
        pass
    return datetime.datetime.now()
class CloudinCaptchaStore(models.Model):
    """Stored captcha: displayed challenge, expected response, an opaque
    lookup key handed to the client, and an expiry time.
    """

    class Meta:
        db_table = "capturestore"

    # Text rendered as the captcha challenge.
    challenge = models.CharField(max_length=32)
    # Expected answer; normalised to lower case in save() so comparisons
    # can be case-insensitive.
    response = models.CharField(max_length=32)
    # SHA-1 hex digest (40 chars) used by clients to reference this captcha.
    hashkey = models.CharField(max_length=40, unique=True)
    expiration = models.DateTimeField()

    def save(self, *args, **kwargs):
        # Canonicalise the answer for case-insensitive matching.
        self.response = self.response.lower()
        if not self.expiration:
            # Default lifetime comes from the CAPTCHA_TIMEOUT setting (minutes).
            self.expiration = get_safe_now() + datetime.timedelta(minutes=int(captcha_settings.CAPTCHA_TIMEOUT))
        if not self.hashkey:
            # Derive a unique, hard-to-guess key from randrange output
            # (SystemRandom-backed when available), the current time and the
            # captcha content. SHA-1 serves only as an opaque uniqueness
            # hash here, not as password hashing.
            key = (
                smart_text(randrange(0, MAX_RANDOM_KEY)) +
                smart_text(time.time()) +
                smart_text(self.challenge, errors='ignore') +
                smart_text(self.response, errors='ignore')
            ).encode('utf8')
            self.hashkey = hashlib.sha1(key).hexdigest()
        super(CloudinCaptchaStore, self).save(*args, **kwargs)

    def __unicode__(self):
        # Python 2-style string representation (py2-era Django codebase).
        return self.challenge

    @classmethod
    def remove_expired(cls):
        """Delete every stored captcha whose expiry time has passed."""
        cls.objects.filter(expiration__lte=get_safe_now()).delete()

    @classmethod
    def generate_key(cls):
        """Create a fresh challenge/response pair and return its hashkey."""
        challenge, response = captcha_settings.get_challenge()()
        store = cls.objects.create(challenge=challenge, response=response)
        return store.hashkey
| [
"[email protected]"
] | |
0a2c971002d678357c1f3f68e02ee8d526e1238e | 880b789a2ad03d322768d8f471bdacb0ce205de2 | /main.py | 71247da411af20ffa4ebaaa6ce5b9cbe88925ff2 | [] | no_license | chrisschulz131/NBA_Game_Predictions | 31ef440f0b3cbb7322f69c1f2886aeda9027b39d | 1603370e7c464db57ef16e7dbaf81de81252c0ff | refs/heads/main | 2023-03-02T12:37:22.770605 | 2021-02-16T00:44:37 | 2021-02-16T00:44:37 | 321,150,489 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 458 | py | """
This class will end up being the script to run the whole project.
"""
import pandas as pd
from sklearn import linear_model
from sklearn.preprocessing import LabelEncoder, OrdinalEncoder
from sklearn.compose import ColumnTransformer
from model.model_class import Model
if __name__ == '__main__':
    # Entry point: build the model from the scraped 2019-20 season CSV,
    # encode the categorical columns (teams and WL), then fit it.
    predictor = Model('data_scraping/2019-20-stats.csv')
    predictor.preprocess()
    predictor.train_model()
| [
"[email protected]"
] | |
15448d4c5289de6aea6418b0528790eadfe49f61 | 429a8441bb9730dcf0e33fedcb5f3672a731b3e7 | /xero_python/accounting/models/history_records.py | 60ca925cf57bc4fd2c2babc1cafa6f50af1e85cb | [
"MIT"
] | permissive | gregsteelxinja/xero-python | 1a26ec3b05ea156dd6848f2ec313c72e9f39b0e2 | d0473ba91099de3464b3dffa377df5a11ad95afc | refs/heads/master | 2022-12-16T10:54:11.424971 | 2020-09-01T01:00:23 | 2020-09-01T01:00:23 | 291,526,551 | 0 | 0 | null | 2020-08-30T18:16:48 | 2020-08-30T18:16:48 | null | UTF-8 | Python | false | false | 1,828 | py | # coding: utf-8
"""
Accounting API
No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501
OpenAPI spec version: 2.2.14
Contact: [email protected]
Generated by: https://openapi-generator.tech
"""
import re # noqa: F401
from xero_python.models import BaseModel
class HistoryRecords(BaseModel):
    """NOTE: This class is auto generated by OpenAPI Generator.
    Ref: https://openapi-generator.tech
    Do not edit the class manually.
    """
    """
    Attributes:
      openapi_types (dict): The key is attribute name
                            and the value is attribute type.
      attribute_map (dict): The key is attribute name
                            and the value is json key in definition.
    """
    # Python attribute -> declared OpenAPI type; read by BaseModel when
    # (de)serializing this model.
    openapi_types = {"history_records": "list[HistoryRecord]"}
    # Python attribute -> JSON key used on the wire.
    attribute_map = {"history_records": "HistoryRecords"}
    def __init__(self, history_records=None):  # noqa: E501
        """HistoryRecords - a model defined in OpenAPI"""  # noqa: E501
        self._history_records = None
        self.discriminator = None  # no polymorphic discriminator for this model
        if history_records is not None:
            self.history_records = history_records
    @property
    def history_records(self):
        """Gets the history_records of this HistoryRecords.  # noqa: E501
        :return: The history_records of this HistoryRecords.  # noqa: E501
        :rtype: list[HistoryRecord]
        """
        return self._history_records
    @history_records.setter
    def history_records(self, history_records):
        """Sets the history_records of this HistoryRecords.
        :param history_records: The history_records of this HistoryRecords.  # noqa: E501
        :type: list[HistoryRecord]
        """
        self._history_records = history_records
"[email protected]"
] | |
74e32978392857402ada1f0d62b495f4d755c7e6 | bf867f8da531e0f17a57f9d526de7164207f4ac1 | /Practico-04/Ejercicio-02.py | e250e74e894d7adf92ae0a0a097805cf204187f9 | [] | no_license | nawealvarez/frro-soporte-2018-13 | 4476b57ce241639297709cfc666b0ac7d07d18f2 | 0f7f6518ddcf560a1070fe49d7a95fa8a2c850c9 | refs/heads/master | 2021-04-15T06:02:49.424867 | 2018-10-17T00:53:31 | 2018-10-17T00:53:31 | 126,844,485 | 0 | 0 | null | 2018-07-27T18:44:10 | 2018-03-26T14:50:08 | Python | UTF-8 | Python | false | false | 2,768 | py | from tkinter import *
class Calculator:
    """Simple four-function calculator UI backed by a single Entry widget.

    The instance keeps its display widget in ``self.e``; every button
    callback reads and rewrites that widget's text.
    """

    def clearall(self):
        """Clear the display (AC button)."""
        self.e.delete(0, END)

    def action(self, num):
        """Append a digit or operator token to the display."""
        self.e.insert(END, num)

    def equal(self):
        """Evaluate the expression on the display and show the result.

        On a malformed expression, an unknown name, or division by zero,
        the display shows 'DATA ERROR' instead of crashing.
        """
        expression = self.e.get()
        try:
            # SECURITY NOTE: eval() executes arbitrary Python. Tolerable here
            # only because the expression is assembled from the calculator's
            # own buttons; never feed it untrusted input.
            result = eval(expression)
        except (SyntaxError, NameError, ZeroDivisionError):
            # BUGFIX: the original `except SyntaxError or NameError:`
            # evaluates to `except SyntaxError:` only, so NameError (and
            # ZeroDivisionError) crashed the callback. Also fixed the
            # 'DATA ERRROR' typo in the user-facing message.
            self.e.delete(0, END)
            self.e.insert(END, 'DATA ERROR')
        else:
            self.e.delete(0, END)
            self.e.insert(END, result)

    def __init__(self, ventana):
        """Build the display inside *ventana* (a Tk root window)."""
        ventana.title("Calculadora")
        ventana.geometry("300x190")
        # Text entry acting as the calculator display.
        self.e = Entry(ventana, width=40)
        self.e.grid(row=0, column=0, columnspan=4, pady=3)
        self.e.focus_set()
# Build the calculator window: every button funnels into the Calculator
# instance created here.
ventana = Tk()
objeto = Calculator(ventana)
# Number buttons 0-9 ("0" is gridded immediately, the rest below).
Button(ventana, text="0", width=8, height=2, command=lambda: objeto.action(0)).grid(column=1, row=4)
botUno = Button(ventana, text="1", width=8, height=2, command=lambda: objeto.action(1))
botDos = Button(ventana, text="2", width=8, height=2, command=lambda: objeto.action(2))
botTres = Button(ventana, text="3", width=8, height=2, command=lambda: objeto.action(3))
botCuatro = Button(ventana, text="4", width=8, height=2, command=lambda: objeto.action(4))
botCinco = Button(ventana, text="5", width=8, height=2, command=lambda: objeto.action(5))
botSeis = Button(ventana, text="6", width=8, height=2, command=lambda: objeto.action(6))
botSiete = Button(ventana, text="7", width=8, height=2, command=lambda: objeto.action(7))
botOcho = Button(ventana, text="8", width=8, height=2, command=lambda: objeto.action(8))
botNueve = Button(ventana, text="9", width=8, height=2, command=lambda: objeto.action(9))
# Operator buttons (the "x" button displays x but sends '*').
botSuma = Button(ventana, text="+", width=8, height=2, command=lambda: objeto.action('+'))
botResta = Button(ventana, text="-", width=8, height=2, command=lambda: objeto.action('-'))
botMultiplicacion = Button(ventana, text="x", width=8, height=2, command=lambda: objeto.action('*'))
botDivision = Button(ventana, text="/", width=8, height=2, command=lambda: objeto.action('/'))
botIgual = Button(ventana, text="=", width=8, height=2, command=objeto.equal)
Button(ventana, text="AC", width=8, height=2, command=objeto.clearall).grid(column=0, row=4)
# Button placement: classic 4x4 keypad layout.
botSiete.grid(column=0, row=1)
botOcho.grid(column=1, row=1)
botNueve.grid(column=2, row=1)
botSuma.grid(column=3, row=1)
botCuatro.grid(column=0, row=2)
botCinco.grid(column=1, row=2)
botSeis.grid(column=2, row=2)
botResta.grid(column=3, row=2)
botUno.grid(column=0, row=3)
botDos.grid(column=1, row=3)
botTres.grid(column=2, row=3)
botMultiplicacion.grid(column=3, row=3)
botDivision.grid(column=2, row=4)
botIgual.grid(column=3, row=4)
ventana.mainloop()
| [
"[email protected]"
] | |
94db800dce4d743a1eb095d92417e8298f1cbb8e | 45837f5bf44edf6692efbf256ed79c28a805f7ab | /make_plasmid_derep_db.py | 5319e938b6d901af0f9eb23b6617c887c4b977f7 | [] | no_license | JoshDaly/TrackMscripts | 799dcdecb97a70355cb918ca13c0eb5a6540885d | ec53bd3278d5c1e7fc1e8de8d857a61c2e324a8c | refs/heads/master | 2021-01-20T08:48:39.260779 | 2015-06-04T05:09:03 | 2015-06-04T05:09:03 | 31,794,600 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,732 | py | #!/usr/bin/env python
###############################################################################
#
# __make_plasmid_derep_db__.py - Make plasmid db from dereplicated IMG genomes!
#
###############################################################################
# #
# This program is free software: you can redistribute it and/or modify #
# it under the terms of the GNU General Public License as published by #
# the Free Software Foundation, either version 3 of the License, or #
# (at your option) any later version. #
# #
# This program is distributed in the hope that it will be useful, #
# but WITHOUT ANY WARRANTY; without even the implied warranty of #
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
# GNU General Public License for more details. #
# #
# You should have received a copy of the GNU General Public License #
# along with this program. If not, see <http://www.gnu.org/licenses/>. #
# #
###############################################################################
__author__ = "Josh Daly"
__copyright__ = "Copyright 2014"
__credits__ = ["Josh Daly"]
__license__ = "GPL3"
__version__ = "0.0.1"
__maintainer__ = "Josh Daly"
__email__ = ""
__status__ = "Development"
###############################################################################
# system imports
import argparse
import sys
import re
import glob
from multiprocessing import Pool
from subprocess import Popen, PIPE
from Bio import SeqIO
from Bio.Seq import Seq
#import os
#import errno
import numpy as np
np.seterr(all='raise')
#import matplotlib as mpl
#import matplotlib.pyplot as plt
#from mpl_toolkits.mplot3d import axes3d, Axes3D
#from pylab import plot,subplot,axis,stem,show,figure
# local imports
import trackm_file_parser as TFP
###############################################################################
###############################################################################
###############################################################################
###############################################################################
class PlasmidDB(object):
    """Builds a plasmid sequence database from dereplicated IMG genomes."""

    def __init__(self, path_file, taxon_file):
        # IMG ids of genomes kept after dereplication (dict used as a set).
        self.derep_genomes = {}
        # fasta description -> sequence for every plasmid record found.
        self.plasmid_genome = {}
        self.PD = TFP.PathsFileData(path_file)
        self.TD = TFP.TaxonomyData(taxon_file)

    def parseMetaData(self, derep_metadata):
        """Record the IMG ids listed in the dereplication metadata file.

        Lines starting with 't' (the header line) are skipped.
        """
        with open(derep_metadata) as fh:
            for l in fh:
                if l[0] != 't':
                    tabs = l.rstrip().split("\t")
                    self.derep_genomes[tabs[0]] = 1

    def parseIMGGenomes(self, directory, outfile):
        """Scan IMG genome fastas, keeping plasmid records of dereplicated genomes.

        When *outfile* is given, each plasmid record is also written to it
        in fasta format.
        """
        IMG_genomes = glob.glob('%s/*/*.fna' % directory)
        out_file = open(outfile, 'w') if outfile else None
        try:
            for fasta_file in IMG_genomes:
                # Only files named like an IMG taxon id, e.g. 650716001.fna.
                if not re.search('[0-9]+.fna', fasta_file):
                    continue
                img_genome = fasta_file.rstrip().split('/')[-1][:-4]
                if img_genome not in self.derep_genomes:
                    continue  # replicated genome, skip
                for fasta in SeqIO.parse(open(fasta_file), "fasta"):
                    if 'plasmid' in fasta.description.lower():
                        self.plasmid_genome[fasta.description] = fasta.seq
                        gid = self.PD.img_to_gid[img_genome]
                        if not self.checkIfUpdatedTaxonomy(gid):
                            print(gid)
                        if out_file:
                            out_file.write('>%s\n' % fasta.description)
                            out_file.write('%s\n' % fasta.seq)
        finally:
            # Bug fix: the original never closed the output file handle.
            if out_file:
                out_file.close()

    def checkIfUpdatedTaxonomy(self, gid):
        """Return True when *gid* has an entry in the updated taxonomy table."""
        try:
            self.TD.taxon_genus[gid]
            return True
        except KeyError:
            return False

    def averagePlasmidGenomeLength(self):
        """Print mean and standard deviation of the collected plasmid lengths."""
        cumulative_length = [len(seq) for seq in self.plasmid_genome.values()]
        if not cumulative_length:
            # Guard: np.average([]) raises under np.seterr(all='raise').
            print("No plasmid genomes collected")
            return
        cumulative_length_np = np.array(cumulative_length)
        print("Average dereplicated plasmid genome length %d" % np.average(cumulative_length_np))
        print("Standard deviation %d" % np.std(cumulative_length_np))

    def buildPlasmidDB(self, directory, derep_metadata, outfile):
        """End-to-end driver: read metadata, scan genomes, report stats."""
        # build dict of derep genomes
        self.parseMetaData(derep_metadata)
        # keep only plasmid records of dereplicated IMG genomes
        self.parseIMGGenomes(directory, outfile)
        # print summary to screen
        self.averagePlasmidGenomeLength()
###############################################################################
###############################################################################
###############################################################################
###############################################################################
def runCommand(cmd):
    """Run *cmd* and capture its stdout.

    Expects 'cmd' to be a simple string like "foo -b ar"; it is naively
    split on single spaces, so quoted arguments are not supported.
    Returns the (stdout, stderr) pair from communicate().
    """
    argv = cmd.split(' ')
    process = Popen(argv, stdout=PIPE)
    return process.communicate()
def doWork( args ):
    """Main wrapper: build a PlasmidDB from the parsed CLI args and run it."""
    builder = PlasmidDB(args.path_file, args.taxon_file)
    builder.buildPlasmidDB(args.genome_directory,
                           args.derep_metadata,
                           args.outfile)
###############################################################################
###############################################################################
###############################################################################
###############################################################################
if __name__ == '__main__':
    # Command-line interface: four positional inputs plus an optional output.
    parser = argparse.ArgumentParser()
    parser.add_argument('genome_directory', help="Directory containing dereplicated IMG genomes.")
    parser.add_argument('derep_metadata', help="Dereplicated IMG metadata.")
    parser.add_argument('path_file', help="File containing paths to img files.")
    parser.add_argument('taxon_file', help="File containing updated taxon information.")
    parser.add_argument('-o', '--outfile', default=False, help="Output file.")
    args = parser.parse_args()
    # do what we came here to do
    doWork(args)
###############################################################################
###############################################################################
###############################################################################
###############################################################################
| [
"[email protected]"
] | |
e0bcafb17add192f0c17d976c9987de4a879e0b8 | c5a60e12ee8ccfe3d54af7002ea2415dfe1d465f | /lib/parser/map/google/France.py | 5f1603979ea11edc2b310ef8b75cc51b7d3f97f1 | [] | no_license | ptax/WebCrawler_FR | 3e2e34ca4cde20dd3590a4e9d554e744a62d663f | 3fa69996862b2d921a594a3160f800486d8c2ab8 | refs/heads/master | 2020-12-03T03:52:09.027456 | 2017-09-18T07:23:05 | 2017-09-18T07:23:05 | 95,782,184 | 1 | 1 | null | 2017-08-14T06:48:22 | 2017-06-29T13:46:30 | Python | UTF-8 | Python | false | false | 339 | py | from lib.parser.map.google.GMap import GMap as GMap
class France(GMap):
    """GMap parser configuration for France.

    Each ADMIN_LEVEL_* constant names the Google geocoding address-component
    type used at that administrative depth; QUANTITY_OF_ADMIN_LEVELS is the
    number of levels declared (presumably consumed by the GMap base parser --
    confirm in GMap).
    """

    ADMIN_LEVEL_1 = 'country'
    ADMIN_LEVEL_2 = 'administrative_area_level_1'
    ADMIN_LEVEL_3 = 'administrative_area_level_2'
    ADMIN_LEVEL_4 = 'locality'
    ADMIN_LEVEL_5 = 'sublocality'
    ADMIN_LEVEL_6 = 'neighborhood'
    QUANTITY_OF_ADMIN_LEVELS = 6
| [
"10122202z3223544z"
] | 10122202z3223544z |
18e9eb598fb803d50ba5724cb3c8489254ce1bf4 | 7e3410923fa4dd4ad1427abb23bba56767f7a80e | /cmain.py | 46726a461119cefb782a3a9c42563e4fbc56fdef | [] | no_license | sofken/kyogi | 730cca61943fa4941eb15c76f3613a185459e481 | f0e9de6fdff6fa7f0a8e57e7bee12660964c6e8e | refs/heads/master | 2016-09-06T16:49:04.946823 | 2015-10-11T18:08:36 | 2015-10-11T18:08:36 | 42,280,235 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,466 | py | import math
from copy import deepcopy
import random
import numpy as np
from numba import double
from numba.decorators import jit
import pyximport
pyximport.install()
import cfunc
#stone & stage reading
#data = two stone
#onedata = one stone
#twostage = two stage
#onestage = one stage
# Driver script: load stage/stone data through the compiled Cython helper
# module `cfunc`, then evolve a population of candidate codes until
# cfunc.check_stop() signals completion.  (crossing/mutation/roulette suggest
# a genetic algorithm -- exact semantics live in cfunc; confirm there.)
data = cfunc.getdata() #stage & stone two data
onedata = []
for i in range(len(data)):
    onedata.append(cfunc.getone(data[i])) #stage & stone one data
twostage = data[0]
cfunc.addone(twostage)
del data[0]
onestage = onedata[0]
del onedata[0]
# codes: helper objects used during evaluation
IF_stage = cfunc.IF_STAGE()
IF_stone = cfunc.IF_STONE()
act = cfunc.actcode()
act.append([[7,7],7,7])
Main_code = []
LEN_N = 5       # number of candidate codes kept in Main_code
MAINLEN = 50    # length of each candidate code
for i in range(LEN_N):
    Main_code.append(cfunc.maincode(MAINLEN))
cpstage = deepcopy(twostage)
go= 1
ranktable = []
cou = 1  # generation counter
# Evolution loop: crossover + mutation, evaluate & rank, then roulette
# selection for the next generation.
while(go):
    cfunc.crossing(Main_code)
    cfunc.mutation(Main_code)
    go = cfunc.check_stop(Main_code,ranktable,cpstage,data,onedata,IF_stage,IF_stone,act)
    cfunc.sortalive(ranktable)
    if(go == 0):
        #cfunc.check_stop_2([Main_code[ranktable[0][0]]],ranktable,cpstage,data,onedata,IF_stage,IF_stone,act)
        break
    #cfunc.work_2(cpstage,data,onedata,IF_stage,IF_stone,act,Main_code[ranktable[0][0]])
    cfunc.roulette(Main_code,ranktable)
    cou += 1
#go = check_stop(Main_code,ranktable,cpstage,data,IF_stage,IF_stone,act)
#viewstage(cpstage)
#result = [Main_code[ranktable[0][0]]]
#cfunc.check_stop_2(result,ranktable,cpstage,data,onedata,IF_stage,IF_stone,act)
| [
"[email protected]"
] | |
694f2ef27c13159bf25f60313acfe0fe794edba8 | 7a41f8a2c9859d4aa46f3418f3c5a927d0319033 | /test_imdb.py | 9d60165c4885746888dd1e21051671bc1eb4d334 | [] | no_license | gffbss/dev-project | 174905ac42c68efadbe1f3795a0ed7c5bb68452a | 2742b3f8ec9748ac008286caa1e7699eefbe5c35 | refs/heads/master | 2021-01-25T03:48:41.442643 | 2014-05-05T20:07:50 | 2014-05-05T20:07:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 644 | py | __author__ = 'geoffreyboss'
# from imdb import IMDb
#
# ia = IMDb()
# matrix = ia.get_movie('0075860')
# print matrix
#
#
#
# def median(alist):
#
# srtd = sorted(alist) # returns a sorted copy
# mid = len(alist)/2 # remember that integer division truncates
#
# if len(alist) % 2 == 0: # take the avg of middle two
# print (srtd[mid-1] + srtd[mid]) / 2.0
# else:
# print srtd[mid]
#
# median([1,2,3,4,5])
def check_for_mode(num_list):
    """Print and return the mode (most frequent value) of *num_list*.

    Returns None for an empty list.  Bug fix: the original compared each
    element to num_list[element] -- using list *values* as indices -- which
    both miscounted and could raise IndexError for values >= len(num_list).
    """
    from collections import Counter
    if not num_list:
        print(0, None)
        return None
    mode, count = Counter(num_list).most_common(1)[0]
    print(count, mode)
    return mode
check_for_mode([1,4,2,4,3,5]) | [
"[email protected]"
] | |
f3d03f4c4adbe888a0b055ed9321fec36439c84f | d07287a13ef315057a9a037739176a24a76b68c7 | /Project/numberPrime.py | 74d4a1b8c6ec63fab82b504010a0a96e6e7b4ef3 | [] | no_license | panyisheng/network-security-technology | e99cc6717dd2e14e3bd0cbcf3ec8d1f745cba87e | 7f444d7725029a5be72bcbaec7d9027daba0152b | refs/heads/master | 2020-03-21T10:35:56.979005 | 2018-06-24T07:30:35 | 2018-06-24T07:30:35 | 138,460,427 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,861 | py | '''
Filename: numberPrime.py
Author: Pan Yisheng
Description: generate big prime and compute rsa key pair
'''
import random
from gcd import *
from primality_tester import *
from random import randrange, getrandbits
import time
# Function to test primality
def NumberIsPrime(n):
    """Primality test: cheap screens first, then the Miller-Rabin test."""
    if n in (2, 3):
        return True
    if n < 2 or n % 3 == 0:
        # anything below 2, and every multiple of 3 (3 itself was accepted above)
        return False
    # Everything else (including even numbers) is left to the probabilistic test.
    return prime_test_miller_rabin(n)
#Function to generete number primes
#Generete two different prime number
def GenereteNumberPrime(n_bits):
    """Return two distinct probable primes drawn from *n_bits*-bit randoms."""
    while True:
        # Draw two fresh candidates each round.
        p = getrandbits(n_bits)
        q = getrandbits(n_bits)
        if NumberIsPrime(p) and NumberIsPrime(q) and p != q:
            return p, q
# Function compute e and d
def GetParams(phi_n):
    """Return the RSA exponent pair (d, e) for the given totient.

    e is the standard public exponent 65537; d is its modular inverse
    modulo *phi_n*, already reduced into [0, phi_n).

    :raises ValueError: if 65537 is not invertible mod phi_n.  The original
        (extended-GCD helper plus a single ``d += phi_n`` sign fix-up)
        silently returned a meaningless d in that case.
    """
    e = 65537
    # pow(e, -1, m) (Python 3.8+) computes the modular inverse directly,
    # replacing the project euclideanExtendedGCD helper and the dead
    # random-e selection loop that was commented out here.
    d = pow(e, -1, phi_n)
    return d, e
def GenParms(n_bits, DEBUG):
    """Generate an RSA parameter set (n, d, e) from two fresh n_bits primes."""
    p, q = GenereteNumberPrime(n_bits)
    modulus = p * q
    totient = (p - 1) * (q - 1)
    d, e = GetParams(totient)
    if DEBUG == True:
        print("P, Q: ", p, q)
        print("N: ", modulus)
        print("Phi(n): ", totient)
        print("E, D: ", e, d)
    return modulus, d, e
#Debuger function GenParms
def Gen_Parms2(p, q, DEBUG=True):
    """Debug variant of GenParms that takes the two primes explicitly."""
    modulus = p * q
    totient = (p - 1) * (q - 1)
    d, e = GetParams(totient)
    if DEBUG == True:
        print("P, Q: ", p, q)
        print("N: ", modulus)
        print("Phi(n): ", totient)
        print("E, D: ", e, d)
    return modulus, d, e
#Debuge function
if __name__ == '__main__':
    # Ad-hoc timing of the two generators; only runs when executed directly.
    begin = time.time()
    GenereteNumberPrime(512)
    print("time1:", time.time() - begin)
    begin = time.time()
    y = randint_bits(512)  # NOTE(review): randint_bits must come from a star-import
    print("time2:", time.time() - begin)
    print("Numeros primos")
| [
"[email protected]"
] | |
64334977d2d5fcc516210f941974a893c1015f0e | bb74ab80ef44f5bd7dc8ac5c7022e6c611f81f9d | /AmazonNet.py | 2de79eb61d17f8c81d8dd51fad5a1a1fc2f25991 | [] | no_license | xiaofeijiGH/Amazon | 710c97f679611db44dfa172bb131220202f8b369 | f538621d58e3b2cfda817bafdaa5455e620ac164 | refs/heads/master | 2020-09-23T13:50:37.810007 | 2019-12-09T04:29:33 | 2019-12-09T04:29:33 | 225,515,081 | 0 | 0 | null | 2019-12-03T02:42:29 | 2019-12-03T02:42:28 | null | UTF-8 | Python | false | false | 2,717 | py | import sys
import torch
import torch.nn as nn
import torch.nn.functional as F
sys.path.append('..')
class AmazonNet(nn.Module):
    """Policy/value network for the Amazons game.

    Four 3x3 conv layers (the last two unpadded, shrinking each spatial
    dimension by 4 in total) feed two fully-connected layers, which branch
    into a log-softmax policy head and a tanh value head.
    """

    def __init__(self, game, args):
        # Board geometry and move count come from the game object;
        # args supplies num_channels and dropout.
        self.board_x, self.board_y = game.get_board_size()
        self.action_size = game.get_action_size()
        self.args = args

        super(AmazonNet, self).__init__()

        nc = args.num_channels
        self.conv1 = nn.Conv2d(1, nc, 3, stride=1, padding=1)
        self.conv2 = nn.Conv2d(nc, nc, 3, stride=1, padding=1)
        self.conv3 = nn.Conv2d(nc, nc, 3, stride=1)   # no padding: -2 per dim
        self.conv4 = nn.Conv2d(nc, nc, 3, stride=1)   # no padding: -2 per dim

        self.bn1 = nn.BatchNorm2d(nc)
        self.bn2 = nn.BatchNorm2d(nc)
        self.bn3 = nn.BatchNorm2d(nc)
        self.bn4 = nn.BatchNorm2d(nc)

        flat = nc * (self.board_x - 4) * (self.board_y - 4)
        self.fc1 = nn.Linear(flat, 1024)
        self.fc_bn1 = nn.BatchNorm1d(1024)
        self.fc2 = nn.Linear(1024, 512)
        self.fc_bn2 = nn.BatchNorm1d(512)
        self.fc3 = nn.Linear(512, self.action_size)  # policy head
        self.fc4 = nn.Linear(512, 1)                 # value head

    def forward(self, s):
        # s: batch_size x board_x x board_y
        x = s.view(-1, 1, self.board_x, self.board_y)
        x = F.relu(self.bn1(self.conv1(x)))
        x = F.relu(self.bn2(self.conv2(x)))
        x = F.relu(self.bn3(self.conv3(x)))
        x = F.relu(self.bn4(self.conv4(x)))
        x = x.view(-1, self.args.num_channels * (self.board_x - 4) * (self.board_y - 4))
        x = F.dropout(F.relu(self.fc_bn1(self.fc1(x))), p=self.args.dropout, training=self.training)
        x = F.dropout(F.relu(self.fc_bn2(self.fc2(x))), p=self.args.dropout, training=self.training)
        pi = self.fc3(x)   # batch x action_size, unnormalised logits
        v = self.fc4(x)    # batch x 1
        return F.log_softmax(pi, dim=1), torch.tanh(v)
| [
"[email protected]"
] | |
b438ce5030fd72d02e515c5a45c7fa5c0d9aad10 | 1a48806f2dd5fc156b0485e35156742164e0ef0c | /discwarriors/Wbug.py | da16667ccb2fa011ad9fce7acb966f3caedc3e27 | [] | no_license | MasMat2/Games | 29f4800e606bba984faa7c760575aebaeaf362cc | 48ca7adaa355f2a5d2c40936096dd6cbf02d55ee | refs/heads/master | 2020-06-05T20:21:25.582607 | 2020-04-16T21:01:17 | 2020-04-16T21:01:17 | 192,537,013 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,130 | py | #Disc Warriors
#
# Wbug.py
# Class for World based bugs.
#
# This is, so far, not a useable
# class. This is currently just a
# test to get the basics of the
# class to work.
#
import Dfile, pygame, functions
from pygame.locals import *
from functions import load_image
class bug(pygame.sprite.Sprite):
    """World-based bug sprite configured from a Dfile definition."""

    def __init__(self, path, ID):
        pygame.sprite.Sprite.__init__(self)
        # The definition file supplies name/type and the world start position.
        self.dfile = "dfiles/" + path
        self.name = Dfile.getName(self.dfile)
        self.type = Dfile.getType(self.dfile)
        self.group = "Bug"
        self.x = int(Dfile.getAtr(self.dfile, "W", "Startx"))
        self.y = int(Dfile.getAtr(self.dfile, "W", "Starty"))
        self.id = ID
        self.off = (-17, -32)  # blit offset of the sprite relative to (x, y)
        self.sprite = load_image("chars/bugs/teron/teron_D.png")
        self.selected = False
        self.health = 100

    def update(self, events, sur, of1):
        """Redraw the bug and return its world y coordinate."""
        self.draw(sur, of1)
        return self.y

    def draw(self, surf, of2):
        """Blit the sprite at its camera-relative position; dot it if selected."""
        draw_x = self.x + self.off[0] - of2[0]
        draw_y = self.y + self.off[1] - of2[1]
        surf.blit(self.sprite, (draw_x, draw_y))
        if self.selected:
            dot_x = self.x - of2[0]
            dot_y = self.y + self.off[1] - of2[1]
            pygame.draw.circle(surf, (0, 0, 0), (dot_x, dot_y), 2)
"[email protected]"
] | |
68e561ffc920fb139d5b7a78738b824ef4517510 | 3893d026b8380f4a87cc727020c735d753e9abae | /core/session/local.py | 8930f2d13de4caf96078174c6f67beda6715bc0f | [
"Apache-2.0"
] | permissive | reinforcement-learning-fun/TradzQAI | 4e46d4a79fe1bd3930f528943ab2aa428cc96d14 | 746c4900359cfc69db26fcf3d178827f2b0947a2 | refs/heads/master | 2020-03-30T17:26:00.346150 | 2018-10-03T05:59:59 | 2018-10-03T05:59:59 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,596 | py | from core import Local_Worker
from core import Local_env
from threading import Thread
from tools import *
import time
class Local_session(Thread):
    """Session thread wiring a Local_env to an agent class and a worker.

    Typical flow: construct, setAgent(), loadSession(), then start()/run().
    """

    def __init__(self, mode="train", gui=0, contract_type="classic", config='config/', db=None, agent="PPO"):
        self.db = db
        # config must name a directory and therefore end with '/'
        if not "/" in config[len(config)-1]:
            raise ValueError("You forget \"/\" at the end, it should be {}/".format(config))
        self.env = Local_env(mode=mode, gui=gui, contract_type=contract_type, config=config, agent=agent)
        self.config = config
        self.agent = None
        # worker starts out as the Local_Worker *class*; initWorker() later
        # replaces it with an instance.
        self.worker = Local_Worker
        Thread.__init__(self)

    def stop(self):
        """Close the environment."""
        self.env.close()

    def getWorker(self):
        return self.worker

    def getEnv(self):
        return self.env

    def getAgent(self):
        return self.agent

    def setAgent(self, agent=None, device=None):
        """Resolve the agent class named by *agent* (or env.model_name) from
        the 'agents' package; raises ValueError for unknown names."""
        if agent:
            self.env.model_name = agent
        if self.env.model_name in self.env.agents:
            import warnings
            with warnings.catch_warnings():
                warnings.filterwarnings("ignore",category=FutureWarning)
                self.agent = getattr(__import__('agents'), self.env.model_name)
            self.device = device
        else:
            raise ValueError('could not import %s' %self. env.model_name)

    def loadSession(self):
        """Instantiate agent and worker, unless the env reported a stop state."""
        if not self.env.stop:
            self.initAgent()
            self.initWorker()
        else:
            # NOTE: the trailing backslash continues *inside* the string
            # literal, so the warning is emitted as a single line.
            print (red("Warning : ")+"You cannot load the session without setting,\
any data directory in %s/environnement" % self.config)

    def initAgent(self):
        """Instantiate the agent; tensorforce-based classes are unwrapped via _get().

        NOTE(review): `("tensorforce" and self.agent.__name__)` evaluates to
        just self.agent.__name__, so only the class name is tested against
        each MRO entry -- confirm that this matches the original intent.
        """
        if not self.agent:
            self.setAgent()
        for classe in self.agent.__mro__:
            if ("tensorforce" and self.agent.__name__) in str(classe):
                self.agent = self.agent(env=self.env, device=self.device)._get()
                return
        self.agent = self.agent(env=self.env, device=self.device)

    def initWorker(self):
        """Replace the worker class with an instance bound to env + agent."""
        self.worker = self.worker(env=self.env, agent=self.agent)

    def run(self):
        """Thread entry point: start logging, then run the worker
        (in a plain thread when no GUI is attached)."""
        if not self.agent:
            raise ValueError("add an agent and load the session before running")
        elif not self.env.stop:
            self.env.logger.start()
            if self.env.gui == 0:
                Thread(target=self.worker.run).start()
            else:
                self.worker.start()
        else:
            print (red("Warning : ")+"You cannot start the session without setting,\
any data directory in %s/environnement" % self.config)
| [
"[email protected]"
] | |
0ee9d779bf7bc462a321478cadbff65f48d1fc79 | cfb9e78029e539caa0230d1903f287ce9bad204a | /manage.py | 98492c81a9ab920ef6cfe01a5015ed9249d4665c | [
"MIT"
] | permissive | ulyssesv/bflask | ec787587ff2daf09b2320a3ce27d116697aed61f | 0d1b477a9bbf31097ef2f5a81962ecdd625265a5 | refs/heads/master | 2020-02-29T21:30:21.007569 | 2016-06-07T07:58:46 | 2016-06-07T07:58:46 | 60,293,397 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,620 | py | from bflask import create_app, db
from bflask.models import Agency, Route, Stop
from bflask.nextbus import NextBus
from flask_migrate import MigrateCommand, Migrate
from flask_script import Manager, Server
from sqlalchemy import func
from sqlalchemy.orm import load_only
# Application setup: Flask app plus a Flask-Script manager with the
# Flask-Migrate 'db' commands and a 'runserver' command registered.
app = create_app()
manager = Manager(app)
migrate = Migrate(app, db)
manager.add_command('db', MigrateCommand)
manager.add_command("runserver", Server())
@manager.command
def load():
    """Load NextBus API entries.

    Fetches agencies, routes and stops from the NextBus feed and bulk-inserts
    them into the database.  Runs as ``python manage.py load``.
    """
    # TODO: Deal with IntegrityError from the unique constraint or add logic to synchronize the data.
    print("Loading NextBus entries")
    nb = NextBus()
    # Load agencies (only 'sf-muni' for now, see the TODO below).
    print("Loading agencies... ", end="", flush=True)
    r = nb.agency_list()
    agencies = []
    for agency in r['body']['agency']:
        # TODO: Refactor the Stop model to remove this lock. The Stop.tag and stop.external_id are
        # only unique when associated to a route/agency.
        if agency['@tag'] == 'sf-muni':
            agencies.append(Agency(tag=agency['@tag'], title=agency['@title']))
    db.session.bulk_save_objects(agencies)
    print("done.")
    # Load routes for every stored agency.
    print("Loading routes... ", end="", flush=True)
    agencies = Agency.query.all()
    routes = []
    for agency in agencies:
        r = nb.route_list(agency.tag)
        for route in r['body']['route']:
            routes.append(Route(agency_id=agency.id, tag=route['@tag'], title=route['@title']))
    db.session.bulk_save_objects(routes)
    print("done.")
    # Load stops; agencies come back as (Agency, route_count) pairs.
    print("Loading stops... ", end="", flush=True)
    agencies = db.session.query(Agency, func.count(Route.id)).join(Agency.routes).group_by(Agency.id).all()
    stops = []
    # Caches the routes to avoid querying in the loop to add relationship instances.
    routes = Route.query.options(load_only('id', 'tag', 'agency_id')).all()
    route_cache = {'{}:{}'.format(i.agency_id, i.tag): i for i in routes}
    def _add_stop():
        # Helper to build a Stop; reads the enclosing loop's `stop` dict via
        # closure, including the optional '@stopId' -> external_id field.
        stop_id_arg = {'external_id': stop['@stopId']} if '@stopId' in stop.keys() else {}
        return Stop(
            tag=stop['@tag'],
            title=stop['@title'],
            latitude=stop['@lat'],
            longitude=stop['@lon'],
            **stop_id_arg
        )
    for (agency, route_count) in agencies:
        if route_count < NextBus.MAX_ROUTES_PER_ROUTE_CONFIG:
            # The API returns up to 100 routes for an agency if the route_tag parameter is supressed.
            r = nb.route_config(agency_tag=agency.tag)
            for route in r['body']['route']:
                for stop in route['stop']:
                    s = _add_stop()
                    s.routes.append(route_cache['{}:{}'.format(agency.id, route['@tag'])])
                    stops.append(s)
        else:
            # An error is returned if an agency has more than 100 routes. The only option is to query one by one since
            # we have no pagination or batch request.
            routes = agency.routes.all()
            for route in routes:
                r = nb.route_config(agency_tag=agency.tag, route_tag=route.tag)
                for stop in r['body']['route'][0]['stop']:
                    s = _add_stop()
                    s.routes.append(route_cache['{}:{}'.format(agency.id, route.tag)])
                    stops.append(s)
    db.session.bulk_save_objects(stops)
    print("done.")
    print("Committing to database...", end="", flush=True)
    db.session.commit()
    print("done.")
# Entry point: dispatch Flask-Script commands, e.g. `python manage.py load`.
if __name__ == '__main__':
    manager.run()
| [
"[email protected]"
] | |
2c30c1cec07249f205a3aeedf8fc289bd1576571 | fe2cbdb8859bfc541ef3b2b0b8f08b4400fdb81a | /Customer_segmentation/KMeans.py | 26b5790837b4fdb4bb7f3386e9807fc4f8f9a6ef | [] | no_license | SanaAmruth/MyML | 64173f81eba6c044c017e79aaa67bebc08f91ee2 | 93118602cd15e7b459675c3356fbd6a61a424022 | refs/heads/master | 2023-07-14T06:30:31.610501 | 2021-08-27T11:34:53 | 2021-08-27T11:34:53 | 285,191,251 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,779 | py | #!/usr/bin/env python
# coding: utf-8
# In[1]:
import pandas as pd
import numpy as np
import seaborn as sns
import matplotlib.pyplot as plt
from sklearn.preprocessing import StandardScaler, normalize ,MinMaxScaler
from sklearn.cluster import KMeans
from sklearn.decomposition import PCA
# In[2]:
# Load the raw credit-card usage data.
# NOTE(review): absolute path from the author's machine; consider making it
# configurable.
creditcard_df= pd.read_csv('/Users/sana/Desktop/Academics/Projects/Customer_Segmentation/Credit_card_data.csv')
# In[3]:
creditcard_df  # notebook-style display; no effect when run as a script
# In[4]:
type(creditcard_df)
creditcard_df.info()
# In[5]:
creditcard_df.describe()
# In[6]:
print(creditcard_df.isnull().sum())
# In[7]:
# Impute missing MINIMUM_PAYMENTS / CREDIT_LIMIT with their column means.
creditcard_df.loc[(creditcard_df['MINIMUM_PAYMENTS'].isnull() == True), 'MINIMUM_PAYMENTS'] = creditcard_df['MINIMUM_PAYMENTS'].mean()
creditcard_df.isnull().sum()
# In[8]:
creditcard_df.loc[(creditcard_df['CREDIT_LIMIT'].isnull() == True),'CREDIT_LIMIT']=creditcard_df['CREDIT_LIMIT'].mean()
# In[9]:
creditcard_df.isnull().sum()
# In[10]:
creditcard_df.isnull().sum().sum()
# In[11]:
# Customer id carries no signal for clustering.
creditcard_df.drop('CUST_ID',axis=1,inplace=True)
# In[12]:
creditcard_df
# In[13]:
n = len(creditcard_df.columns)
n
# In[14]:
creditcard_df.columns
# In[15]:
correlations=creditcard_df.corr()
sns.heatmap(correlations,annot=True)
# In[16]:
# Standardise features before K-Means (it is distance-based).
scaler=StandardScaler()
creditcard_df_scaled=scaler.fit_transform(creditcard_df)
# In[17]:
type(creditcard_df_scaled)
# In[18]:
creditcard_df_scaled
# In[19]:
# Elbow method: inertia for k = 1..19.
cost=[]
range_values=range(1,20)
for i in range_values:
    kmeans=KMeans(i)
    kmeans.fit(creditcard_df_scaled)
    cost.append(kmeans.inertia_)
plt.plot(cost)
# In[20]:
# k = 7, presumably chosen from the elbow plot above.
kmeans = KMeans(7)
kmeans.fit(creditcard_df_scaled)#find the nearest clusters for given data
labels = kmeans.labels_
labels
# In[21]:
kmeans.cluster_centers_.shape
# In[22]:
cluster_centers=pd.DataFrame(data=kmeans.cluster_centers_,columns=[creditcard_df.columns])
cluster_centers
# In[23]:
# scaler=StandardScaler()
# cluster_centers=scaler.inverse_transform(cluster_centers)
# cluster_centers=pd.DataFrame(data=cluster_centers,columns=[creditcard_df_scaled.columns])
# cluster_centers
# In[24]:
labels.shape
# In[25]:
labels.max()
# In[26]:
labels.min()
# In[27]:
# Attach the cluster id to each customer row.
credit_df_cluster=pd.concat([creditcard_df,pd.DataFrame(({'cluster':labels}))],axis=1)
credit_df_cluster
# In[28]:
# Project to 2-D with PCA for visualisation.
pca=PCA(n_components=2)
principal_comp=pca.fit_transform(creditcard_df_scaled)
# In[29]:
pca_df=pd.DataFrame(data=principal_comp,columns=['pca1','pca2'])
pca_df
# In[30]:
pca_df=pd.concat([pca_df,pd.DataFrame({'Cluster':labels})],axis=1)
pca_df
# In[31]:
# Scatter plot, one colour per cluster (7 clusters -> 7 palette entries).
plt.figure(figsize=(10,10))
ax=sns.scatterplot(x='pca1',y='pca2',hue='Cluster',data=pca_df,palette=['yellow','red','blue','pink','orange','black','purple'])
plt.show()
# In[ ]:
| [
"[email protected]"
] | |
d716689682146fff0d52e853e9d9fd20cde66ec4 | 4bf82a8be06a1c39fc8cd5536a72839ea18f6b03 | /sensors/APDS-9960/python/gesture-oled.py | 9da39f9eefa5f1d9d659fe2440a5cae3545e2d72 | [
"MIT"
] | permissive | AnaviTechnology/anavi-examples | 5571a29bd61452a1ea855dc0c16a3aa5a317657b | 2725b21d2b5f0d0a0a40cec90f7edd7b4f2b9a86 | refs/heads/master | 2023-08-31T20:18:30.849894 | 2023-08-24T15:52:52 | 2023-08-24T15:52:52 | 63,726,355 | 34 | 18 | MIT | 2023-08-24T09:36:18 | 2016-07-19T20:36:36 | C++ | UTF-8 | Python | false | false | 2,127 | py | import os
import sys
import signal
import smbus
import socket
from time import sleep
from apds9960.const import *
from apds9960 import APDS9960
from luma.core.interface.serial import i2c
from luma.core.render import canvas
from luma.oled.device import ssd1306, ssd1325, ssd1331, sh1106
from luma.core.error import DeviceNotFoundError
from PIL import ImageFont, ImageDraw
def signal_handler(sig, frame):
    """SIGINT handler: blank the OLED by re-initialising it, then exit hard."""
    display_bus = i2c(port=1, address=0x3C)
    # Re-creating the ssd1306 device clears the display contents.
    ssd1306(display_bus, rotate=0)
    print("\nApplication termiated with Ctrl+C.")  # (sic)
    os._exit(0)
def draw_text(display, line1, line2, line3):
    """Render three text lines on *display*: a 10pt header and two 16pt lines,
    inside a white border."""
    with canvas(display) as draw:
        # Bug fix: the border used the global `device` instead of the
        # `display` argument, silently ignoring the parameter.
        draw.rectangle(display.bounding_box, outline="white", fill="black")
        header_font = ImageFont.truetype(drawfont, 10)
        draw.text((5, 5), line1, fill="white", font=header_font)
        body_font = ImageFont.truetype(drawfont, 16)
        draw.text((5, 20), line2, fill="white", font=body_font)
        draw.text((5, 42), line3, fill="white", font=body_font)
# Human-readable names for the APDS9960 gesture-direction constants
# (the constants come from `from apds9960.const import *`).
dirs = {
    APDS9960_DIR_NONE: "none",
    APDS9960_DIR_LEFT: "left",
    APDS9960_DIR_RIGHT: "right",
    APDS9960_DIR_UP: "up",
    APDS9960_DIR_DOWN: "down",
    APDS9960_DIR_NEAR: "near",
    APDS9960_DIR_FAR: "far",
}
# Main program: set up the OLED and the APDS9960 gesture sensor over I2C,
# then poll for gestures forever, echoing each one to stdout and the display.
try:
    signal.signal(signal.SIGINT, signal_handler)
    drawfont = "pixelmix.ttf"  # TTF file used by draw_text; must be present
    serial = i2c(port=1, address=0x3C)
    device = ssd1306(serial, rotate=0)
    port = 1
    bus = smbus.SMBus(port)
    apds = APDS9960(bus)
    apds.setProximityIntLowThreshold(50)
    print("APDS9960 Gesture Test")
    print("============")
    apds.enableGestureSensor()
    draw_text(device, "APDS-9960", "Show me a", "gesture!")
    while True:
        sleep(0.5)
        if apds.isGestureAvailable():
            motion = apds.readGesture()
            print("Gesture={}".format(dirs.get(motion, "unknown")))
            draw_text(device, "APDS-9960", "Gesture", "{}".format(dirs.get(motion, "unknown")))
except OSError as error:
    # errno 121: presumably the I2C remote-I/O error raised when no device
    # answers on the bus -- confirm on target hardware.
    if 121 == error.errno:
        print('No sensor found')
    elif None == error.errno:
        # An OSError without errno here is assumed to be a missing font file.
        print('Error. Is the ttf font file available?')
    else:
        print('Error:', sys.exc_info()[0])
    os._exit(1)
except:
    # NOTE(review): bare except also swallows KeyboardInterrupt/SystemExit;
    # `except Exception:` would be safer -- confirm before changing.
    print("Unexpected error:", sys.exc_info()[0])
    os._exit(2)
| [
"[email protected]"
] | |
a41f99fb7ffa728e0744d0a8e53589f34b2a1f53 | 0f68624a2019ff7492dd0dc0b8bc55495a2e950d | /playLA/LinearSystem.py | f64a6ca6612ff20925f82afb659fc0b0d52333f2 | [] | no_license | DaoLinZhou/learning-linear-algebra | 3fd6fd83428c0a5354f40cb44d3f69d43a118971 | 413fbdebacf1ffbb625b1cdb9cbb29e10300a416 | refs/heads/master | 2020-09-23T17:08:13.489545 | 2020-03-05T03:28:22 | 2020-03-05T03:28:22 | 225,546,128 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,058 | py | from playLA._global import is_zero
from .Matrix import Matrix
from .Vector import Vector
class LinearSystem:
    """Linear system solved by Gauss-Jordan elimination on an augmented matrix."""

    def __init__(self, A: Matrix, b=None):
        """Build the augmented matrix Ab from coefficient matrix A and RHS b.

        b may be None (A only), a Vector (single right-hand side) or a
        Matrix (multiple right-hand sides, e.g. the identity for inversion).
        """
        assert b is None or A.row_num() == len(b), "row number of A must be equal to the length of b"
        self._m = A.row_num()
        self._n = A.col_num()
        if b is None:
            self.Ab = [A.row_vector(i) for i in range(self._m)]
        if isinstance(b, Vector):
            self.Ab = [Vector(A.row_vector(i).underlying_list() + [b[i]])
                       for i in range(self._m)]
        if isinstance(b, Matrix):
            self.Ab = [Vector(A.row_vector(i).underlying_list() + b.row_vector(i).underlying_list())
                       for i in range(self._m)]
        # Column indices of the pivots found during forward elimination.
        self.pivots = []

    def gauss_jordan_elimination(self):
        """Run Gauss-Jordan elimination; return True if the system has a
        solution, False otherwise."""
        self._forward()
        self._backward()
        # A zero row with a non-zero augmented entry means "0 = c": no solution.
        for i in range(len(self.pivots), self._m):
            if not is_zero(self.Ab[i][-1]):
                return False
        return True

    def fancy_print(self):
        """Pretty-print the augmented matrix with a separator before the RHS."""
        for i in range(self._m):
            print(" ".join(str(self.Ab[i][j]) for j in range(self._n)), end=" ")
            print("|", self.Ab[i][-1])

    def _max_row(self, index_i, index_j, n):
        """Among rows index_i..n-1, return the index of the row whose entry in
        column index_j has the largest absolute value (partial pivoting)."""
        # Bug fix: the original kept the *signed* value in `best` while
        # comparing against absolute values, so rows with large negative
        # entries were never selected as pivots.
        best, ret = abs(self.Ab[index_i][index_j]), index_i
        for i in range(index_i + 1, n):
            if abs(self.Ab[i][index_j]) > best:
                best, ret = abs(self.Ab[i][index_j]), i
        return ret

    def _forward(self):
        """Forward pass: reduce to row-echelon form, recording pivot columns."""
        i, k = 0, 0
        while i < self._m and k < self._n:
            # Can position (i, k) hold a pivot?  Swap in the best row first.
            max_row = self._max_row(i, k, self._m)
            self.Ab[i], self.Ab[max_row] = self.Ab[max_row], self.Ab[i]
            if is_zero(self.Ab[i][k]):
                k += 1  # the remaining column is (numerically) zero: move right
            else:
                # Normalise the pivot to 1, then eliminate it below.
                self.Ab[i] = self.Ab[i] / self.Ab[i][k]
                for j in range(i + 1, self._m):
                    self.Ab[j] = self.Ab[j] - self.Ab[j][k] * self.Ab[i]
                self.pivots.append(k)
                i += 1

    def _backward(self):
        """Backward pass: eliminate above each pivot.  len(pivots) is the
        number of non-zero rows after the forward pass."""
        n = len(self.pivots)
        for i in range(n - 1, -1, -1):
            k = self.pivots[i]
            # Ab[i][k] is the pivot; clear every entry above it.
            for j in range(i - 1, -1, -1):
                self.Ab[j] = self.Ab[j] - self.Ab[j][k] * self.Ab[i]
def inv(A: Matrix):
    """Return the inverse of a square matrix A, or None when A is not
    square or is singular."""
    rows, cols = A.row_num(), A.col_num()
    if rows != cols:
        return None
    # Solve [A | I]; if elimination succeeds, the right half is A^-1.
    system = LinearSystem(A, Matrix.identity(rows))
    if not system.gauss_jordan_elimination():
        return None
    right_half = [[row[j] for j in range(rows, 2 * rows)] for row in system.Ab]
    return Matrix(right_half)
def rank(A: Matrix):
    """Return the rank of A: the number of non-zero rows left after
    Gauss-Jordan elimination."""
    system = LinearSystem(A)
    system.gauss_jordan_elimination()
    zero_row = Vector.zero(A.col_num())
    return sum(1 for row in system.Ab if row != zero_row)
| [
"[email protected]"
] | |
561c2730ade460b8bf541c0fffa15de9760225ec | 50a99ade25d8f2edd12dbf2d2514edf27bc41f02 | /text_seed_DCN.py | aff2c25cc477d7a5b98949c16e47775d54aa21b5 | [] | no_license | KEAML-JLU/DeepTextClustering | e8bdd86e4119ecfef44cbcb1e4f1aa320bf2c824 | b84d28cbe9db02e51b3f5fa9c6e31e540b3ad219 | refs/heads/main | 2023-01-21T07:19:49.716295 | 2020-12-06T01:50:59 | 2020-12-06T01:50:59 | 318,919,906 | 6 | 1 | null | null | null | null | UTF-8 | Python | false | false | 12,208 | py | import numpy as np
import torch
import torch.nn as nn
import torch.nn.functional as F
import torch.optim as optim
from sklearn.cluster import KMeans
from torch.autograd import Variable
from utils import cluster_acc, load_seeds_dict, align_labels
class seed_DCN(object):
    """Semi-supervised Deep Clustering Network (DCN).

    Alternates between (a) SGD updates of an autoencoder whose loss
    combines reconstruction error with the distance of each hidden code to
    its assigned cluster center, and (b) mini-batch k-means updates of the
    centers. A small set of labelled "seed" points constrains both the
    center initialisation and the per-batch assignments.
    """

    def __init__(self,
                 n_clusters,
                 net,
                 hidden_dim,
                 lr=0.001,
                 tol=0.001,
                 batch_size=256,
                 max_epochs=100,
                 recons_lam=1,
                 cluster_lam=0.5,
                 use_cuda=torch.cuda.is_available(),
                 verbose=True):
        # n_clusters: number of clusters K.
        # net: autoencoder module returning (hidden_code, reconstruction).
        # hidden_dim: dimensionality of the hidden (embedding) layer.
        # tol: fraction of changed assignments below which training stops.
        # recons_lam / cluster_lam: weights of the reconstruction / cluster loss.
        self.n_clusters = n_clusters
        self.hidden_dim = hidden_dim
        self.lr = lr
        self.batch_size = batch_size
        self.tol = tol
        self.max_epochs = max_epochs
        self.recons_lam = recons_lam
        self.cluster_lam = cluster_lam
        self.use_cuda = use_cuda
        self.verbose = verbose
        self.net = net
        assert isinstance(self.net, nn.Module)
        # Learned cluster centers; filled in by fit(), required by predict().
        self.centers = None

    @staticmethod
    def get_mask(seeds_dict, data_size):
        """Return a float32 vector with 1.0 at every seed index, 0 elsewhere."""
        mask = np.zeros(data_size, dtype=np.float32)
        for _, ids in seeds_dict.items():
            for i in ids:
                mask[i] = 1
        return mask

    @staticmethod
    def get_seed_labels(seeds_dict, data_size):
        """Return an int64 label vector for the seed points.

        Non-seed positions default to 0; they are only ever read where the
        corresponding seed mask is 1.
        """
        labels = np.zeros(data_size, dtype=np.int64)
        for l, ids in seeds_dict.items():
            for i in ids:
                labels[i] = l
        return labels

    def fit(self, feat, seeds_dict, labels=None):
        """Train the network and learn the cluster centers.

        feat: (data_size, input_dim) feature array.
        seeds_dict: {cluster_label: [sample indices]} supervision seeds.
        labels: optional ground-truth labels, used only to log accuracy.
        """
        assert len(seeds_dict) <= self.n_clusters
        feat = feat.astype(np.float32)
        batch_size = self.batch_size
        data_size = feat.shape[0]
        count = {i: 0 for i in range(self.n_clusters)}
        seed_masks = self.get_mask(seeds_dict, data_size)
        seed_labels = self.get_seed_labels(seeds_dict, data_size)

        hidden_feat = self.get_hidden_features(feat, self.net, self.hidden_dim,
                                               batch_size=self.batch_size, use_cuda=self.use_cuda)
        # Warm-start k-means from the seed class means so cluster ids line up
        # with the seed labels. (Was the bare name `n_clusters`, which is
        # undefined in this method and only worked via a __main__ global.)
        seed_centers = self.get_seed_centers(self.n_clusters, seeds_dict, hidden_feat)
        idx, centers = self.init_cluster(hidden_feat, n_clusters=self.n_clusters,
                                         init_centers=seed_centers)
        # .copy(), not [:]: slicing an ndarray returns a *view*, and idx is
        # mutated in place inside the epoch loop below.
        last_pred = idx.copy()
        if labels is not None:
            acc = cluster_acc(labels, idx)
            print('KMeans pretraining acc is {}'.format(acc))

        # Clamp the seed points to their known labels.
        for i in range(data_size):
            if seed_masks[i] == 1:
                idx[i] = seed_labels[i]

        optimizer = optim.SGD(self.net.parameters(), lr=self.lr, momentum=0.9)
        for epoch in range(self.max_epochs):
            for index in range(0, data_size, batch_size):
                feat_batch = Variable(torch.from_numpy(feat[index: index + batch_size]))
                idx_batch = idx[index: index + batch_size]
                mask_batch = Variable(torch.from_numpy(seed_masks[index: index + batch_size]))
                seeds_labels_batch = seed_labels[index: index + batch_size]
                centers_batch = Variable(torch.from_numpy(centers[idx_batch]))
                seeds_centers_batch = Variable(torch.from_numpy(centers[seeds_labels_batch]))
                if self.use_cuda:
                    feat_batch = feat_batch.cuda()
                    centers_batch = centers_batch.cuda()
                    mask_batch = mask_batch.cuda()
                    seeds_centers_batch = seeds_centers_batch.cuda()

                optimizer.zero_grad()
                hidden_batch, output_batch = self.net(feat_batch)
                recons_loss = F.mse_loss(output_batch, feat_batch)
                cluster_loss = F.mse_loss(hidden_batch, centers_batch)
                # Penalty pulling seed points toward their labelled centers;
                # computed here but deliberately excluded from the loss, as
                # in the original implementation.
                seed_loss = torch.mean(mask_batch * torch.norm(hidden_batch - seeds_centers_batch, p=2, dim=1))
                loss = self.recons_lam * recons_loss + self.cluster_lam * cluster_loss
                loss.backward()
                optimizer.step()

                # Re-embed the batch with the updated network, then take one
                # seed-constrained mini-batch k-means step on the centers.
                hidden_batch2, _ = self.net(feat_batch)
                hidden_batch2 = hidden_batch2.cpu().data.numpy()
                tmp_idx_batch, centers, count = self.batch_km_seed(
                    hidden_batch2, centers, count,
                    mask_batch.cpu().data.numpy(), seeds_labels_batch)
                idx[index: index + batch_size] = tmp_idx_batch

            # Full k-means pass on the refreshed embeddings, warm-started
            # from the current centers.
            hidden_feat = self.get_hidden_features(feat, self.net, self.hidden_dim,
                                                   batch_size=self.batch_size, use_cuda=self.use_cuda)
            idx, centers = self.init_cluster(hidden_feat, n_clusters=self.n_clusters,
                                             init_centers=centers)
            acc = None
            if labels is not None:
                acc = cluster_acc(labels, idx)
            if self.verbose:
                print('Epoch {} end, current acc is {}'.format(epoch + 1, acc))
            if self.whether_convergence(last_pred, idx, self.tol):
                print('End Iter')
                break
            else:
                last_pred = idx.copy()
        # Was `self.centenrs` (typo): self.centers stayed None, which broke
        # predict().
        self.centers = centers

    def predict(self, feat):
        """Assign each sample to its nearest learned cluster center."""
        if self.centers is None:
            raise RuntimeError('predict() called before fit()')
        hidden_feat = self.get_hidden_features(feat, self.net, self.hidden_dim,
                                               batch_size=self.batch_size, use_cuda=self.use_cuda)
        distances = np.linalg.norm(hidden_feat[:, np.newaxis] - self.centers[np.newaxis, :], axis=-1)
        pred = np.argmin(distances, axis=-1)
        return pred

    @staticmethod
    def get_hidden_features(feat, net, hidden_dim, batch_size=256, use_cuda=torch.cuda.is_available()):
        """Run `feat` through the encoder in batches; return the hidden codes."""
        feat = feat.astype(np.float32)
        data_size = feat.shape[0]
        hidden_feat = np.zeros((data_size, hidden_dim))
        for index in range(0, data_size, batch_size):
            data_batch = Variable(torch.from_numpy(feat[index: index + batch_size]))
            if use_cuda:
                data_batch = data_batch.cuda()
            hidden_batch, _ = net(data_batch)
            hidden_feat[index: index + batch_size] = hidden_batch.data.cpu().numpy()
        return hidden_feat

    @staticmethod
    def get_seed_centers(n_clusters, seeds_dict, feat):
        """Mean embedding of each cluster's seed points.

        Clusters with no seeds keep an all-zero center.
        """
        feature_size = feat.shape[1]
        centers = np.zeros((n_clusters, feature_size))
        for l in seeds_dict.keys():
            tmp_seeds = np.array(seeds_dict[l])
            centers[l] = feat[tmp_seeds].mean(axis=0)
        return centers

    @staticmethod
    def init_cluster(feat, n_clusters, init_centers=None):
        """Run (optionally warm-started) k-means; return (assignments, centers)."""
        init_centers = 'k-means++' if init_centers is None else init_centers
        kmeans = KMeans(n_clusters=n_clusters, init=init_centers, n_init=20)
        idx = kmeans.fit_predict(feat)
        centers = kmeans.cluster_centers_.astype(np.float32)
        return idx, centers

    @staticmethod
    def batch_km(data, centers, count):
        """One mini-batch k-means step (per-center learning-rate update).

        Assign each point to its nearest center, then move that center
        toward the point with learning rate 1 / count[center].
        """
        # data[:, np.newaxis]   -> (batch, 1, dim)
        # centers[np.newaxis, :] -> (1, k, dim)
        distances = np.linalg.norm(data[:, np.newaxis] - centers[np.newaxis, :], axis=-1)
        tmp_idx = np.argmin(distances, axis=-1)
        N = tmp_idx.shape[0]
        for i in range(N):
            c = tmp_idx[i]
            count[c] += 1
            eta = 1. / count[c]
            # Move center c toward its assigned point data[i]
            # (was data[c]: indexing the batch by cluster id).
            centers[c] = (1 - eta) * centers[c] + eta * data[i]
        return tmp_idx, centers, count

    @staticmethod
    def batch_km_seed(data, centers, count, mask, seed_labels):
        """Mini-batch k-means step that forces seed points (mask == 1) to
        their known labels before updating the centers."""
        distances = np.linalg.norm(data[:, np.newaxis] - centers[np.newaxis, :], axis=-1)
        tmp_idx = np.argmin(distances, axis=-1)
        for i in range(len(mask)):
            if mask[i] == 1:
                # Was `==` (a no-op comparison), so the seed constraint was
                # never actually applied.
                tmp_idx[i] = seed_labels[i]
        N = tmp_idx.shape[0]
        for i in range(N):
            c = tmp_idx[i]
            count[c] += 1
            eta = 1. / count[c]
            # data[i], not data[c] — same fix as in batch_km.
            centers[c] = (1 - eta) * centers[c] + eta * data[i]
        return tmp_idx, centers, count

    @staticmethod
    def whether_convergence(last_pred, current_pred, tol):
        """True when the fraction of changed assignments falls below tol."""
        delta = np.sum(last_pred != current_pred) / float(len(current_pred))
        return delta < tol
if __name__ == '__main__':
    # Script entry point: parse CLI arguments, load the pretrained stacked
    # denoising autoencoder and the text features, then train the seeded DCN.
    from utils import load_feat, initialize_environment
    from SDAE import extract_sdae_model
    from config import cfg, get_output_dir
    import os

    def get_args():
        """Parse command-line options for the seeded DCN training run."""
        import argparse
        parser = argparse.ArgumentParser(description='Deep Text Cluster Model')
        parser.add_argument('--data_dir', type=str, default='data/dbpedia/', help='directory of dataset')
        parser.add_argument('--n_clusters', type=int, default=14, help='cluster number')
        parser.add_argument('--seed', type=int, default=cfg.RNG_SEED, help='random seed')
        parser.add_argument('--tol', type=float, default=0.001, help='tolerance')
        parser.add_argument('--lr', type=float, default=0.001, help='learning rate')
        parser.add_argument('--recons_lam', type=float, default=1, help='reconstruction loss regularization coefficient')
        parser.add_argument('--cluster_lam', type=float, default=0.5, help='cluster loss regularization coefficient')
        parser.add_argument('--batch_size', type=int, default=256, help='batch size')
        parser.add_argument('--max_epochs', type=int, default=100, help='max epochs')
        parser.add_argument('--verbose', help='whether to print log', action='store_true')
        args = parser.parse_args()
        return args

    args = get_args()
    # n_clusters = 4
    # data_dir = 'data/ag_news/'
    # Unpack CLI options into local variables.
    data_dir = args.data_dir
    n_clusters = args.n_clusters
    use_cuda = torch.cuda.is_available()
    random_seed = args.seed
    recons_lam = args.recons_lam
    cluster_lam = args.cluster_lam
    batch_size = args.batch_size
    tol = args.tol
    lr = args.lr

    # Seed RNGs (and CUDA, if used) for reproducibility.
    initialize_environment(random_seed=random_seed, use_cuda=use_cuda)

    # Load the precomputed text features and ground-truth labels.
    feat_path = os.path.join(data_dir, cfg.TRAIN_TEXT_FEAT_FILE_NAME)
    feat, labels, ids = load_feat(feat_path)
    # Restore the pretrained autoencoder weights from the output directory.
    outputdir = get_output_dir(data_dir)
    net_filename = os.path.join(outputdir, cfg.PRETRAINED_FAE_FILENAME)
    checkpoint = torch.load(net_filename)
    net = extract_sdae_model(input_dim=cfg.INPUT_DIM, hidden_dims=cfg.HIDDEN_DIMS)
    net.load_state_dict(checkpoint['state_dict'])
    if use_cuda:
        net.cuda()
    # Load the labelled seed points used for semi-supervision.
    seed_path = os.path.join(data_dir, cfg.SEED_FILE_NAME)
    seeds_dict = load_seeds_dict(seed_path)
    # cfg.HIDDEN_DIMS[-1] is the embedding dimensionality of the encoder.
    dcn = seed_DCN(n_clusters,
                   net,
                   cfg.HIDDEN_DIMS[-1],
                   lr=lr,
                   tol=tol,
                   batch_size=batch_size,
                   recons_lam=recons_lam,
                   cluster_lam=cluster_lam,
                   use_cuda=use_cuda,
                   verbose=True)
    dcn.fit(feat, seeds_dict, labels=labels)
| [
"[email protected]"
] |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.