#!/usr/bin/env python
"""This file contains various utility classes used by GRR data stores."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
import collections
import os
import re
import stat
from grr_response_core.lib import rdfvalue
from grr_response_core.lib import utils
def ConvertStringToFilename(name):
"""Converts an unicode string to a filesystem safe filename.
For maximum compatibility we escape all chars which are not alphanumeric (in
the unicode sense).
Args:
name: a unicode string that is part of a subject.
Returns:
A safe filename with escaped special chars.
"""
return re.sub(
r"\W", lambda x: "%%%02X" % ord(x.group(0)), name,
flags=re.UNICODE).rstrip("/")
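# A minimal usage sketch (illustrative input, not from the original module):
# ConvertStringToFilename(u"C.1234/fs os") escapes every non-word character,
# e.g. "." -> "%2E", "/" -> "%2F", " " -> "%20", giving "C%2E1234%2Ffs%20os".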
def Components(subject):
  """Splits a subject (an RDFURN or a string coercible to one) into components."""
  if not isinstance(subject, rdfvalue.RDFURN):
    subject = rdfvalue.RDFURN(subject)
  return subject.Split()
def ResolveSubjectDestination(subject, regexes):
"""Returns the directory/filename where the subject will be stored.
Args:
subject: The subject.
regexes: The list of regular expressions by priority.
Returns:
File name and directory.
"""
components = Components(subject)
if not components:
# No components to work with.
return "aff4", ""
# Make all the components safe to use.
path = utils.JoinPath(*[ConvertStringToFilename(x) for x in components])
for route in regexes:
m = route.match(path)
if m:
value = m.group("path")
if value:
base = os.path.basename(value)
dirname = os.path.dirname(value)
return base, dirname
# Default value if nothing else matches.
return "aff4", ""
def MakeDestinationKey(directory, filename):
"""Creates a name that identifies a database file."""
return utils.SmartStr(utils.JoinPath(directory, filename)).lstrip("/")
def DatabaseDirectorySize(root_path, extension):
"""Compute size (in bytes) and number of files of a file-based data store."""
directories = collections.deque([root_path])
total_size = 0
total_files = 0
while directories:
directory = directories.popleft()
try:
items = os.listdir(directory)
except OSError:
continue
for comp in items:
path = os.path.join(directory, comp)
try:
statinfo = os.lstat(path)
if stat.S_ISLNK(statinfo.st_mode):
continue
if stat.S_ISDIR(statinfo.st_mode):
directories.append(path)
elif stat.S_ISREG(statinfo.st_mode):
if comp.endswith(extension):
total_size += statinfo.st_size
total_files += 1
except OSError:
continue
return total_size, total_files
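# Hypothetical usage (path and extension are examples only):
#   total_bytes, file_count = DatabaseDirectorySize("/var/grr/data", ".sqlite")
# Symlinks are skipped and unreadable directories are silently ignored.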
# -*- coding: utf8 -*-
import unittest
import os
import sys
import mock
# Change path so we find sdk
sys.path.insert(1, os.path.join(sys.path[0], '..'))
from sdk.api.resource import *
class TestResourcesAPI(unittest.TestCase):
__TEST_TOKEN = "test"
__USER_AGENT = "SDK v2"
__API_CLIENT = "6918a2e6-86e8-4be3-9800-e658dd37e760"
__TEST_HEADERS = {
"Authorization": "bearer {0}".format(__TEST_TOKEN),
"User-Agent": __USER_AGENT,
"X-API-CLIENT": __API_CLIENT
}
def mocked_requests_get(*args, **kwargs):
class MockResponse:
def __init__(self, json_data, status_code):
self.json_data = json_data
self.status_code = status_code
self.raise_for_status = None
def json(self):
if self.status_code != 200:
raise Exception
return self.json_data
return MockResponse({'msg': 'Not found'}, 200)
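    # The patched requests.get always returns this fixed 200 response, so the
    # assertions below only inspect the outgoing call arguments, not the body.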
def setUp(self):
self.bot_ips = Resource(base_url='https://api.blueliv.com',
name='botips',
token=self.__TEST_TOKEN,
http_timeout=60)
self.crimeservers = Resource(base_url='https://api.blueliv.com',
name='crimeservers',
token=self.__TEST_TOKEN,
http_timeout=60)
self.malwares = Resource(base_url='https://api.blueliv.com',
name='malwares',
token=self.__TEST_TOKEN,
http_timeout=60)
self.hacktivism_ops = Resource(base_url='https://api.blueliv.com',
name='hacktivism_ops',
token=self.__TEST_TOKEN,
http_timeout=60)
self.hacktivism_country = Resource(base_url='https://api.blueliv.com',
name='hacktivism_country',
token=self.__TEST_TOKEN,
http_timeout=60)
def test_token_headers(self):
self.assertEqual(self.bot_ips.headers, self.__TEST_HEADERS)
@mock.patch('requests.get', side_effect=mocked_requests_get)
def test_bot_ips_feed(self, mock_get):
self.bot_ips.recent('non-pos')
self.assertIn(mock.call(self.bot_ips.base_url + '/v1/ip/recent?key={0}'.format(self.__API_CLIENT),
headers=self.__TEST_HEADERS,
proxies=None,
timeout=60,
verify=True), mock_get.call_args_list)
self.bot_ips.last('non-pos')
self.assertIn(mock.call(self.bot_ips.base_url + '/v1/ip/last?key={0}'.format(self.__API_CLIENT),
headers=self.__TEST_HEADERS,
proxies=None,
timeout=60,
verify=True), mock_get.call_args_list)
@mock.patch('requests.get', side_effect=mocked_requests_get)
def test_crimeservers_feed(self, mock_get):
self.crimeservers.recent()
self.assertIn(mock.call(self.bot_ips.base_url + '/v1/crimeserver/recent?key={0}'.format(self.__API_CLIENT),
headers=self.__TEST_HEADERS,
proxies=None,
timeout=60,
verify=True), mock_get.call_args_list)
self.crimeservers.last()
self.assertIn(mock.call(self.bot_ips.base_url + '/v1/crimeserver/last?key={0}'.format(self.__API_CLIENT),
headers=self.__TEST_HEADERS,
proxies=None,
timeout=60,
verify=True), mock_get.call_args_list)
self.crimeservers.online()
self.assertIn(mock.call(self.bot_ips.base_url + '/v1/crimeserver/online?key={0}'.format(self.__API_CLIENT),
headers=self.__TEST_HEADERS,
proxies=None,
timeout=60,
verify=True), mock_get.call_args_list)
@mock.patch('requests.get', side_effect=mocked_requests_get)
def test_bot_ips_pos_feed(self, mock_get):
self.bot_ips.recent(feed_type='pos')
self.assertIn(mock.call(self.bot_ips.base_url + '/v1/ip/pos/recent?key={0}'.format(self.__API_CLIENT),
headers=self.__TEST_HEADERS,
proxies=None,
timeout=60,
verify=True), mock_get.call_args_list)
self.bot_ips.last(feed_type='pos')
self.assertIn(mock.call(self.bot_ips.base_url + '/v1/ip/pos/last?key={0}'.format(self.__API_CLIENT),
headers=self.__TEST_HEADERS,
proxies=None,
timeout=60,
verify=True), mock_get.call_args_list)
@mock.patch('requests.get', side_effect=mocked_requests_get)
def test_bot_ips_full_feed(self, mock_get):
self.bot_ips.recent(feed_type='pos')
self.assertIn(mock.call(self.bot_ips.base_url + '/v1/ip/pos/recent?key={0}'.format(self.__API_CLIENT),
headers=self.__TEST_HEADERS,
proxies=None,
timeout=60,
verify=True), mock_get.call_args_list)
self.bot_ips.last(feed_type='pos')
self.assertIn(mock.call(self.bot_ips.base_url + '/v1/ip/pos/last?key={0}'.format(self.__API_CLIENT),
headers=self.__TEST_HEADERS,
proxies=None,
timeout=60,
verify=True), mock_get.call_args_list)
@mock.patch('requests.get', side_effect=mocked_requests_get)
    def test_malwares_feed(self, mock_get):
self.malwares.recent()
self.assertIn(mock.call(self.bot_ips.base_url + '/v1/malware/recent?key={0}'.format(self.__API_CLIENT),
headers=self.__TEST_HEADERS,
proxies=None,
timeout=60,
verify=True), mock_get.call_args_list)
self.malwares.last()
self.assertIn(mock.call(self.bot_ips.base_url + '/v1/malware/last?key={0}'.format(self.__API_CLIENT),
headers=self.__TEST_HEADERS,
proxies=None,
timeout=60,
verify=True), mock_get.call_args_list)
@mock.patch('requests.get', side_effect=mocked_requests_get)
    def test_hacktivism_feed(self, mock_get):
self.hacktivism_ops.recent()
self.assertIn(mock.call(self.bot_ips.base_url + '/v1/hacktivism/ops/recent?key={0}'.format(self.__API_CLIENT),
headers=self.__TEST_HEADERS,
proxies=None,
timeout=60,
verify=True), mock_get.call_args_list)
self.hacktivism_country.last()
self.assertIn(mock.call(self.bot_ips.base_url + '/v1/hacktivism/country/last?key={0}'.format(self.__API_CLIENT),
headers=self.__TEST_HEADERS,
proxies=None,
timeout=60,
verify=True), mock_get.call_args_list)
self.hacktivism_ops.current()
self.assertIn(mock.call(self.bot_ips.base_url + '/v1/hacktivism/ops/current?key={0}'.format(self.__API_CLIENT),
headers=self.__TEST_HEADERS,
proxies=None,
timeout=60,
verify=True), mock_get.call_args_list)
    def test_non_existent_feed(self):
self.assertRaises(InvalidResource, self.bot_ips.recent, ('p0s'))
self.assertRaises(InvalidResource, self.bot_ips.last, ('p0s'))
self.assertRaises(InvalidResource, self.crimeservers.recent, ('xx'))
self.assertRaises(InvalidResource, self.crimeservers.last, ('xx'))
@mock.patch('requests.get', side_effect=mocked_requests_get)
def test_debug_endpoint(self, mock_get):
self.bot_ips.test()
self.assertIn(mock.call(self.bot_ips.base_url + '/v1/ip/test?key={0}'.format(self.__API_CLIENT),
headers=self.__TEST_HEADERS,
proxies=None,
timeout=60,
verify=True), mock_get.call_args_list)
self.crimeservers.test()
self.assertIn(mock.call(self.bot_ips.base_url + '/v1/crimeserver/test?key={0}'.format(self.__API_CLIENT),
headers=self.__TEST_HEADERS,
proxies=None,
timeout=60,
verify=True), mock_get.call_args_list)
if __name__ == '__main__':
suite = unittest.TestLoader().loadTestsFromTestCase(TestResourcesAPI)
unittest.TextTestRunner(verbosity=2).run(suite)
"""
Django settings for test_opa project.
For more information on this file, see
https://docs.djangoproject.com/en/1.7/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.7/ref/settings/
"""
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
import os
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.7/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '<KEY>'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
TEMPLATE_DEBUG = True
ALLOWED_HOSTS = []
LOGIN_REDIRECT_URL = 'payments:dash_payments'
LOGIN_URL = 'login'
LOGOUT_URL = 'logout'
# Application definition
INSTALLED_APPS = (
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'catalog',
'paints',
'sales',
'payments',
'django_celery_beat',
'django_celery_results',
)
MIDDLEWARE_CLASSES = (
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
ROOT_URLCONF = 'conf.urls'
WSGI_APPLICATION = 'conf.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.7/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Internationalization
# https://docs.djangoproject.com/en/1.7/topics/i18n/
LANGUAGE_CODE = 'pt-br'
TIME_ZONE = 'America/Fortaleza'
USE_I18N = True
USE_L10N = True
USE_TZ = False
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.7/howto/static-files/
from django.conf.global_settings import TEMPLATE_CONTEXT_PROCESSORS as DEFAULT_TEMPLATE_CONTEXT_PROCESSORS
TEMPLATE_CONTEXT_PROCESSORS = DEFAULT_TEMPLATE_CONTEXT_PROCESSORS + (
'django.core.context_processors.request',
)
TEMPLATE_DIRS = (
os.path.join(BASE_DIR, 'templates'),
)
STATICFILES_DIRS = (
os.path.join(BASE_DIR, 'static'),
)
MEDIA_ROOT = os.path.join(BASE_DIR,'media')
MEDIA_URL = '/media/'
STATIC_ROOT = os.path.join(BASE_DIR,'static_files')
STATIC_URL = '/static/'
BROKER_URL = 'redis://localhost:6379/0'
CELERY_ACCEPT_CONTENT = ['json']
CELERY_TASK_SERIALIZER = 'json'
CELERY_RESULT_SERIALIZER = 'json'
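# A minimal Celery wiring sketch for these settings (hypothetical conf/celery.py,
# not part of this settings module):
# from celery import Celery
# app = Celery("conf")
# app.config_from_object("django.conf:settings")
# app.autodiscover_tasks()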
# -*- coding:utf-8 -*-
import xmensur
men1 = xmensur.Men()
men2 = xmensur.Men(1)
men11 = men1  # men11 is an alias of men1, not a copy
print(men1 == men2)   # distinct instances; False unless Men defines __eq__
print(men1 == men11)  # same object, so True under default identity comparison
import numpy as np
import matplotlib.pyplot as plt
import pandas as pd
def RK2(func, X0, tmin, tmax, h, cons):
    """Second-order Runge-Kutta (Heun) integrator for dX/dt = func(t, X, cons)."""
    N = int((tmax - tmin) / h)
    t = np.linspace(tmin, tmax, N)
    X = np.zeros([N, len(X0)])
    X[0] = X0
    for i in range(N - 1):
        k1 = h * func(t[i], X[i], cons)
        k2 = h * func(t[i] + h, X[i] + k1, cons)
        X[i + 1] = X[i] + (k1 + k2) / 2
    return X, t
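# Quick self-check sketch (assumed test problem, not part of the original
# script): for x'' = -x with x(0)=1, x'(0)=0 the exact solution is cos(t).
# Note that np.linspace gives t a spacing of (tmax-tmin)/(N-1), slightly wider
# than h, so X[i] is really the state at tmin + i*h:
#   X, _ = RK2(lambda t, X, c: np.array([X[1], -X[0]]), [1, 0], 0, 1, 0.01, None)
#   steps = 0.01 * np.arange(len(X))
#   assert np.allclose(X[:, 0], np.cos(steps), atol=1e-3)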
def f2(t,X,cons):
k,m,b=cons
x,z=X
dx_dt=z
dx2_dt2=-b*z/m-k*x/m
return np.array([dx_dt, dx2_dt2])
def f3(t,X,cons):
g,L=cons
th,z=X
dth_dt=z
dth2_dt2=-g*th/L
return np.array([dth_dt, dth2_dt2])
def f4(t,X,cons):
k,m,g,L=cons
w0=np.sqrt(g/L)
xa,z1,xb,z2=X
xb=np.radians(xb)
xa=np.radians(xa)
e1=z1
e2=-(w0**2)*xa-(k/m)*(xa-xb)
e3=z2
e4=-(w0**2)*xb+(k/m)*(xa-xb)
return np.array([e1,e2,e3,e4])
#3a
IC=[2,0];tmin=0;m=0.5;k=4;h=0.01;b=0#SI units
tp=2*np.pi*np.sqrt(m/k)
tmax=5*tp
cons=(k,m,b)
S=RK2(f2,IC,tmin,tmax,h,cons)
x,v= S[0].T
t=S[1]
t1=t/tp
fig, axs = plt.subplots(2)
fig.suptitle('SIMPLE HARMONIC OSCILLATOR',c="r")
axs[0].plot(t1,x,marker="*",c="orange")
axs[0].set(xlabel="time/time period ",ylabel="Dispacement")
axs[0].grid()
axs[1].plot(t1,v,marker="*",c="cyan")
axs[1].set(xlabel=" Time/Time Period ",ylabel="Velocity")
axs[1].grid()
plt.show()
print("----------------------------- SIMPLE HARMONIC OSCILLATOR---------------------------------")
d={"No. of time periods":t1,"Displacement":x,"Velocity":v}
print(pd.DataFrame(d))
#3b https://scipy-lectures.org/intro/scipy/auto_examples/plot_odeint_damped_spring_mass.html (ref)
IC=[1,0];tmin=0;h=0.1;m=0.5;k=4 #SI units
tmax=100
dis=[];vel=[];time=[]
ba=[0.2,np.sqrt(4*m*k),3]
for b in ba:
    cons=(k,m,b)  # order must match the k,m,b=cons unpacking in f2
S=RK2(f2,IC,tmin,tmax,h,cons)
x,v= S[0].T
t=S[1]
dis.append(x)
vel.append(v)
fig, ax = plt.subplots(3,2)
fig.suptitle('DAMPED HARMONIC OSCILLATOR',c="r")
ax[0,0].plot(t,dis[0],marker="*",c="r")
ax[0,0].set(xlabel="time ",ylabel="Dispacement",title="Underdamped")
ax[0,0].grid()
ax[0,1].plot(t,vel[0],marker="*",c="green")
ax[0,1].set(xlabel="time ",ylabel="Velocity",title="Underdamped")
ax[0,1].grid()
ax[1,0].plot(t,dis[1],marker="*",c="purple")
ax[1,0].set(xlabel="time ",ylabel="Dispacement",title="Critically Damped")
ax[1,0].grid()
ax[1,1].plot(t,vel[1],marker="*",c="darkblue")
ax[1,1].set(xlabel="time ",ylabel="Velocity",title="Critically Damped")
ax[1,1].grid()
ax[2,0].plot(t,dis[2],marker="*",c="brown")
ax[2,0].set(xlabel="time ",ylabel="Dispacement",title="Overdamped")
ax[2,0].grid()
ax[2,1].plot(t,vel[2],marker="*",c="violet")
ax[2,1].set(xlabel="time ",ylabel="Velocity",title="Overdamped")
ax[2,1].grid()
plt.show()
print("---------------------- DAMPED HARMONIC OSCILLATOR (UNDERDAMPED)-----------------------")
d={"Time":t,"Displacement":dis[0],"Velocity":vel[0]}
print(pd.DataFrame(d))
print("---------------------- DAMPED HARMONIC OSCILLATOR (CRITICALLY DAMPED)-----------------------")
d={"Time":t,"Displacement":dis[1],"Velocity":vel[1]}
print(pd.DataFrame(d))
print("---------------------- DAMPED HARMONIC OSCILLATOR (OVERDAMPED)-----------------------")
d={"Time":t,"Displacement":dis[2],"Velocity":vel[2]}
print(pd.DataFrame(d))
#3c
IC=[1,0];tmin=0;g=9.8;L=2 #SI units
cons1=(g,L)
tp=2*np.pi*np.sqrt(L/g)
tmax=10*tp
h=tp/50
S=RK2(f3,IC,tmin,tmax,h,cons1)
x,v= S[0].T
t=S[1]
t1=t/tp
fig, axs = plt.subplots(2)
fig.suptitle('SIMPLE PENDULUM',c="r")
axs[0].plot(t1,x,marker="*",c="r")
axs[0].set(xlabel="Time/Time Period ",ylabel="Angular Dispacement")
axs[0].grid()
axs[1].plot(t1,v,marker="*",c="green")
axs[1].set(xlabel=" Time/Time Period ",ylabel="Angular Velocity")
axs[1].grid()
plt.show()
print("------------------------------ SIMPLE PENDULUM------------------------------")
d={"No. of time periods":t,"Angular Displacement":x,"Angular Velocity":v}
print(pd.DataFrame(d))
#3d
IC=[10,0,-10,0];tmax=80;tmin=0;h=0.01;m1=10;k1=90;g1=9.8;l1=10 #SI units
cons1=(k1,m1,g1,l1)
S=RK2(f4,IC,tmin,tmax,h,cons1)
x1,v1,x2,v2= S[0].T
t3=S[1]
fig, axs = plt.subplots(2, 2)
fig.suptitle('COUPLED SYSTEM',c="r")
axs[0, 0].plot(t3,x1,c="r")
axs[0,0].set(xlabel=" Time",ylabel="Displacement",title="Mass A")
axs[0,0].grid()
axs[0, 1].plot(t3,v1,c="green")
axs[0,1].set(xlabel=" Time",ylabel="Velocity",title="Mass A")
axs[0,1].grid()
axs[1, 0].plot(t3,x2,c="magenta")
axs[1,0].set(xlabel=" Time",ylabel="Displacement",title="Mass B")
axs[1,0].grid()
axs[1, 1].plot(t3,v2,c="green")
axs[1,1].set(xlabel=" Time",ylabel="Velocity",title="Mass B")
axs[1,1].grid()
plt.show()
print("---------------------- COUPLED PENDULUM-----------------------")
print("-------------------------------- MASS A ---------------------------------")
d={"Time":t3,"Displacement":x1,"Velocity":v1}
print(pd.DataFrame(d))
print("-------------------------------- MASS B ---------------------------------")
d={"Time":t3,"Displacement":x2,"Velocity":v2}
print(pd.DataFrame(d))
"""CLI command to run shell commands on a Lambda Function."""
import json
import os
import subprocess
from pprint import pformat
from typing import Any, Dict, List, Optional
import typer
from boto3 import Session
from .exceptions import LambdaInvocationFailed, ShellCommandFailed, UnexpectedResponse
from .helpers import get_aws_account_information
def run_fargate_shell(
aws_account: Dict[str, Any], shell_args: Optional[List[str]]
) -> None:
"""
Run a shell command in an ECS container through aws ecs exec-command.
    `stdout` and `stderr` from the executed command are printed locally
to stdout. The output comes from the AWS CLI `exec-command`.
A `ShellCommandFailed` exception is raised if it is not possible
to execute the command.
The return code of the command is not captured. The output needs to
be parsed in order to detect if the command executed properly or not.
"""
ecs_client = Session(
aws_access_key_id=aws_account["credentials"]["aws_access_key_id"],
aws_secret_access_key=aws_account["credentials"]["aws_secret_access_key"],
aws_session_token=aws_account["credentials"]["aws_session_token"],
region_name=aws_account["credentials"]["region_name"],
).client("ecs")
task_arn = ecs_client.list_tasks(
cluster=aws_account["ecs_cluster"],
serviceName=aws_account["worker_service"] or aws_account["web_service"],
)["taskArns"][0]
process = subprocess.Popen(
[
"aws",
"ecs",
"execute-command",
"--cluster",
aws_account["ecs_cluster"],
"--task",
task_arn,
"--container",
"WebContainer",
"--interactive",
"--command",
" ".join(shell_args),
],
env={
**os.environ,
**{
"AWS_ACCESS_KEY_ID": aws_account["credentials"]["aws_access_key_id"],
"AWS_SECRET_ACCESS_KEY": aws_account["credentials"][
"aws_secret_access_key"
],
"AWS_SESSION_TOKEN": aws_account["credentials"]["aws_session_token"],
"AWS_REGION": aws_account["credentials"]["region_name"],
},
},
stdout=subprocess.PIPE,
stderr=subprocess.STDOUT,
)
success = True
for line in iter(lambda: process.stdout.readline(), b""):
if b"----------ERROR-------" in line:
success = False
typer.echo(line.rstrip())
if not success:
raise ShellCommandFailed("Shell command failed")
def run_lambda_shell(
aws_account: Dict[str, Any], shell_args: Optional[List[str]], log_result: bool
) -> None:
"""
Run a shell command in the Tasks lambda function.
    `stdout` and `stderr` from the executed command are printed locally
in their corresponding stream.
The python process exits with the same return code from the command.
If the lambda function fails to execute or it is not possible to execute
the shell command, an exception is raised.
"""
lambda_client = Session(
aws_access_key_id=aws_account["credentials"]["aws_access_key_id"],
aws_secret_access_key=aws_account["credentials"]["aws_secret_access_key"],
aws_session_token=aws_account["credentials"]["aws_session_token"],
region_name=aws_account["credentials"]["region_name"],
).client("lambda")
payload = {
"args": shell_args,
"log_result": log_result,
"handler_path": "pd_aws_lambda.handlers.shell.handler",
}
typer.echo("Invoking Lambda function")
response = lambda_client.invoke(
FunctionName=aws_account["tasks_function"],
Payload=json.dumps(payload).encode(),
)
if response["StatusCode"] != 200:
raise LambdaInvocationFailed(
"Lambda execution failed", response.get("FunctionError")
)
result = json.loads(response["Payload"].read().decode())
if not isinstance(result, dict):
raise UnexpectedResponse(result)
if "FunctionError" in response:
typer.echo(pformat(result["errorMessage"]), err=True)
raise ShellCommandFailed("Shell command failed", result["errorType"])
if result["stdout"]:
typer.echo(result["stdout"].strip("\n"))
if result["stderr"]:
typer.echo(result["stderr"].strip("\n"), err=True)
exit(result["returncode"])
def run_command(
shell_args: Optional[List[str]] = typer.Argument(None, help="Command to execute."),
log_result: bool = typer.Option(
False,
help="Log the results into AWS CloudWatch. (Only for Lambda applications)",
),
app_id: str = typer.Option(
os.environ.get("PD_APP_ID"),
help="PythonDeploy application id. Default: environment variable PD_APP_ID",
),
api_key: str = typer.Option(
os.environ.get("PD_API_KEY"),
help="PythonDeploy api key. Default: environment variable PD_API_KEY",
),
) -> None:
"""
    Execute a remote command in your application.
---
For Fargate applications, run a shell command in an ECS container through
`aws ecs exec-command`.
    `stdout` and `stderr` from the executed command are printed locally
to stdout. The output comes from the AWS CLI `exec-command`.
A `ShellCommandFailed` exception is raised if it is not possible
to execute the command.
The return code of the command is not captured. The output needs to
be parsed in order to detect if the command executed properly or not.
---
For Lambda applications, run a shell command in the Tasks lambda function.
    `stdout` and `stderr` from the executed command are printed locally
in their corresponding stream.
The python process exits with the same return code from the command.
If the lambda function fails to execute or it is not possible to execute
the shell command, an exception is raised.
"""
aws_account = get_aws_account_information(app_id, api_key)
if aws_account["manager"] == "LambdaFunctionManager":
run_lambda_shell(aws_account, shell_args, log_result)
return
if aws_account["manager"] == "FargateFunctionManager":
run_fargate_shell(aws_account, shell_args)
return
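# A minimal CLI wiring sketch (hypothetical entry point; the package's actual
# Typer app is not shown in this file):
# app = typer.Typer()
# app.command()(run_command)
# if __name__ == "__main__":
#     app()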
# Generated by Django 3.1.7 on 2021-03-20 17:56
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('integration', '0004_auto_20210320_1207'),
]
operations = [
migrations.CreateModel(
name='Broker',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('id_broker', models.IntegerField(blank=True, null=True)),
                ('operadora', models.CharField(blank=True, choices=[('OI', 'OI'), ('CLARO', 'CLARO'), ('VIVO', 'VIVO'), ('TIM', 'TIM'), ('NEXTEL', 'NEXTEL')], max_length=15, null=True)),
],
),
]
# pyGPs/GraphExtensions/__init__.py
from __future__ import absolute_import
from . import graphKernels
from . import graphUtil
from . import nodeKernels
from django.shortcuts import render, redirect
from django.core.paginator import Paginator
from django.http import HttpResponse, HttpResponseRedirect, JsonResponse
from django.db import connection
from django.contrib.auth import authenticate, login, logout
from django.contrib.auth.models import User
from django.template.loader import render_to_string, get_template
from django.template import RequestContext
from django.core.files.storage import FileSystemStorage
from django.core.serializers.json import DjangoJSONEncoder
from django.db.models import Q
from django.conf import settings
from .forms import UsersLoginForm, UsersRegisterForm
from .forms import AddCommunity
from .forms import AddPosttype
from .forms import SendPrimitives
from .forms import AddTextEntry, AddTextEntryEnum, AddTagPost, AddTextPost, AddTextAreaPost, AddImagePost, AddAudioPost, AddVideoPost, AddBooleanPost, AddEmailPost, AddIpAddressPost, AddUrlPost, AddDatePost, AddTimePost, AddDateTimePost, AddIntegerPost, AddDecimalPost, AddFloatPost, AddEnumaratedPost, AddLocationPost, textComment, ReportPost, EditCommunity
from .forms import AddTagSearch, AddTextSearch, AddTextAreaSearch, AddImageSearch, AddAudioSearch, AddVideoSearch, AddBooleanSearch, AddEmailSearch, AddIpAddressSearch, AddUrlSearch, AddDateSearch, AddTimeSearch, AddDateTimeSearch, AddIntegerSearch, AddDecimalSearch, AddFloatSearch, AddEnumaratedSearch, AddLocationSearch
from .forms import posttypeList, searchList, freeSearchField, EditUser
import json
import requests
import uuid
import hashlib
from datetime import datetime
from streampage.models import Primitives, communityUsers, Communities, Datatypes, DatatypeFields, PostsMetaHash, Posts, PostComments, CommunityTags, DatatTypeTags, PostTags, UserTags, ActivityStreams, ReportedPosts, UserCircle
from countryinfo import CountryInfo
from unicode_tr import unicode_tr
def saveTagSearch_view(src):
SEARCHPAGE = src
PARAMS = {
"action":"wbsearchentities",
"format": "json",
"limit": "50",
"language":"en",
"search": SEARCHPAGE
}
Srch = requests.Session()
URL = "https://wikidata.org/w/api.php"
Res = Srch.get(url=URL, params=PARAMS)
DATA = Res.json()['search']
titles = ""
items = ""
for tt in DATA:
titles = titles + tt['label']+","
items = items + tt['id']+","
return {'TITLE' : titles, "ITEM" : items}
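# Illustrative return shape (labels/ids are examples; real values come from
# the live Wikidata wbsearchentities query, comma-joined with a trailing comma):
#   saveTagSearch_view("python") -> {'TITLE': 'Python,...', 'ITEM': 'Q28865,...'}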
def saveTag_view(returneditems):
looping = returneditems.replace("#",",").split(",")
titles=""
items=""
for iter in looping:
if iter != '':
resp=saveTagSearch_view(iter)
try:
titles = titles + resp["TITLE"]
items = items + resp["ITEM"]
except:
print("!")
print({'TITLE' : titles, "ITEM" : items})
return {'TITLE' : titles, "ITEM" : items}
def LoginPage(request):
return render(request, 'login.html', {'community_resp': 'test'})
def index(request):
if request.user.is_authenticated:
user = request.user
userModel = communityUsers.objects.filter(nickName=user)[0]
userphoto = userModel.userPhoto
PosttypeList = Datatypes.objects.filter(subscribers=userModel)
followingList = []
subscriptionList = []
for i in PosttypeList:
subscriptionList.append(i.name)
if len(UserCircle.objects.filter(circleOwner=userModel)) > 0:
for i in UserCircle.objects.get(circleOwner=userModel).circleUsers.all():
followingList.append(i.nickName)
queriesUser = [Q(detail__actor__name=following) for following in followingList]
queries = queriesUser + [Q(detail__object__name=posttypename) for posttypename in subscriptionList]
else:
queries = [Q(detail__object__name=posttypename) for posttypename in subscriptionList]
if len(queries) > 0:
query = queries.pop()
for item in queries:
query |= item
activityDetailList = ActivityStreams.objects.filter(query).order_by('-id')
paginator = Paginator(activityDetailList, 10)
page = request.GET.get('page')
index_resp = paginator.get_page(page)
return render(request, 'index.html', {'activities': activityDetailList, 'index_resp': index_resp, 'userPhoto':userphoto})
else:
return render(request, 'index.html', {})
else:
return HttpResponseRedirect("/streampage/login")
def browsePage(request):
if Communities.objects.all():
Community_List = Communities.objects.filter(communityPrv=False).order_by('-communityCreationDate')
paginator = Paginator(Community_List, 3)
page = request.GET.get('page')
community_resp = paginator.get_page(page)
return render(request, 'browse.html', {'community_resp': community_resp})
else:
return render(request, 'login.html', {})
def PosttypePageBrowse(request):
try:
CommunityHash = request.GET.get('showDataTypes')
Community_List = Communities.objects.filter(communityHash=CommunityHash)
currentCommunity = Community_List[0]
postEntries={}
c = connection.cursor()
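        # A parameterized form of the query below (sketch; same result, but it
        # avoids building SQL by string concatenation):
        #   c.execute('select "entryHash" from streampage_posts '
        #             'where "relatedCommunityforPost_id" = %s '
        #             'group by "entryHash"', [currentCommunity.id])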
postHashQuery='select "entryHash" from streampage_posts where "relatedCommunityforPost_id" ='+str(currentCommunity.id)+' group by "entryHash"'
c.execute(postHashQuery)
posts=c.fetchall()
postInstance=[]
for hashes in posts:
currentObject={}
postInfo = PostsMetaHash.objects.filter(postMetaHash=hashes[0])[0]
currentObject['postList']=Posts.objects.filter(entryHash=hashes[0])
currentObject['posttype']=Posts.objects.filter(entryHash=hashes[0])[0].relatedDatatypes.datatypefields_set.all()
currentObject['comments']=postInfo.postcomments_set.all().order_by('-id')
postInstance.append(currentObject)
postEntries['postInstances']=postInstance
print(postEntries)
paginator = Paginator(posts, 5)
page = request.GET.get('page')
post_resp = paginator.get_page(page)
comment=textComment()
return render(request, 'browseDatatypes.html', {'postEntries':postEntries, 'comment': comment, 'post_resp': post_resp, 'community_Hash':CommunityHash, 'community':Community_List[0]})
except:
return HttpResponseRedirect("/streampage/login")
def showPostDetailsBrowse_view(request):
try:
EntryHash = request.GET.get('postHash')
queryPost = Posts.objects.filter(entryHash=EntryHash)
currentPost = queryPost[0]
relatedCommunity = currentPost.relatedCommunityforPost
relatedPosttype = currentPost.relatedDatatypes
postEntries={}
postInstance=[]
currentObject={}
postInfo = PostsMetaHash.objects.filter(postMetaHash=EntryHash)[0]
currentObject['postList']=Posts.objects.filter(entryHash=EntryHash)
currentObject['posttype']=Posts.objects.filter(entryHash=EntryHash)[0].relatedDatatypes.datatypefields_set.all()
currentObject['comments']=postInfo.postcomments_set.all().order_by('-id')
postInstance.append(currentObject)
postEntries['postInstances']=postInstance
comment=textComment()
return render(request, 'postDetailsBrowse.html', {'postEntries':postEntries, 'comment': comment, 'community':relatedCommunity, 'posttype': relatedPosttype })
except:
return HttpResponseRedirect("/streampage/login")
def communityPage(request):
if request.user.is_authenticated:
if Communities.objects.all():
Community_List = Communities.objects.all().order_by('-communityCreationDate')
Cuser = request.user
UserList = communityUsers.objects.filter(nickName=Cuser)[0]
userphoto = UserList.userPhoto
User_communities = UserList.members.all().order_by('-id')
paginator = Paginator(Community_List, 3)
page = request.GET.get('page')
community_resp = paginator.get_page(page)
return render(request, 'community.html', {'community_resp': community_resp, 'User_communities': User_communities, 'userPhoto':userphoto})
else:
return render(request, 'community.html', {})
else:
return HttpResponseRedirect("/streampage/login")
def communityForm(request):
form = AddCommunity()
return render(request, 'modal.html', {'form': form})
def populateProvince(request):
country = request.GET.__getitem__("country")
provinceList = []
if(str(country) == "Turkey"):
provinceList = ['Adana', 'Adıyaman', 'Afyon', 'Ağrı', 'Aksaray', 'Amasya', 'Ankara', 'Antalya', 'Ardahan', 'Artvin', 'Aydın', 'Balıkesir', 'Bartın', 'Batman',
'Bayburt', 'Bilecik', 'Bingöl', 'Bitlis', 'Bolu', 'Burdur', 'Bursa', 'Çanakkale', 'Çankırı', 'Çorum', 'Denizli', 'Diyarbakır', 'Düzce', 'Edirne',
'Elazığ', 'Erzincan', 'Erzurum', 'Eskişehir', 'Gaziantep', 'Giresun', 'Gümüşhane', 'Hakkari', 'Hatay', 'Içel', 'Iğdır', 'Isparta', 'İstanbul',
'İzmir', 'Kahramanmaraş', 'Karabük', 'Karaman', 'Kars', 'Kastamonu', 'Kayseri', 'Kilis', 'Kırıkkale', 'Kırklareli', 'Kırşehir', 'Kocaeli', 'Konya',
'Kütahya', 'Malatya', 'Manisa', 'Mardin', 'Muğla', 'Muş', 'Nevşehir', 'Niğde', 'Ordu', 'Osmaniye', 'Rize', 'Sakarya', 'Samsun', 'Şanlıurfa', 'Siirt',
'Sinop', 'Şırnak', 'Sivas', 'Tekirdağ', 'Tokat', 'Trabzon', 'Tunceli', 'Uşak', 'Van', 'Yalova', 'Yozgat', 'Zonguldak']
else:
try:
if CountryInfo(str(country)).provinces() != None:
for province in CountryInfo(str(country)).provinces():
provinceList.append(province)
except:
print("exception")
return JsonResponse({'provinceList': provinceList})
def handle_uploaded_file(f):
filepath = 'streampage/static/uploads/communities/'+f.name
with open(filepath, 'wb+') as destination:
for chunk in f.chunks():
destination.write(chunk)
return "/"+filepath.split("/")[1]+"/"+filepath.split("/")[2]+"/"+filepath.split("/")[3]+"/"+filepath.split("/")[4]+"/"
def CreateCommunity_view(request):
form = AddCommunity(request.POST, request.FILES)
c_image=request.FILES.get("Community_Image")
image_path=handle_uploaded_file(c_image)
comm = Communities()
comm.name = request.POST.get("Community_Name")
comm.description = request.POST.get("Community_Description")
salt = uuid.uuid4().hex
commhash = hashlib.sha256(salt.encode() + comm.name.encode()).hexdigest() + salt
comm.communityHash = commhash
if request.POST.get("Private_Community"):
comm.communityPrv = True
else:
comm.communityPrv = False
comm.communityPhoto = image_path
comm.communityCountry = request.POST.get("Community_Country")
comm.communityLocation = request.POST.get("Community_Location")
comm.communityTags = request.POST.get("Community_Tags")
comm.communityCreationDate = datetime.now()
comm.communityCreator = communityUsers.objects.get(nickName=request.user)
comm.save()
comm.communityMembers.add(communityUsers.objects.get(nickName=request.user))
comm.save()
Tags = saveTag_view(request.POST.get("Community_Tags"))
tagentry = CommunityTags()
relatedComm = Communities.objects.filter(communityHash=commhash)[0]
tagentry.communityTag = relatedComm
tagentry.tagName = Tags["TITLE"]
tagentry.tagItem = Tags["ITEM"]
tagentry.save()
activityStream = ActivityStreams()
description = {
"@context": "https://www.w3.org/ns/activitystreams",
"type": "created",
"published": str(comm.communityCreationDate),
"actor": {
"id": "",
"name": communityUsers.objects.get(nickName=request.user).nickName,
"photo": communityUsers.objects.get(nickName=request.user).userPhoto
},
"object": {
"id": "",
"type": "Community",
"name": comm.name,
"hash": comm.communityHash
}
}
ActivityStreams.objects.create(detail = description)
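    # The description dict loosely follows the W3C Activity Streams 2.0
    # vocabulary (@context, type, published, actor, object); the same shape is
    # reused by every activity-producing view below.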
return render(None, 'tagSearch.html', {'form' : "Community is created Successfully!"})
def EditCommunityModal_view(request):
form = EditCommunity()
return render(request, 'modal.html', {'form': form})
def EditCommunity_view(request):
try:
form = EditCommunity(request.POST, request.FILES)
c_image=request.FILES.get("Community_Image")
image_path=handle_uploaded_file(c_image)
communityHash = request.POST.get("community_Hash")
comm = Communities.objects.filter(communityHash=communityHash)[0]
comm.description = request.POST.get("Community_Description")
if request.POST.get("Private_Community"):
comm.communityPrv = True
else:
comm.communityPrv = False
comm.communityPhoto = image_path
comm.communityTags = request.POST.get("Community_Tags")
comm.communityCreationDate = datetime.now()
comm.communityCreator = communityUsers.objects.get(nickName=request.user)
comm.save()
comm.communityMembers.add(communityUsers.objects.get(nickName=request.user))
comm.save()
Tags = saveTag_view(request.POST.get("Community_Tags"))
tagentry = CommunityTags()
relatedComm = comm
tagentry.communityTag = relatedComm
tagentry.tagName = Tags["TITLE"]
tagentry.tagItem = Tags["ITEM"]
tagentry.save()
activityStream = ActivityStreams()
description = {
"@context": "https://www.w3.org/ns/activitystreams",
"type": "edited",
"published": str(comm.communityCreationDate),
"actor": {
"id": "",
"name": communityUsers.objects.get(nickName=request.user).nickName,
"photo": communityUsers.objects.get(nickName=request.user).userPhoto
},
"object": {
"id": "",
"type": "Community",
"name": comm.name,
"hash": comm.communityHash
}
}
ActivityStreams.objects.create(detail = description)
return render(None, 'tagSearch.html', {'form' : "Community is Edited Successfully!"})
except:
return render(None, 'tagSearch.html', {'form' : "Community cannot be Edited Successfully!"})
def JoinCommunity_view(request):
user = request.user
userModel = communityUsers.objects.filter(nickName=user)[0]
Comm = Communities.objects.get(communityHash=request.POST.get("community_Hash"))
Comm.communityMembers.add(userModel)
Comm.save()
activityStream = ActivityStreams()
description = {
"@context": "https://www.w3.org/ns/activitystreams",
"type": "joined",
"published": str(datetime.now()),
"actor": {
"id": "",
"name": communityUsers.objects.get(nickName=request.user).nickName,
"photo": communityUsers.objects.get(nickName=request.user).userPhoto
},
"object": {
"id": "",
"type": "Community",
"name": Comm.name,
"hash": Comm.communityHash
}
}
ActivityStreams.objects.create(detail = description)
return render(None, 'tagSearch.html', {'form': "You joined to Community Successfully!"})
def LeaveCommunity_view(request):
user = request.user
userModel = communityUsers.objects.filter(nickName=user)[0]
Comm = Communities.objects.get(communityHash=request.POST.get("community_Hash"))
Comm.communityMembers.remove(userModel)
Comm.save()
activityStream = ActivityStreams()
description = {
"@context": "https://www.w3.org/ns/activitystreams",
"type": "left",
"published": str(datetime.now()),
"actor": {
"id": "",
"name": communityUsers.objects.get(nickName=request.user).nickName,
"photo": communityUsers.objects.get(nickName=request.user).userPhoto
},
"object": {
"id": "",
"type": "Community",
"name": Comm.name,
"hash": Comm.communityHash
}
}
ActivityStreams.objects.create(detail = description)
return render(None, 'tagSearch.html', {'form': "You left from Community successfully!"})
def CheckMembership_view(request):
user = request.user
userModel = communityUsers.objects.filter(nickName=user)[0]
if Communities.objects.filter(communityMembers=userModel,communityHash=request.POST.get("community_Hash")):
return render(None, 'tagSearch.html', {'form': "Yes"})
else:
return render(None, 'tagSearch.html', {'form': "No"})
def VoteCommunity_view(request):
user = request.user
userModel = communityUsers.objects.filter(nickName=user)[0]
Comm = Communities.objects.get(communityHash=request.POST.get("community_Hash"))
Comm.communityPopularity.add(userModel)
    return render(None, 'tagSearch.html', {'form': "You voted for the Community Successfully!"})
def DeleteCommunity_view(request):
user = request.user
userModel = communityUsers.objects.filter(nickName=user)[0]
Comm = Communities.objects.get(communityHash=request.POST.get("community_Hash"))
name = Comm.name
comCreator = Comm.communityCreator
if str(user) == str(comCreator):
try:
Comm.delete()
activityStream = ActivityStreams()
description = {
"@context": "https://www.w3.org/ns/activitystreams",
"type": "deleted",
"published": str(datetime.now()),
"actor": {
"id": "",
"name": communityUsers.objects.get(nickName=request.user).nickName,
"photo": communityUsers.objects.get(nickName=request.user).userPhoto
},
"object": {
"id": "",
"type": "Community",
"name": name,
}
}
ActivityStreams.objects.create(detail = description)
return render(None, 'tagSearch.html', {'form': name+" Community has been Deleted Successfully !"})
except:
return render(None, 'tagSearch.html', {'form': name+" Community cannot be Deleted!"})
def posttypeForm(request):
form = AddPosttype()
return render(request, 'modal.html', {'form': form})
def searchTag_view(request):
txtSRC = request.GET.get('search_text')
SEARCHPAGE = txtSRC
PARAMS = {
"action":"wbsearchentities",
"format": "json",
"limit": "50",
"language":"en",
"search": SEARCHPAGE
}
Srch = requests.Session()
URL = "https://wikidata.org/w/api.php"
Res = Srch.get(url=URL, params=PARAMS)
DATA = Res.json()['search']
titles=""
for tt in DATA:
if tt['label'] not in titles:
titles+="#"+tt['label']
return render(None, 'tagSearch.html', {'form' : titles})
#TODO
def PosttypePageBCK(request):
if request.user.is_authenticated:
CommunityHash = request.GET.get('showDataTypes')
Community_List = Communities.objects.filter(communityHash=CommunityHash)
c = connection.cursor()
execution_string = 'select "entryHash",json_object_agg("propertyName","propertyValue") from (select "entryHash","propertyName","propertyValue" from streampage_posts) S GROUP BY "entryHash"'
c.execute(execution_string)
posts=c.fetchall()
paginator = Paginator(posts, 5)
page = request.GET.get('page')
post_resp = paginator.get_page(page)
comment=textComment()
return render(request, 'datatypes.html', {'comment': comment, 'post_resp': post_resp, 'community_Hash':CommunityHash, 'community':Community_List[0]})
else:
return HttpResponseRedirect("/streampage/login")
def PosttypePage(request):
if request.user.is_authenticated:
CommunityHash = request.GET.get('showDataTypes')
Community_List = Communities.objects.filter(communityHash=CommunityHash)
User = communityUsers.objects.filter(nickName=request.user)[0]
userphoto = User.userPhoto
currentCommunity = Community_List[0]
postEntries={}
c = connection.cursor()
postHashQuery='select "entryHash", "postCreationDate" from streampage_posts where "relatedCommunityforPost_id" ='+str(currentCommunity.id)+' group by "entryHash","postCreationDate" order by "postCreationDate" desc '
c.execute(postHashQuery)
posts=c.fetchall()
hashList=[]
for tuples in posts:
hashList.append(tuples[0])
cleanPosts = list(dict.fromkeys(hashList))
postInstance=[]
for hashes in cleanPosts:
currentObject={}
postInfo = PostsMetaHash.objects.filter(postMetaHash=hashes)[0]
currentObject['postList']=Posts.objects.filter(entryHash=hashes).order_by('-id')
currentObject['posttype']=Posts.objects.filter(entryHash=hashes)[0].relatedDatatypes.datatypefields_set.all().order_by('-id')
currentObject['comments']=postInfo.postcomments_set.all().order_by('-id')
postInstance.append(currentObject)
postEntries['postInstances']=postInstance
print(postEntries)
paginator = Paginator(posts, 5)
page = request.GET.get('page')
post_resp = paginator.get_page(page)
comment=textComment()
return render(request, 'datatypes.html', {'postEntries':postEntries, 'comment': comment, 'post_resp': post_resp, 'community_Hash':CommunityHash, 'community':Community_List[0], 'userPhoto':userphoto})
else:
return HttpResponseRedirect("/streampage/login")
def showPostDetails_view(request):
if request.user.is_authenticated:
EntryHash = request.GET.get('postHash')
User = communityUsers.objects.filter(nickName=request.user)[0]
userphoto = User.userPhoto
queryPost = Posts.objects.filter(entryHash=EntryHash)
currentPost = queryPost[0]
relatedCommunity = currentPost.relatedCommunityforPost
relatedPosttype = currentPost.relatedDatatypes
postEntries={}
postInstance=[]
currentObject={}
postInfo = PostsMetaHash.objects.filter(postMetaHash=EntryHash)[0]
currentObject['postList']=Posts.objects.filter(entryHash=EntryHash)
currentObject['posttype']=Posts.objects.filter(entryHash=EntryHash)[0].relatedDatatypes.datatypefields_set.all()
currentObject['comments']=postInfo.postcomments_set.all().order_by('-id')
postInstance.append(currentObject)
postEntries['postInstances']=postInstance
comment=textComment()
return render(request, 'postDetails.html', {'postEntries':postEntries, 'comment': comment, 'community':relatedCommunity, 'posttype': relatedPosttype, 'userPhoto':userphoto })
else:
return HttpResponseRedirect("/streampage/login")
def PostPage(request):
if request.user.is_authenticated:
DatatypeResult = Datatypes.objects.filter(datatypeHash=request.GET.get('showPosts'))
DatatypeHash = DatatypeResult[0].datatypeHash
DatatypeId = DatatypeResult[0].id
RCommunityFilter = DatatypeResult[0].relatedCommunity
RCommunity = Communities.objects.filter(name=RCommunityFilter.name)
Primitive_List = DatatypeResult[0].datatypefields_set.all()
c = connection.cursor()
execution_string = 'select "entryHash",json_object_agg("propertyName","propertyValue") from (select "entryHash","propertyName","propertyValue" from streampage_posts where "relatedDatatypes_id"='+str(DatatypeId)+') S GROUP BY "entryHash"'
c.execute(execution_string)
posts=c.fetchall()
paginator = Paginator(posts, 5)
page = request.GET.get('page')
post_resp = paginator.get_page(page)
return render(request, 'posts.html', {'post_resp': post_resp,'table_fields':Primitive_List,'Datatype_Id':DatatypeHash, 'Datatype_Name':DatatypeResult, 'Community_Name': RCommunity})
else:
return HttpResponseRedirect("/streampage/login")
def handle_uploaded_datatypefile(f):
filepath = 'streampage/static/uploads/datatypes/'+f.name
with open(filepath, 'wb+') as destination:
for chunk in f.chunks():
destination.write(chunk)
return "/"+filepath.split("/")[1]+"/"+filepath.split("/")[2]+"/"+filepath.split("/")[3]+"/"+filepath.split("/")[4]+"/"
def CreatePosttype_view(request):
form = AddPosttype(request.POST, request.FILES)
dt = Datatypes()
dt.name = request.POST.get("Posttype_Name")
salt = uuid.uuid4().hex
communityHash=request.POST.get("community_Hash")
DtHash = hashlib.sha256(salt.encode() + dt.name.encode()).hexdigest() + salt
dt.datatypeHash = DtHash
dt.relatedCommunity=Communities.objects.get(communityHash=request.POST.get("community_Hash"))
dt.datatypeTags = request.POST.get("Posttype_Tags")
dt.datatypeCreationDate = datetime.now()
dt.datatypeCreator = communityUsers.objects.get(nickName=request.user)
dt.save()
Tags = saveTag_view(request.POST.get("Posttype_Tags"))
tagentry = DatatTypeTags()
relatedDt = Datatypes.objects.filter(datatypeHash=DtHash)[0]
tagentry.datatypeTag = relatedDt
tagentry.tagName = Tags["TITLE"]
tagentry.tagItem = Tags["ITEM"]
tagentry.save()
activityStream = ActivityStreams()
description = {
"@context": "https://www.w3.org/ns/activitystreams",
"type": "created",
"published": str(datetime.now()),
"actor": {
"id": "",
"name": communityUsers.objects.get(nickName=request.user).nickName,
"photo": communityUsers.objects.get(nickName=request.user).userPhoto
},
"object": {
"id": "",
"type": "Posttype",
"name": dt.name,
},
"target": {
"id": "",
"type": "Community",
"name": dt.relatedCommunity.name,
"hash": dt.relatedCommunity.communityHash,
}
}
ActivityStreams.objects.create(detail = description)
return JsonResponse({'form' : "Posttype is created Successfully!",'communityHash' : communityHash, 'posttypeHash':DtHash})
def EditPosttypeMeta_view(request):
dt_hash = request.POST.get("Posttype_Hash")
dt = Datatypes.objects.filter(datatypeHash = dt_hash)[0]
dt.name = request.POST.get("Posttype_Name")
dt.datatypeTags = request.POST.get("Posttype_Tags")
dt.datatypeCreationDate = datetime.now()
dt.datatypeCreator = communityUsers.objects.get(nickName=request.user)
dt.save()
Tags = saveTag_view(request.POST.get("Posttype_Tags"))
tagentry = DatatTypeTags()
relatedDt = Datatypes.objects.filter(datatypeHash=dt_hash)[0]
tagentry.datatypeTag = relatedDt
tagentry.tagName = Tags["TITLE"]
tagentry.tagItem = Tags["ITEM"]
tagentry.save()
activityStream = ActivityStreams()
description = {
"@context": "https://www.w3.org/ns/activitystreams",
"type": "edited",
"published": str(datetime.now()),
"actor": {
"id": "",
"name": communityUsers.objects.get(nickName=request.user).nickName,
"photo": communityUsers.objects.get(nickName=request.user).userPhoto
},
"object": {
"id": "",
"type": "Posttype",
"name": dt.name,
},
"target": {
"id": "",
"type": "Community",
"name": dt.relatedCommunity.name,
"hash": dt.relatedCommunity.communityHash
}
}
ActivityStreams.objects.create(detail = description)
return JsonResponse({'form' : "Posttype is updated Successfully!",'posttypeHash':dt_hash})
def DeletePosttypeMeta_view(request):
dt_hash = request.POST.get("Posttype_Hash")
dt = Datatypes.objects.filter(datatypeHash = dt_hash)[0]
activityStream = ActivityStreams()
description = {
"@context": "https://www.w3.org/ns/activitystreams",
"type": "deleted",
"published": str(datetime.now()),
"actor": {
"id": "",
"name": communityUsers.objects.get(nickName=request.user).nickName,
"photo": communityUsers.objects.get(nickName=request.user).userPhoto
},
"object": {
"id": "",
"type": "Posttype",
"name": dt.name,
},
"target": {
"id": "",
"type": "Community",
"name": dt.relatedCommunity.name,
"hash": dt.relatedCommunity.communityHash
}
}
ActivityStreams.objects.create(detail = description)
dt.delete()
return JsonResponse({'form' : "Posttype is deleted Successfully!",'posttypeHash':dt_hash})
def addPosttypeField_view(request):
EnField = request.POST.get("Enumeration")
if EnField == 'on':
form = AddTextEntryEnum()
else:
form = AddTextEntry()
return render(None, 'modalPost.html', {'form' : form })
def SavePrimitives_view(request):
name = request.POST.get("name")
type = request.POST.get("Types")
req = request.POST.get("Required")
show = request.POST.get("ShowPage")
CommunityHash = request.POST.get("CommunityHash")
DatatypeHash = request.POST.get("PosttypeHash")
postType = Datatypes.objects.filter(datatypeHash=DatatypeHash)[0]
try:
checkName = postType.datatypefields_set.filter(name=name)[0].name
if checkName == name:
Enumeration = request.POST.get("Enum")
dtFields = DatatypeFields.objects.filter(name=name,relatedDatatype=postType)[0]
dtFields.fieldCreationDate = datetime.now()
dtFields.fieldCreator = communityUsers.objects.get(nickName=request.user)
if req == 'on':
dtFields.fieldRequired = True
else:
dtFields.fieldRequired = False
if show == 'on':
dtFields.fronttableShow = True
else:
dtFields.fronttableShow = False
if name == '':
return render(None, 'tagSearch.html', {'form' : "Please Enter The Name!!"})
elif type == '':
return render(None, 'tagSearch.html', {'form' : "Please Choose The Type!!"})
else:
if Enumeration is None:
typefield = Primitives.objects.get(name=type)
dtFields.name = name
dtFields.relatedDatatype = Datatypes.objects.get(datatypeHash=DatatypeHash)
dtFields.relatedComm = Communities.objects.get(communityHash=CommunityHash)
dtFields.relatedPrimitives = typefield
dtFields.save()
activityStream = ActivityStreams()
description = {
"@context": "https://www.w3.org/ns/activitystreams",
"type": "updated",
"published": str(datetime.now()),
"actor": {
"id": "",
"name": communityUsers.objects.get(nickName=request.user).nickName,
"photo": communityUsers.objects.get(nickName=request.user).userPhoto
},
"object": {
"id": "",
"type": "PosttypeField",
"name": Datatypes.objects.get(datatypeHash=DatatypeHash).name
},
"target": {
"id": "",
"type": "Community",
"name": Communities.objects.get(communityHash=CommunityHash).name,
"hash": Communities.objects.get(communityHash=CommunityHash).communityHash,
}
}
ActivityStreams.objects.create(detail = description)
return render(None, 'tagSearch.html', {'form' : "Data is updated!"})
else:
if Enumeration == '':
return render(None, 'tagSearch.html', {'form' : "Please Enter the Enumeration Fields!"})
else:
typefield = Primitives.objects.get(name=type)
dtFields.name = name
dtFields.relatedDatatype = Datatypes.objects.get(datatypeHash=DatatypeHash)
dtFields.relatedComm = Communities.objects.get(communityHash=CommunityHash)
dtFields.relatedPrimitives = typefield
dtFields.enumerations = Enumeration
dtFields.save()
activityStream = ActivityStreams()
description = {
"@context": "https://www.w3.org/ns/activitystreams",
"type": "updated",
"published": str(datetime.now()),
"actor": {
"id": "",
"name": communityUsers.objects.get(nickName=request.user).nickName,
"photo": communityUsers.objects.get(nickName=request.user).userPhoto
},
"object": {
"id": "",
"type": "PosttypeField",
"name": Datatypes.objects.get(datatypeHash=DatatypeHash).name
},
"target": {
"id": "",
"type": "Community",
"name": Communities.objects.get(communityHash=CommunityHash).name,
"hash": Communities.objects.get(communityHash=CommunityHash).communityHash,
}
}
ActivityStreams.objects.create(detail = description)
return render(None, 'tagSearch.html', {'form' : "Data is updated!"})
except:
Enumeration = request.POST.get("Enum")
dtFields = DatatypeFields()
dtFields.fieldCreationDate = datetime.now()
dtFields.fieldCreator = communityUsers.objects.get(nickName=request.user)
if req == 'on':
dtFields.fieldRequired = True
else:
dtFields.fieldRequired = False
if show == 'on':
dtFields.fronttableShow = True
else:
dtFields.fronttableShow = False
if name == '':
return render(None, 'tagSearch.html', {'form' : "Please Enter The Name!!"})
elif type == '':
return render(None, 'tagSearch.html', {'form' : "Please Choose The Type!!"})
else:
if Enumeration is None:
typefield = Primitives.objects.get(name=type)
dtFields.name = name
dtFields.relatedDatatype = Datatypes.objects.get(datatypeHash=DatatypeHash)
dtFields.relatedComm = Communities.objects.get(communityHash=CommunityHash)
dtFields.relatedPrimitives = typefield
dtFields.save()
activityStream = ActivityStreams()
description = {
"@context": "https://www.w3.org/ns/activitystreams",
"type": "added",
"published": str(datetime.now()),
"actor": {
"id": "",
"name": communityUsers.objects.get(nickName=request.user).nickName,
"photo": communityUsers.objects.get(nickName=request.user).userPhoto
},
"object": {
"id": "",
"type": "PosttypeField",
"name": Datatypes.objects.get(datatypeHash=DatatypeHash).name
},
"target": {
"id": "",
"type": "Community",
"name": Communities.objects.get(communityHash=CommunityHash).name,
"hash": Communities.objects.get(communityHash=CommunityHash).communityHash,
}
}
ActivityStreams.objects.create(detail = description)
return render(None, 'tagSearch.html', {'form' : "Data is saved!"})
else:
if Enumeration == '':
return render(None, 'tagSearch.html', {'form' : "Please Enter the Enumeration Fields!"})
else:
typefield = Primitives.objects.get(name=type)
dtFields.name = name
dtFields.relatedDatatype = Datatypes.objects.get(datatypeHash=DatatypeHash)
dtFields.relatedComm = Communities.objects.get(communityHash=CommunityHash)
dtFields.relatedPrimitives = typefield
dtFields.enumerations = Enumeration
dtFields.save()
activityStream = ActivityStreams()
description = {
"@context": "https://www.w3.org/ns/activitystreams",
"type": "added",
"published": str(datetime.now()),
"actor": {
"id": "",
"name": communityUsers.objects.get(nickName=request.user).nickName,
"photo": communityUsers.objects.get(nickName=request.user).userPhoto
},
"object": {
"id": "",
"type": "PosttypeField",
"name": Datatypes.objects.get(datatypeHash=DatatypeHash).name
},
"target": {
"id": "",
"type": "Community",
"name": Communities.objects.get(communityHash=CommunityHash).name,
"hash": Communities.objects.get(communityHash=CommunityHash).communityHash,
}
}
ActivityStreams.objects.create(detail = description)
return render(None, 'tagSearch.html', {'form' : "Data is saved!"})
def DeletePosttypeFields_view(request):
CommunityHash = request.POST.get("CommunityHash")
DatatypeHash = request.POST.get("DatatypeHash")
Dt= Datatypes.objects.filter(datatypeHash=DatatypeHash)[0]
name = request.POST.get("name")
activityStream = ActivityStreams()
description = {
"@context": "https://www.w3.org/ns/activitystreams",
"type": "deleted",
"published": str(datetime.now()),
"actor": {
"id": "",
"name": communityUsers.objects.get(nickName=request.user).nickName,
"photo": communityUsers.objects.get(nickName=request.user).userPhoto
},
"object": {
"id": "",
"type": "PosttypeField",
"name": Datatypes.objects.get(datatypeHash=DatatypeHash).name
},
"target": {
"id": "",
"type": "Community",
"name": Communities.objects.get(communityHash=CommunityHash).name,
"hash": Communities.objects.get(communityHash=CommunityHash).communityHash,
}
}
ActivityStreams.objects.create(detail = description)
HiddenPosts= Posts.objects.filter(propertyName=name,relatedDatatypes=Dt).delete()
DatatypeFields.objects.filter(name=name,relatedDatatype=Dt).delete()
return render(None, 'tagSearch.html', {'form' : "Posttyype Field is Deleted Successfully!"})
def EditPosttypes_view(request):
CommunityHash = request.GET.get("community_Hash")
context={}
form=posttypeList(cHash=CommunityHash)
return render(request, 'modal.html', {'form': form})
def ShowPosttypeFields_view(request):
CommunityHash = request.POST.get("CommunityHash")
PosttypeName = request.POST.get("PosttypeEntry")
Cm = Communities.objects.filter(communityHash=CommunityHash)[0]
Dt = Cm.datatypes_set.filter(name=PosttypeName)[0]
PostFields = DatatypeFields.objects.filter(relatedDatatype=Dt)
if DatatypeFields.objects.filter(relatedDatatype = Dt):
PtFields = DatatypeFields.objects.filter(relatedDatatype = Dt)
context = {}
iter=0
for fields in PtFields:
name = fields.name
Types = fields.relatedPrimitives
Required = fields.fieldRequired
Show = fields.fronttableShow
if fields.enumerations:
Enum = fields.enumerations
form = AddTextEntryEnum(initial={'name': name, 'Types': Types, 'Required': Required, 'ShowPage': Show, 'Enum': Enum})
context['form'+str(iter)]=form
else:
form = AddTextEntry(initial={'name': name, 'Types': Types, 'Required': Required, 'ShowPage': Show})
context['form'+str(iter)]=form
iter +=1
return render(None, 'showDataTypeFields.html', {'form':context, 'posttypeHash':Dt.datatypeHash})
else:
return render(None, 'showDataTypeFields.html', {'form':"Yes", 'posttypeHash':Dt.datatypeHash})
def DeletePosttypes_view(request):
CommunityHash = request.POST.get("CommunityHash")
PosttypeName = request.POST.get("PosttypeEntry")
Cm = Communities.objects.filter(communityHash=CommunityHash)[0]
Dt = Cm.datatypes_set.filter(name=PosttypeName)[0].delete()
activityStream = ActivityStreams()
description = {
"@context": "https://www.w3.org/ns/activitystreams",
"type": "deleted",
"published": str(datetime.now()),
"actor": {
"id": "",
"name": communityUsers.objects.get(nickName=request.user).nickName,
"photo": communityUsers.objects.get(nickName=request.user).userPhoto
},
"object": {
"id": "",
"type": "Posttype",
"name": PosttypeName,
},
"target": {
"id": "",
"type": "Community",
"name": Cm.name,
"hash": Cm.communityHash,
}
}
ActivityStreams.objects.create(detail = description)
    return render(None, 'tagSearch.html', {'form':"Selected posttype is deleted successfully!"})
def addPosttypeEditField_view(request):
EnField = request.POST.get("Enumeration")
if EnField == 'on':
form = AddTextEntryEnum()
else:
form = AddTextEntry()
return render(None, 'modalPostEdit.html', {'form' : form })
def subscribePosttype_view(request):
user = request.user
userModel = communityUsers.objects.filter(nickName=user)[0]
Posttype = Posts.objects.filter(entryHash=request.POST.get("post_Hash"))[0].relatedDatatypes
Posttype.subscribers.add(userModel)
Posttype.save()
activityStream = ActivityStreams()
description = {
"@context": "https://www.w3.org/ns/activitystreams",
"type": "subscribed",
"published": str(datetime.now()),
"actor": {
"id": "",
"name": communityUsers.objects.get(nickName=request.user).nickName,
"photo": communityUsers.objects.get(nickName=request.user).userPhoto
},
"object": {
"id": "",
"type": "Posttype",
"name": Posttype.name,
},
"target": {
"id": "",
"type": "Community",
"name": Posttype.relatedCommunity.name,
"hash": Posttype.relatedCommunity.communityHash,
}
}
ActivityStreams.objects.create(detail = description)
    return render(None, 'tagSearch.html', {'form': "You Subscribed to the Posttype Successfully!"})
def unsubscribePosttype_view(request):
user = request.user
userModel = communityUsers.objects.filter(nickName=user)[0]
Posttype = Posts.objects.filter(entryHash=request.POST.get("post_Hash"))[0].relatedDatatypes
Posttype.subscribers.remove(userModel)
Posttype.save()
activityStream = ActivityStreams()
description = {
"@context": "https://www.w3.org/ns/activitystreams",
"type": "unsubscribed",
"published": str(datetime.now()),
"actor": {
"id": "",
"name": communityUsers.objects.get(nickName=request.user).nickName,
"photo": communityUsers.objects.get(nickName=request.user).userPhoto
},
"object": {
"id": "",
"type": "Posttype",
"name": Posttype.name,
},
"target": {
"id": "",
"type": "Community",
"name": Posttype.relatedCommunity.name,
"hash": Posttype.relatedCommunity.communityHash,
}
}
ActivityStreams.objects.create(detail = description)
    return render(None, 'tagSearch.html', {'form': "You Unsubscribed from the Posttype Successfully!"})
def reportPostModal_view(request):
form = ReportPost()
return render(None, 'tagSearch.html', {'form' : form })
def reportPost_view(request):
PostHash = request.POST.get("post_Hash")
try:
PosttypeMeta = PostsMetaHash.objects.filter(postMetaHash=PostHash)[0]
Cm = PosttypeMeta.relatedCommunity
user = request.user
userModel = communityUsers.objects.filter(nickName=user)[0]
salt = uuid.uuid4().hex
reportHash = hashlib.sha256(salt.encode() + request.POST.get("Report_Reason").encode()).hexdigest() + salt
reportEntry = ReportedPosts()
reportEntry.relatedCommunity = Cm
reportEntry.relatedMeta = PosttypeMeta
reportEntry.reportHash = reportHash
reportEntry.reason = request.POST.get("Report_Reason")
reportEntry.description = request.POST.get("Description")
reportEntry.reportPostCreator = userModel
reportEntry.reportPostCreationDate = datetime.now()
reportEntry.save()
return render(None, 'tagSearch.html', {'form' : 'You successfully reported the post!' })
except:
        return render(None, 'tagSearch.html', {'form' : 'Reporting was unsuccessful!' })
def reportPostDelete_view(request):
PostHash = request.POST.get("post_Hash")
try:
user = request.user
userModel = communityUsers.objects.filter(nickName=user)[0]
reportEntry = ReportedPosts.objects.get(reportHash=PostHash)
reportEntry.delete()
return render(None, 'tagSearch.html', {'form' : 'The Report is Removed!' })
except:
return render(None, 'tagSearch.html', {'form' : 'The Report cannot be Removed!' })
def ReturnPostFields_view(request):
CommunityHash = request.POST.get("community_Hash")
PosttypeName = request.POST.get("PosttypeEntry")
Cm = Communities.objects.filter(communityHash=CommunityHash)[0]
Dt = Cm.datatypes_set.filter(name=PosttypeName)[0]
PostFields = DatatypeFields.objects.filter(relatedDatatype=Dt)
iter=0
context={}
for fields in PostFields:
if fields.enumerations is not None:
name = fields.name
types = fields.relatedPrimitives.name
req = fields.fieldRequired
show = fields.fronttableShow
enum = fields.enumerations
enumList = enum.split(",")
context[fields.name]=AddEnumaratedPost(en=enumList,nm=name)
else:
print(fields.relatedPrimitives.name)
if fields.relatedPrimitives.name == "Text":
context[fields.name]=AddTextPost()
elif fields.relatedPrimitives.name == "Text Area":
context[fields.name]=AddTextAreaPost()
elif fields.relatedPrimitives.name == "Audio":
context[fields.name]=AddAudioPost(request.POST, request.FILES)
elif fields.relatedPrimitives.name == "Boolean":
context[fields.name]=AddBooleanPost()
elif fields.relatedPrimitives.name == "Date":
context[fields.name]=AddDatePost()
elif fields.relatedPrimitives.name == "DateTime":
context[fields.name]=AddDateTimePost()
elif fields.relatedPrimitives.name == "Decimal":
context[fields.name]=AddDecimalPost()
elif fields.relatedPrimitives.name == "E-mail":
context[fields.name]=AddEmailPost()
elif fields.relatedPrimitives.name == "Float":
context[fields.name]=AddFloatPost()
elif fields.relatedPrimitives.name == "IP Address":
context[fields.name]=AddIpAddressPost()
elif fields.relatedPrimitives.name == "Image":
context[fields.name]=AddImagePost(request.POST, request.FILES)
elif fields.relatedPrimitives.name == "Integer":
context[fields.name]=AddIntegerPost()
elif fields.relatedPrimitives.name == "Location":
context[fields.name]=AddLocationPost()
elif fields.relatedPrimitives.name == "Time":
context[fields.name]=AddTimePost()
elif fields.relatedPrimitives.name == "URL":
context[fields.name]=AddUrlPost()
elif fields.relatedPrimitives.name == "Video":
context[fields.name]=AddVideoPost(request.POST, request.FILES)
name = fields.name
types = fields.relatedPrimitives.name
req = fields.fieldRequired
show = fields.fronttableShow
iter += 1
print(context)
context["Tags"]=AddTagPost()
return render(None, 'entryReturnFields.html', {'form' : context, 'posttypeHash':Dt.datatypeHash})
def AddPostModal_view(request):
CommunityHash = request.POST.get("community_Hash")
context={}
form=posttypeList(cHash=CommunityHash)
return render(request, 'modal.html', {'form': form})
def handle_uploaded_postfile(f):
filepath = 'streampage/static/uploads/posts/'+f.name
with open(filepath, 'wb+') as destination:
for chunk in f.chunks():
destination.write(chunk)
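    # The return value mirrors the on-disk layout: an upload named "cat.png"
    # is written to streampage/static/uploads/posts/cat.png and
    # "/static/uploads/posts/cat.png/" is returned (note the trailing slash).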
return "/"+filepath.split("/")[1]+"/"+filepath.split("/")[2]+"/"+filepath.split("/")[3]+"/"+filepath.split("/")[4]+"/"
def CreatePost_view(request):
CommunityHash = request.POST.get("community_Hash")
DatatypeHash = request.POST.get("PosttypeHash")
Dt = Datatypes.objects.filter(datatypeHash=DatatypeHash)[0]
PostFields = DatatypeFields.objects.filter(relatedDatatype=Dt)
print(PostFields[0].name)
salt = uuid.uuid4().hex
try:
PostHash = hashlib.sha256(salt.encode() + request.POST.get(PostFields[0].name).encode()).hexdigest() + salt
except:
PostHash = hashlib.sha256(salt.encode() + uuid.uuid4().hex.upper()[0:9].encode()).hexdigest() + salt
PostTime = datetime.now()
metaPost = PostsMetaHash()
metaPost.relatedCommunity = Communities.objects.get(communityHash=CommunityHash)
metaPost.relatedDatatypes = Datatypes.objects.get(datatypeHash=DatatypeHash)
metaPost.postCreator = communityUsers.objects.get(nickName=request.user)
metaPost.postCreationDate = PostTime
metaPost.postMetaHash = PostHash
metaPost.save()
for fields in PostFields:
if (fields.relatedPrimitives.name == "Image" or fields.relatedPrimitives.name == "Audio" or fields.relatedPrimitives.name == "Video") and request.POST.get(fields.name) != "":
p_image=request.FILES.get(fields.name)
file_path=handle_uploaded_postfile(p_image)
entry = Posts()
entry.propertyName = fields.name
entry.propertyValue = file_path
entry.relatedDatatypes = Datatypes.objects.get(datatypeHash=DatatypeHash)
entry.relatedCommunityforPost = Communities.objects.get(communityHash=CommunityHash)
entry.entryHash = PostHash
entry.relatedMeta = PostsMetaHash.objects.get(postMetaHash = PostHash)
entry.postCreator = communityUsers.objects.get(nickName=request.user)
entry.postCreationDate = PostTime
entry.postTag = request.POST.get("Tags")
entry.save()
elif request.POST.get(fields.name) != "" and fields.relatedPrimitives.name != "Boolean":
entry = Posts()
entry.propertyName = fields.name
entry.propertyValue = request.POST.get(fields.name)
entry.relatedDatatypes = Datatypes.objects.get(datatypeHash=DatatypeHash)
entry.relatedCommunityforPost = Communities.objects.get(communityHash=CommunityHash)
entry.entryHash = PostHash
entry.relatedMeta = PostsMetaHash.objects.get(postMetaHash = PostHash)
entry.postCreator = communityUsers.objects.get(nickName=request.user)
entry.postCreationDate = PostTime
entry.postTag = request.POST.get("Tags")
entry.save()
elif fields.relatedPrimitives.name == "Boolean" and request.POST.get(fields.name) != "":
entry = Posts()
entry.propertyName = fields.name
if entry.propertyValue == "on":
entry.propertyValue = "Yes"
entry.relatedDatatypes = Datatypes.objects.get(datatypeHash=DatatypeHash)
entry.relatedCommunityforPost = Communities.objects.get(communityHash=CommunityHash)
entry.entryHash = PostHash
entry.relatedMeta = PostsMetaHash.objects.get(postMetaHash = PostHash)
entry.postCreator = communityUsers.objects.get(nickName=request.user)
entry.postCreationDate = PostTime
entry.postTag = request.POST.get("Tags")
entry.save()
else:
entry.propertyValue = "No"
entry.relatedDatatypes = Datatypes.objects.get(datatypeHash=DatatypeHash)
entry.relatedCommunityforPost = Communities.objects.get(communityHash=CommunityHash)
entry.entryHash = PostHash
entry.relatedMeta = PostsMetaHash.objects.get(postMetaHash = PostHash)
entry.postCreator = communityUsers.objects.get(nickName=request.user)
entry.postCreationDate = PostTime
entry.postTag = request.POST.get("Tags")
entry.save()
else:
if fields.fieldRequired == True:
return render(None, 'tagSearch.html', {'form' : fields.name+" is required!"})
Tags = saveTag_view(request.POST.get("Tags"))
tagentry = PostTags()
relatedPost = Posts.objects.filter(entryHash=PostHash)[0]
tagentry.relatedPostTag = relatedPost
tagentry.tagName = Tags["TITLE"]
tagentry.tagItem = Tags["ITEM"]
tagentry.save()
activityStream = ActivityStreams()
description = {
"@context": "https://www.w3.org/ns/activitystreams",
"type": "created",
"published": str(datetime.now()),
"actor": {
"id": "",
"name": communityUsers.objects.get(nickName=request.user).nickName,
"photo": communityUsers.objects.get(nickName=request.user).userPhoto,
},
"object": {
"id": "",
"type": "Post",
"hash": entry.relatedMeta.postMetaHash,
"posttype": Dt.name
},
"target": {
"id": "",
"type": "Community",
"name": entry.relatedCommunityforPost.name,
"hash": entry.relatedCommunityforPost.communityHash,
}
}
ActivityStreams.objects.create(detail = description)
return render(None, 'tagSearch.html', {'form' : "The Entry is Created Successfully"})
def DeletePost_view(request):
PostHash = request.POST.get("PostHash")
activityStream = ActivityStreams()
entry = Posts.objects.filter(entryHash=PostHash)[0]
description = {
"@context": "https://www.w3.org/ns/activitystreams",
"type": "deleted",
"published": str(datetime.now()),
"actor": {
"id": "",
"name": communityUsers.objects.get(nickName=request.user).nickName,
"photo": communityUsers.objects.get(nickName=request.user).userPhoto,
},
"object": {
"id": "",
"type": "Post",
"posttype": entry.relatedDatatypes.name
},
"target": {
"id": "",
"type": "Community",
"name": entry.relatedCommunityforPost.name,
"hash": entry.relatedCommunityforPost.communityHash,
}
}
ActivityStreams.objects.create(detail = description)
Posts.objects.filter(entryHash=PostHash).delete()
    return render(None, 'tagSearch.html', {'form' : "The Entry is Deleted Successfully"})
def CreatePostComment_view(request):
CommunityHash = request.POST.get("community_Hash")
postHash = request.POST.get("post_Hash")
salt = uuid.uuid4().hex
commentHash = hashlib.sha256(salt.encode() + request.POST.get("Comment").encode()).hexdigest() + salt
commentTime = datetime.now()
test = Posts.objects.filter(entryHash = postHash)[0]
entryComment = PostComments()
entryComment.relatedCommunityforComment = Communities.objects.get(communityHash=CommunityHash)
entryComment.relatedMeta = PostsMetaHash.objects.get(postMetaHash = postHash)
entryComment.commentHash = commentHash
entryComment.commentText = request.POST.get("Comment")
entryComment.postCommentCreator = communityUsers.objects.get(nickName=request.user)
entryComment.postCommentCreationDate = commentTime
entryComment.save()
activityStream = ActivityStreams()
description = {
"@context": "https://www.w3.org/ns/activitystreams",
"type": "commented",
"published": str(datetime.now()),
"actor": {
"id": "",
"name": communityUsers.objects.get(nickName=request.user).nickName,
"photo": communityUsers.objects.get(nickName=request.user).userPhoto,
},
"object": {
"id": "",
"type": "Comment",
"hash": entryComment.relatedMeta.postMetaHash,
"name": entryComment.commentText,
"posttype": entryComment.relatedMeta.relatedDatatypes.name,
},
"target": {
"id": "",
"name": entryComment.relatedCommunityforComment.name,
"hash": entryComment.relatedCommunityforComment.communityHash,
}
}
ActivityStreams.objects.create(detail = description)
return render(None, 'tagSearch.html', {'form' : "Successfully Commented on the Post!"})
def deletePostComment_view(request):
commentHash = request.POST.get("comment_Hash")
comment = PostComments.objects.filter(commentHash=commentHash)[0]
try:
activityStream = ActivityStreams()
description = {
"@context": "https://www.w3.org/ns/activitystreams",
"type": "deleted",
"published": str(datetime.now()),
"actor": {
"id": "",
"name": communityUsers.objects.get(nickName=request.user).nickName,
"photo": communityUsers.objects.get(nickName=request.user).userPhoto,
},
"object": {
"id": "",
"type": "Comment",
"hash": comment.relatedMeta.postMetaHash,
"name": comment.commentText,
"posttype": comment.relatedMeta.relatedDatatypes.name
},
"target": {
"id": "",
"type": "Post",
"name": comment.relatedCommunityforComment.name,
"hash": comment.relatedCommunityforComment.communityHash,
}
}
comment.delete()
ActivityStreams.objects.create(detail = description)
return render(None, 'tagSearch.html', {'form' : "The Comment is Deleted Successfully!"})
except:
return render(None, 'tagSearch.html', {'form' : "The Comment cannot be deleted!"})
def login_view(request):
form = UsersLoginForm(request.POST or None)
if form.is_valid():
username = form.cleaned_data.get("username")
password = form.cleaned_data.get("password")
        user = authenticate(username = username, password = password)
        if user is None:
            # authenticate() returns None on bad credentials; re-render the form
            # instead of passing None to login().
            return render(request, "login.html", {"form": form, "title": "Login"})
        login(request, user)
activityStream = ActivityStreams()
description = {
"@context": "https://www.w3.org/ns/activitystreams",
"type": "login",
"published": str(datetime.now()),
"actor": {
"id": "",
"name": communityUsers.objects.get(nickName=request.user).nickName,
"photo": communityUsers.objects.get(nickName=request.user).userPhoto
}
}
ActivityStreams.objects.create(detail = description)
return redirect("/streampage")
return render(request, "login.html", {
"form" : form,
"title" : "Login",})
def register_view(request):
form = UsersRegisterForm(request.POST or None)
if form.is_valid():
user = form.save()
password = form.cleaned_data.get("password")
user.set_password(password)
user.save()
comUsers = communityUsers()
comUsers.userMail = user.email
comUsers.nickName = user.username
comUsers.save()
new_user = authenticate(username = user.username, password = password)
login(request, new_user)
return redirect("/streampage/login")
return render(request, "login.html", {
"title" : "Register",
"form" : form,
})
def logout_view(request):
activityStream = ActivityStreams()
description = {
"@context": "https://www.w3.org/ns/activitystreams",
"type": "logged out",
"published": str(datetime.now()),
"actor": {
"id": "",
"name": communityUsers.objects.get(nickName=request.user).nickName,
"photo": communityUsers.objects.get(nickName=request.user).userPhoto
}
}
logout(request)
ActivityStreams.objects.create(detail = description)
return HttpResponseRedirect("/streampage/login")
def profilePage(request):
if request.user.is_authenticated:
username=request.user
CUser = communityUsers.objects.filter(nickName=username)[0]
userphoto = CUser.userPhoto
Community_List = CUser.creator.all()
reportList = []
for comm in Community_List:
reports = comm.reportedposts_set.all()
for rp in reports:
reportList.append(rp)
Datatype_List = CUser.datatypecreator.all()
Post_List = CUser.postcreator.all()
joined_Communities = CUser.members.all()
activityDetailList = ActivityStreams.objects.filter(detail__actor__name = str(username)).order_by('-id')
subscriptionList = Datatypes.objects.filter(subscribers = CUser)
followingList = []
if len(UserCircle.objects.filter(circleOwner=CUser)) > 0:
for i in UserCircle.objects.get(circleOwner=CUser).circleUsers.all():
followingList.append(i.nickName)
followerList = []
for i in communityUsers.objects.get(nickName=username).Followers.all():
followerList.append(i.circleOwner.nickName)
return render(request, "profile.html", {
"Communities" : Community_List,
"Datatypes" : Datatype_List,
"Posts" : Post_List,
"Joined" : joined_Communities,
"UserInfo" : CUser,
"ReportList": reportList,
"activities":activityDetailList,
"followers" : followerList,
"following" : followingList,
"subscriptionList": subscriptionList,
"userPhoto":userphoto
})
else:
return HttpResponseRedirect("/streampage/login")
def chooseSearch_view(request):
CommunityHash = request.POST.get("community_Hash")
form=searchList(cHash=CommunityHash)
return render(request, 'modal.html', {'form': form})
def ReturnSearchFields_view(request):
CommunityHash = request.POST.get("community_Hash")
DatatypeHash = request.POST.get("DatatypeHash")
PostfieldName = request.POST.get("searchEntry")
Cm = Communities.objects.filter(communityHash=CommunityHash)[0]
PostFields = DatatypeFields.objects.filter(name=PostfieldName)
fields=PostFields[0]
context={}
if fields.enumerations is not None:
name = fields.name
types = fields.relatedPrimitives.name
req = fields.fieldRequired
show = fields.fronttableShow
enum = fields.enumerations
enumList = enum.split(",")
context[fields.name]=AddEnumaratedSearch(en=enumList,nm=name)
else:
if fields.relatedPrimitives.name == "Text":
context[fields.name]=AddTextSearch()
elif fields.relatedPrimitives.name == "Text Area":
context[fields.name]=AddTextAreaSearch()
elif fields.relatedPrimitives.name == "Audio":
context[fields.name]=AddAudioSearch(request.POST, request.FILES)
elif fields.relatedPrimitives.name == "Boolean":
context[fields.name]=AddBooleanSearch()
elif fields.relatedPrimitives.name == "Date":
context[fields.name]=AddDateSearch()
elif fields.relatedPrimitives.name == "DateTime":
context[fields.name]=AddDateTimeSearch()
elif fields.relatedPrimitives.name == "Decimal":
context[fields.name]=AddDecimalSearch()
elif fields.relatedPrimitives.name == "E-mail":
context[fields.name]=AddEmailSearch()
elif fields.relatedPrimitives.name == "Float":
context[fields.name]=AddFloatSearch()
elif fields.relatedPrimitives.name == "IP Address":
context[fields.name]=AddIpAddressSearch()
elif fields.relatedPrimitives.name == "Image":
context[fields.name]=AddImageSearch(request.POST, request.FILES)
elif fields.relatedPrimitives.name == "Integer":
context[fields.name]=AddIntegerSearch()
elif fields.relatedPrimitives.name == "Location":
context[fields.name]=AddLocationSearch()
elif fields.relatedPrimitives.name == "Time":
context[fields.name]=AddTimeSearch()
elif fields.relatedPrimitives.name == "URL":
context[fields.name]=AddUrlSearch()
elif fields.relatedPrimitives.name == "Video":
context[fields.name]=AddVideoSearch(request.POST, request.FILES)
name = fields.name
types = fields.relatedPrimitives.name
req = fields.fieldRequired
show = fields.fronttableShow
#context["Tags"]=AddTagSearch()
return render(None, 'entrySearchFields.html', {'form' : context})
def ReturnFreeSearchFields_view(request):
CommunityHash = request.POST.get("community_Hash")
Cm = Communities.objects.filter(communityHash=CommunityHash)[0]
context={}
context["Free Search"]=freeSearchField()
return render(None, 'entrySearchFields.html', {'form' : context})
def ReturnEntrySearchResults_view(request):
CommunityHash = request.POST.get('CommunityHash')
Community_List = Communities.objects.filter(communityHash=CommunityHash)
User = communityUsers.objects.filter(nickName=request.user)[0]
userphoto = User.userPhoto
currentCommunity = Community_List[0]
postEntries={}
Dtfields = currentCommunity.datatypefields_set.all()
if request.user.is_authenticated:
querylist=[]
querylistFree=[]
for fields in Dtfields:
print(request.POST.get("Free Search_Value"))
subquery=""
subqueryFree=""
if request.POST.get(fields.name+"_Value"):
if request.POST.get(fields.name+"_Condition") == "equals":
subquery = "\"entryHash\" in (select \"entryHash\" from streampage_posts where \"propertyName\""+" = "+"'"+fields.name+"' AND \"propertyValue\""+" = "+"'"+request.POST.get(fields.name+"_Value")+"')"
querylist.append(subquery)
elif request.POST.get(fields.name+"_Condition") == "not equal":
subquery = "\"entryHash\" not in (select \"entryHash\" from streampage_posts where \"propertyName\""+" = "+"'"+fields.name+"' AND \"propertyValue\""+" = "+"'"+request.POST.get(fields.name+"_Value")+"')"
querylist.append(subquery)
elif request.POST.get(fields.name+"_Condition") == "contains":
subquery = "\"entryHash\" in (select \"entryHash\" from streampage_posts where \"propertyName\""+" = "+"'"+fields.name+"' AND \"propertyValue\""+" ~ "+"'"+request.POST.get(fields.name+"_Value")+"')"
querylist.append(subquery)
elif request.POST.get(fields.name+"_Condition") == "not contain":
subquery = "\"entryHash\" not in (select \"entryHash\" from streampage_posts where \"propertyName\""+" = "+"'"+fields.name+"' AND \"propertyValue\""+" ~ "+"'"+request.POST.get(fields.name+"_Value")+"')"
querylist.append(subquery)
elif request.POST.get(fields.name+"_Condition") == "less than":
subquery = "\"entryHash\" in (select \"entryHash\" from streampage_posts where \"propertyName\""+" = "+"'"+fields.name+"' AND CAST(\"propertyValue\" as INTEGER)"+" < "+"'"+request.POST.get(fields.name+"_Value")+"')"
querylist.append(subquery)
elif request.POST.get(fields.name+"_Condition") == "more than":
subquery = "\"entryHash\" in (select \"entryHash\" from streampage_posts where \"propertyName\""+" = "+"'"+fields.name+"' AND CAST(\"propertyValue\" as INTEGER)"+" > "+"'"+request.POST.get(fields.name+"_Value")+"')"
querylist.append(subquery)
elif request.POST.get(fields.name+"_Condition"):
if request.POST.get(fields.name+"_Condition") == "equals":
subquery = "\"entryHash\" in (select \"entryHash\" from streampage_posts where \"propertyName\""+" = "+"'"+fields.name+"' AND \"propertyValue\""+" = "+"'No')"
querylist.append(subquery)
elif request.POST.get(fields.name+"_Condition") == "not equal":
subquery = "\"entryHash\" not in (select \"entryHash\" from streampage_posts where \"propertyName\""+" = "+"'"+fields.name+"' AND \"propertyValue\""+" = "+"'No')"
querylist.append(subquery)
if request.POST.get("Free Search_Value"):
subqueryFree = "\"entryHash\" in (select \"entryHash\" from streampage_posts where \"propertyName\""+" = "+"'"+fields.name+"' AND \"propertyValue\""+" ~ "+"'"+request.POST.get("Free Search_Value")+"')"
querylistFree.append(subqueryFree)
querystring = " and ".join(querylist)
querystringFree = " or ".join(querylistFree)
RCommunity = Communities.objects.filter(communityHash=CommunityHash)
c = connection.cursor()
if querystring != "":
execution_string = 'select "entryHash" from streampage_posts where '+querystring+' and "relatedCommunityforPost_id" ='+str(currentCommunity.id)+' GROUP BY "entryHash"'
elif querystringFree != "":
execution_string = 'select "entryHash" from streampage_posts where '+querystringFree+' and "relatedCommunityforPost_id" ='+str(currentCommunity.id)+' GROUP BY "entryHash"'
c.execute(execution_string)
posts=c.fetchall()
postInstance=[]
for hashes in posts:
currentObject={}
postInfo = PostsMetaHash.objects.filter(postMetaHash=hashes[0])[0]
currentObject['postList']=Posts.objects.filter(entryHash=hashes[0])
currentObject['posttype']=Posts.objects.filter(entryHash=hashes[0])[0].relatedDatatypes.datatypefields_set.all()
currentObject['comments']=postInfo.postcomments_set.all()
postInstance.append(currentObject)
postEntries['postInstances']=postInstance
print(querystring)
paginator = Paginator(posts, 5)
page = request.GET.get('page')
post_resp = paginator.get_page(page)
comment=textComment()
return render(request, 'datatypes.html', {'postEntries':postEntries, 'comment': comment, 'post_resp': post_resp, 'community_Hash':CommunityHash, 'community':Community_List[0], 'userPhoto': userphoto})
else:
return HttpResponseRedirect("/streampage/login")
def uploadPhotoForm_view(request):
form = AddImagePost()
return render(request, 'tagSearch.html', {'form': form})
def handle_uploaded_profilefile(f):
filepath = 'streampage/static/uploads/profiles/'+f.name
with open(filepath, 'wb+') as destination:
for chunk in f.chunks():
destination.write(chunk)
return "/"+filepath.split("/")[1]+"/"+filepath.split("/")[2]+"/"+filepath.split("/")[3]+"/"+filepath.split("/")[4]+"/"
def uploadPhoto_view(request):
if request.user.is_authenticated:
try:
u_image = request.FILES.get("ImageEntry")
userProfile = communityUsers.objects.get(nickName=request.user)
image_path = handle_uploaded_profilefile(u_image)
userProfile.userPhoto = image_path
userProfile.save()
activityStream = ActivityStreams()
description = {
"@context": "https://www.w3.org/ns/activitystreams",
"type": "uploaded",
"published": str(datetime.now()),
"actor": {
"id": "",
"name": communityUsers.objects.get(nickName=request.user).nickName,
"photo": communityUsers.objects.get(nickName=request.user).userPhoto,
},
"object": {
"id": "",
"type": "ProfilePhoto",
"name": image_path,
},
"target": {
"id": "",
"type": "Profile",
}
}
ActivityStreams.objects.create(detail = description)
return render(None, 'tagSearch.html', {'form' : 'The Photo is Saved Successfully!'})
except:
return render(None, 'tagSearch.html', {'form' : 'The Photo cannot be Saved!'})
def EditUserModal_view(request):
form = EditUser()
return render(request, 'modal.html', {'form': form})
def EditUser_view(request):
if request.user.is_authenticated:
try:
name = request.POST.get("name")
surname = request.POST.get("surname")
birthday= request.POST.get("birth")
email = request.POST.get("email")
bio = request.POST.get("bio")
userProfile = communityUsers.objects.get(nickName=request.user)
userProfile.userName = name
userProfile.userSurname = surname
userProfile.userBirthDay = birthday
userProfile.userMail = email
userProfile.userBio = bio
userProfile.save()
activityStream = ActivityStreams()
description = {
"@context": "https://www.w3.org/ns/activitystreams",
"type": "updated",
"published": str(datetime.now()),
"actor": {
"id": "",
"name": communityUsers.objects.get(nickName=request.user).nickName,
"photo": communityUsers.objects.get(nickName=request.user).userPhoto,
},
"object": {
"id": "",
"type": "ProfileInformation",
"email": email,
"bio" : bio,
},
"target": {
"id": "",
"type": "Profile Information",
}
}
ActivityStreams.objects.create(detail = description)
return render(None, 'tagSearch.html', {'form' : 'The Information is Updated Successfully!'})
except:
return render(None, 'tagSearch.html', {'form' : 'The Information cannot be Updated!'})
def UserPage_view(request):
if request.user.is_authenticated:
Username = request.GET.get('user')
CUser = communityUsers.objects.filter(nickName=Username)[0]
userphoto = communityUsers.objects.filter(nickName=request.user)[0].userPhoto
Community_List = CUser.creator.all()
Datatype_List = CUser.datatypecreator.all()
Post_List = CUser.postcreator.all()
joined_Communities = CUser.members.all()
activityDetailList = ActivityStreams.objects.filter(detail__actor__name = str(Username)).order_by('-id')
subscriptionList = Datatypes.objects.filter(subscribers = CUser)
followingList = []
if len(UserCircle.objects.filter(circleOwner=CUser)) > 0:
for i in UserCircle.objects.get(circleOwner=CUser).circleUsers.all():
followingList.append(i.nickName)
followerList = []
for i in communityUsers.objects.get(nickName=Username).Followers.all():
followerList.append(i.circleOwner.nickName)
if str(request.user) == str(Username):
return render(request, "profile.html", {
"Communities" : Community_List,
"Datatypes" : Datatype_List,
"Posts" : Post_List,
"Joined" : joined_Communities,
"UserInfo" : CUser,
"activities": activityDetailList,
"followers" : followerList,
"following" : followingList,
"subscriptionList": subscriptionList,
"userPhoto": userphoto
})
else:
return render(request, "user.html", {
"Communities" : Community_List,
"Datatypes" : Datatype_List,
"Posts" : Post_List,
"Joined" : joined_Communities,
"UserInfo" : CUser,
"activities": activityDetailList,
"followers" : followerList,
"following" : followingList,
"subscriptionList": subscriptionList,
"userPhoto": userphoto
})
else:
return HttpResponseRedirect("/streampage/login")
def FollowUser_view(request):
user = request.user
Username = request.POST.get('user')
userModel = communityUsers.objects.filter(nickName=user)[0]
followingUser = communityUsers.objects.filter(nickName=Username)[0]
try:
circUser = UserCircle.objects.get(circleOwner=userModel)
circUser.circleUsers.add(followingUser)
circUser.save()
except:
circUser = UserCircle()
circUser.circleOwner = userModel
circUser.save()
addFollower = UserCircle.objects.get(circleOwner=userModel)
addFollower.circleUsers.add(followingUser)
addFollower.save()
activityStream = ActivityStreams()
description = {
"@context": "https://www.w3.org/ns/activitystreams",
"type": "followed",
"published": str(datetime.now()),
"actor": {
"id": "",
"name": communityUsers.objects.get(nickName=request.user).nickName,
"photo": communityUsers.objects.get(nickName=request.user).userPhoto
},
"object": {
"id": "",
"type": "Username",
"name": str(followingUser.nickName),
}
}
ActivityStreams.objects.create(detail = description)
return render(None, 'tagSearch.html', {'form': "You are following the user!"})
def UnFollowUser_view(request):
user = request.user
Username = request.POST.get('user')
userModel = communityUsers.objects.filter(nickName=user)[0]
followingUser = communityUsers.objects.filter(nickName=Username)[0]
circUser = UserCircle.objects.get(circleOwner=userModel)
circUser.circleUsers.remove(followingUser)
circUser.save()
activityStream = ActivityStreams()
description = {
"@context": "https://www.w3.org/ns/activitystreams",
"type": "unfollowed",
"published": str(datetime.now()),
"actor": {
"id": "",
"name": communityUsers.objects.get(nickName=request.user).nickName,
"photo": communityUsers.objects.get(nickName=request.user).userPhoto
},
"object": {
"id": "",
"type": "Username",
"name": str(followingUser.nickName),
}
}
ActivityStreams.objects.create(detail = description)
return render(None, 'tagSearch.html', {'form': "You are unfollowing the user!"})
def communityPageSearch_view(request):
if request.user.is_authenticated:
if request.GET.get('keyword'):
if Communities.objects.all():
searchString = request.GET.get('keyword')
Community_List = Communities.objects.filter(description__contains=searchString).order_by(
'-communityCreationDate') | Communities.objects.filter(name__contains=searchString).order_by(
'-communityCreationDate')
Cuser = request.user
UserList = communityUsers.objects.filter(nickName=Cuser)[0]
userphoto = UserList.userPhoto
User_communities = UserList.members.all()
paginator = Paginator(Community_List, 3)
page = request.GET.get('page')
community_resp = paginator.get_page(page)
return render(request, 'community.html',
{'community_resp': community_resp, 'User_communities': User_communities,
'userPhoto': userphoto})
else:
return render(request, 'community.html', {})
else:
if Communities.objects.all():
Community_List = Communities.objects.all().order_by('-communityCreationDate')
Cuser = request.user
UserList = communityUsers.objects.filter(nickName=Cuser)[0]
userphoto = UserList.userPhoto
User_communities = UserList.members.all()
paginator = Paginator(Community_List, 3)
page = request.GET.get('page')
community_resp = paginator.get_page(page)
return render(request, 'community.html',
{'community_resp': community_resp, 'User_communities': User_communities,
'userPhoto': userphoto})
else:
return render(request, 'community.html', {})
else:
return HttpResponseRedirect("/streampage/login")
def communityLocationPageSearch_view(request):
if request.user.is_authenticated:
if request.GET.get('keyword'):
if Communities.objects.all():
searchString = request.GET.get('keyword')
text_true = unicode_tr(searchString)
print(text_true.capitalize())
Community_List = Communities.objects.filter(
communityCountry__icontains=text_true.capitalize()).order_by(
'-communityCreationDate') | Communities.objects.filter(
communityLocation__icontains=text_true.capitalize()).order_by(
'-communityCreationDate')
Cuser = request.user
UserList = communityUsers.objects.filter(nickName=Cuser)[0]
userphoto = UserList.userPhoto
User_communities = UserList.members.all()
paginator = Paginator(Community_List, 3)
page = request.GET.get('page')
community_resp = paginator.get_page(page)
return render(request, 'community.html',
{'community_resp': community_resp, 'User_communities': User_communities,
'userPhoto': userphoto})
else:
return render(request, 'community.html', {})
else:
if Communities.objects.all():
Community_List = Communities.objects.all().order_by('-communityCreationDate')
Cuser = request.user
UserList = communityUsers.objects.filter(nickName=Cuser)[0]
userphoto = UserList.userPhoto
User_communities = UserList.members.all()
paginator = Paginator(Community_List, 3)
page = request.GET.get('page')
community_resp = paginator.get_page(page)
return render(request, 'community.html',
{'community_resp': community_resp, 'User_communities': User_communities,
'userPhoto': userphoto})
else:
return render(request, 'community.html', {})
else:
return HttpResponseRedirect("/streampage/login")
|
StarcoderdataPython
|
271205
|
from __future__ import annotations
from typing import TYPE_CHECKING, Dict
import logging
import threading
from kubernetes import watch, client
import ipaddress
from k8s_netem.match import LabelSelector
from k8s_netem.resource import Resource
if TYPE_CHECKING:
from k8s_netem.direction import Rule
class Namespace:
def __init__(self, peer, name=None):
self.peer = peer
self.name = name
self.logger = logging.getLogger(f'namespace:{name}')
self.thread = threading.Thread(target=self.watch_pods)
self.watch = None
def init(self):
self.logger.info('Initialize namespace watcher %s', self.name)
self.thread.start()
def deinit(self):
self.logger.info('Deinitialize namespace watcher %s', self.name)
if self.thread.is_alive():
if self.watch:
self.watch.stop()
self.thread.join(0.0)
def watch_pods(self):
self.logger.info('Started watching pods for %s', self.peer.spec)
self.watch = watch.Watch()
v1 = client.CoreV1Api()
selector = LabelSelector(self.peer.spec['podSelector']).to_labelselector()
stream_args = {
'label_selector': selector
}
if self.name:
stream_func = v1.list_namespaced_pod
            stream_args['namespace'] = self.name
else:
stream_func = v1.list_pod_for_all_namespaces
for event in self.watch.stream(stream_func, **stream_args):
self.peer.handle_pod_event(event)
class Peer(Resource):
def __init__(self, rule: Rule, index: int, spec):
super().__init__(spec)
self.logger = logging.getLogger(f'peer:{rule.direction.name}-{rule.index}-{index}')
self.rule = rule
self.index = index
self.thread = threading.Thread(target=self.watch_namespaces)
self.namespaces: Dict[str, Namespace] = {}
self.watch = None
def init(self):
self.logger.info('Initialize peer: %s', self.spec)
if 'namespaceSelector' in self.spec:
self.thread.start()
elif 'podSelector' in self.spec:
ns = Namespace(self)
ns.init()
self.namespaces['all'] = ns
def deinit(self):
self.logger.info('Deinitialize peer: %s', self.spec)
if self.thread.is_alive():
if self.watch:
self.watch.stop()
self.thread.join(0.0)
for _, ns in self.namespaces.items():
ns.deinit()
    def watch_namespaces(self):
        self.logger.info('Started watching namespaces for %s', self.spec)
        self.watch = watch.Watch()
        v1 = client.CoreV1Api()
        selector = LabelSelector(self.spec['namespaceSelector']).to_labelselector()
        stream_args = {
            'label_selector': selector
        }
for event in self.watch.stream(v1.list_namespace, **stream_args):
self.handle_namespace_event(event)
def handle_namespace_event(self, event):
type = event['type']
ns = event['object']
uid = ns.metadata.uid
self.logger.info('%s %s %s', type.capitalize(),
ns.kind,
ns.metadata.name)
if type == 'ADDED':
ns = Namespace(self, ns.metadata.name)
ns.init()
self.namespaces[uid] = ns
elif type == 'DELETED':
ns = self.namespaces[uid]
ns.deinit()
del self.namespaces[uid]
def handle_pod_event(self, event):
pod = event['object']
type = event['type']
if pod.status.pod_ip is None:
self.logger.debug('Pod is missing IP address. Skipping')
return
verb = {
'MODIFIED': 'in',
'ADDED': 'to',
'DELETED': 'from'
}
if type in ['MODIFIED', 'ADDED', 'DELETED']:
self.logger.info('%s %s %s set %s for pod %s/%s',
type.capitalize(),
pod.status.pod_ip,
verb[type],
self.rule.set_nets_name,
pod.metadata.namespace,
pod.metadata.name)
cidr = ipaddress.IPv4Network(pod.status.pod_ip)
if type == 'DELETED':
self.rule.delete_net(cidr)
else:
self.rule.add_net(cidr, f'{pod.metadata.namespace}/{pod.metadata.name}')
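# Hypothetical lifecycle sketch (an assumption, not part of the original module;
# `rule` stands for a configured k8s_netem Rule):
#
#   peer = Peer(rule, 0, {'podSelector': {'matchLabels': {'app': 'web'}}})
#   peer.init()    # starts the namespace/pod watchers in background threads
#   ...
#   peer.deinit()  # stops the watches and joins the threads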
|
StarcoderdataPython
|
317767
|
# coding: utf-8
## pip install tabula-py
#
# Actually, it extracted the table in PDF by tabula-java commond line.
# dependences: java jdk >= v1.7.0
#
import tabula
import pandas as pd
# Convert to DataFrame.
# df = tabula.read_pdf("c.pdf")
# Convert to CSV.
tabula.convert_into("c.pdf", "c.csv", output_format="csv", pages="all", multiple_tables=True)
print("Done")
|
StarcoderdataPython
|
1631820
|
# -*- coding: utf-8 -*-
import re
import json
from socialoauth.sites.base import OAuth2
from socialoauth.exception import SocialAPIError
QQ_OPENID_PATTERN = re.compile(r'\{.+\}')
class QQApp(OAuth2):
AUTHORIZE_URL = 'https://graph.qq.com/oauth2.0/authorize'
ACCESS_TOKEN_URL = 'https://graph.qq.com/oauth2.0/token'
OPENID_URL = 'https://graph.qq.com/oauth2.0/me'
@property
def authorize_url(self):
url = super(QQApp, self).authorize_url
return '%s&state=socialoauth' % url
def get_access_token(self, code):
super(QQApp, self).get_access_token(code, method='GET', parse=False)
def build_api_url(self, url):
return url
def build_api_data(self, **kwargs):
data = {
'access_token': self.access_token,
'oauth_consumer_key': self.CLIENT_ID,
'openid': self.uid
}
data.update(kwargs)
return data
def parse_token_response(self, res):
self.uid = res['userid']
self.access_token = res['access_token']
self.expires_in = 0
self.refresh_token = None
_url = 'https://graph.qq.com/user/get_user_info'
res = self.api_call_get(_url)
if res['ret'] != 0:
raise SocialAPIError(self.site_name, _url, res)
self.name = res['nickname']
self.avatar = res['figureurl_qq_1']
self.avatar_large = res['figureurl_qq_2']
        self.gender = "M" if res['gender'] == u"男" else "F"
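
# Hypothetical usage sketch (an assumption, not part of the original module;
# CLIENT_ID/CLIENT_SECRET come from the socialoauth site settings):
#
#   qq = QQApp()
#   print(qq.authorize_url)        # send the user to QQ's consent page
#   qq.get_access_token(code)      # `code` arrives on the OAuth callback
#   print(qq.name, qq.avatar)      # populated by parse_token_response()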
|
StarcoderdataPython
|
3571069
|
from .models import *
from django import forms
class SearchForm(forms.Form):
"""Configure and return a search form."""
q = forms.CharField(required=True, widget=forms.TextInput(attrs={'class': 'validate'}))
def __init__(self, *args, **kwargs):
super(SearchForm, self).__init__(*args, **kwargs)
self.fields['q'].label = 'Search'
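
# Hypothetical usage in a view (an assumption, not part of the original module):
#
#   form = SearchForm(request.GET or None)
#   if form.is_valid():
#       query = form.cleaned_data['q']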
|
StarcoderdataPython
|
1643362
|
import struct
PROP_PAYLOAD_FORMAT_INDICATOR = 1
PROP_MESSAGE_EXPIRY_INTERVAL = 2
PROP_CONTENT_TYPE = 3
PROP_RESPONSE_TOPIC = 8
PROP_CORRELATION_DATA = 9
PROP_SUBSCRIPTION_IDENTIFIER = 11
PROP_SESSION_EXPIRY_INTERVAL = 17
PROP_ASSIGNED_CLIENT_IDENTIFIER = 18
PROP_SERVER_KEEP_ALIVE = 19
PROP_AUTHENTICATION_METHOD = 21
PROP_AUTHENTICATION_DATA = 22
PROP_REQUEST_PROBLEM_INFO = 23
PROP_WILL_DELAY_INTERVAL = 24
PROP_REQUEST_RESPONSE_INFO = 25
PROP_RESPONSE_INFO = 26
PROP_SERVER_REFERENCE = 28
PROP_REASON_STRING = 31
PROP_RECEIVE_MAXIMUM = 33
PROP_TOPIC_ALIAS_MAXIMUM = 34
PROP_TOPIC_ALIAS = 35
PROP_MAXIMUM_QOS = 36
PROP_RETAIN_AVAILABLE = 37
PROP_USER_PROPERTY = 38
PROP_MAXIMUM_PACKET_SIZE = 39
PROP_WILDCARD_SUB_AVAILABLE = 40
PROP_SUBSCRIPTION_ID_AVAILABLE = 41
PROP_SHARED_SUB_AVAILABLE = 42
def gen_byte_prop(identifier, byte):
prop = struct.pack('BB', identifier, byte)
return prop
def gen_uint16_prop(identifier, word):
prop = struct.pack('!BH', identifier, word)
return prop
def gen_uint32_prop(identifier, word):
prop = struct.pack('!BI', identifier, word)
return prop
def gen_string_prop(identifier, s):
s = s.encode("utf-8")
prop = struct.pack('!BH%ds'%(len(s)), identifier, len(s), s)
return prop
def gen_string_pair_prop(identifier, s1, s2):
s1 = s1.encode("utf-8")
s2 = s2.encode("utf-8")
prop = struct.pack('!BH%dsH%ds'%(len(s1), len(s2)), identifier, len(s1), s1, len(s2), s2)
return prop
def gen_varint_prop(identifier, val):
v = pack_varint(val)
return struct.pack("!B"+str(len(v))+"s", identifier, v)
def pack_varint(varint):
s = b""
while True:
byte = varint % 128
varint = varint // 128
# If there are more digits to encode, set the top bit of this digit
if varint > 0:
byte = byte | 0x80
s = s + struct.pack("!B", byte)
if varint == 0:
return s
def prop_finalise(props):
return pack_varint(len(props)) + props
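
# Minimal usage sketch (an assumption, not part of the original module): it
# shows how the generators above compose into a finalised MQTT v5 property
# block, and illustrates the varint length prefix.
if __name__ == "__main__":
    props = b""
    # 4-byte session expiry interval of 60 seconds
    props += gen_uint32_prop(PROP_SESSION_EXPIRY_INTERVAL, 60)
    # UTF-8 string pair user property
    props += gen_string_pair_prop(PROP_USER_PROPERTY, "client", "demo")
    # prop_finalise() prefixes the block with its varint-encoded length;
    # e.g. pack_varint(321) == b'\xc1\x02', low digit first with the
    # continuation bit (0x80) set on all but the last byte.
    print(prop_finalise(props).hex())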
|
StarcoderdataPython
|
11350966
|
#!/usr/bin/env python
"""
@package mi.dataset.parser.test
@file marine-integrations/mi/dataset/parser/test/test_flort_dj_sio.py
@author <NAME>, <NAME> (telemetered)
@brief Test code for a flort_dj_sio data parser
"""
import os
from nose.plugins.attrib import attr
from mi.core.exceptions import UnexpectedDataException
from mi.core.log import get_logger
from mi.dataset.dataset_parser import DataSetDriverConfigKeys
from mi.dataset.driver.flort_dj.sio.resource import RESOURCE_PATH
from mi.dataset.parser.flort_dj_sio import FlortDjSioParser, \
FlortdRecoveredParserDataParticle
from mi.dataset.parser.utilities import particle_to_yml
from mi.dataset.test.test_parser import ParserUnitTestCase
log = get_logger()
@attr('UNIT', group='mi')
class FlortDjSioParserUnitTestCase(ParserUnitTestCase):
def setUp(self):
ParserUnitTestCase.setUp(self)
self.telem_config = {
DataSetDriverConfigKeys.PARTICLE_MODULE: 'mi.dataset.parser.flort_dj_sio',
DataSetDriverConfigKeys.PARTICLE_CLASS: 'FlortdParserDataParticle'
}
self.recov_config = {
DataSetDriverConfigKeys.PARTICLE_MODULE: 'mi.dataset.parser.flort_dj_sio',
DataSetDriverConfigKeys.PARTICLE_CLASS: 'FlortdRecoveredParserDataParticle'
}
# particles from FLO15908.DAT and FLO_short.DAT
self.particle_a_recov = FlortdRecoveredParserDataParticle(
'51EC760117/12/13\t00:00:05\t700\t4130\t695\t700\t460\t4130\t547')
self.particle_b_recov = FlortdRecoveredParserDataParticle(
'51EC798517/12/13\t00:15:04\t700\t4130\t695\t708\t460\t4130\t548')
self.particle_c_recov = FlortdRecoveredParserDataParticle(
'51EC7D0917/12/13\t00:30:04\t700\t4130\t695\t702\t460\t4130\t548')
self.particle_d_recov = FlortdRecoveredParserDataParticle(
'51EC808D17/12/13\t00:45:04\t700\t4130\t695\t710\t460\t4130\t548')
self.particle_e_recov = FlortdRecoveredParserDataParticle(
'51EC841117/12/13\t01:00:04\t700\t4130\t695\t708\t460\t4130\t548')
self.particle_f_recov = FlortdRecoveredParserDataParticle(
'51EC879517/12/13\t01:15:04\t700\t4130\t695\t700\t460\t4130\t548')
# particles from FLO15908.DAT
self.particle_long_before_last = FlortdRecoveredParserDataParticle(
'51EDC07917/12/13\t23:30:05\t700\t4130\t695\t677\t460\t4130\t545')
self.particle_long_last = FlortdRecoveredParserDataParticle(
'51EDC3FD17/12/13\t23:45:05\t700\t4130\t695\t674\t460\t4130\t545')
self.stream_handle = None
def assert_result(self, result, particle):
self.assertEqual(result, [particle])
def build_telem_parser(self):
"""
Build a telemetered parser, storing it in self.parser
"""
if self.stream_handle is None:
self.fail("Must set stream handle before building telemetered parser")
self.parser = FlortDjSioParser(self.telem_config, self.stream_handle,
self.exception_callback)
def build_recov_parser(self):
"""
        Build a recovered parser, storing it in self.parser
This requires stream handle to be set before calling it
"""
if self.stream_handle is None:
self.fail("Must set stream handle before building recovered parser")
self.parser = FlortDjSioParser(self.recov_config, self.stream_handle,
self.exception_callback)
def test_simple_recov(self):
"""
Test that we can pull out data particles one at a time from for a recovered
parser and file.
"""
self.stream_handle = open(os.path.join(RESOURCE_PATH, 'FLO_short.DAT'))
self.build_recov_parser()
# get all 6 records in this file one at a time, comparing the state and particle
result = self.parser.get_records(1)
self.assert_result(result, self.particle_a_recov)
result = self.parser.get_records(1)
self.assert_result(result, self.particle_b_recov)
result = self.parser.get_records(1)
self.assert_result(result, self.particle_c_recov)
result = self.parser.get_records(1)
self.assert_result(result, self.particle_d_recov)
result = self.parser.get_records(1)
self.assert_result(result, self.particle_e_recov)
result = self.parser.get_records(1)
self.assert_result(result, self.particle_f_recov)
# make sure there are no more records
result = self.parser.get_records(1)
self.assertEqual(result, [])
# make sure there were no exceptions
self.assertEqual(self.exception_callback_value, [])
self.stream_handle.close()
def test_get_many(self):
"""
Read test data from the file and pull out multiple data particles a few a time.
Assert that the results are those we expected.
"""
self.stream_handle = open(os.path.join(RESOURCE_PATH,
'node59p1_0.flort.dat'))
self.build_telem_parser()
# get 18 total
result = self.parser.get_records(3)
result.extend(self.parser.get_records(10))
result.extend(self.parser.get_records(5))
particle_to_yml(result, os.path.join(RESOURCE_PATH, 'node59p1_0.flort.yml'))
self.stream_handle.close()
self.assert_particles(result, "node59p1_0.flort.yml", RESOURCE_PATH)
# make sure there were no exceptions
self.assertEqual(self.exception_callback_value, [])
def test_get_many_recov(self):
"""
Read recovered test data from the file and pull out multiple data particles at one time.
Assert that the results are those we expected.
"""
self.stream_handle = open(os.path.join(RESOURCE_PATH, 'FLO_short.DAT'))
self.build_recov_parser()
# get all 6 records
result = self.parser.get_records(6)
# compare returned particles
self.assertEqual(result,
[self.particle_a_recov,
self.particle_b_recov,
self.particle_c_recov,
self.particle_d_recov,
self.particle_e_recov,
self.particle_f_recov])
# make sure there were no exceptions
self.assertEqual(self.exception_callback_value, [])
def test_dash(self):
"""
Test that the particle with a field replaced by dashes is found
"""
self.stream_handle = open(os.path.join(RESOURCE_PATH,
'node59p1_0_dash.flort.dat'))
self.build_telem_parser()
result = self.parser.get_records(18)
particle_to_yml(result, os.path.join(RESOURCE_PATH, 'node59p1_0_dash.flort.yml'))
self.assert_particles(result, "node59p1_0_dash.flort.yml", RESOURCE_PATH)
# make sure there were no exceptions
self.assertEqual(self.exception_callback_value, [])
def test_long_stream(self):
"""
Read test data and pull out telemetered data particles and compare against yml
"""
self.stream_handle = open(os.path.join(RESOURCE_PATH, 'node59p1_0.flort.dat'))
self.build_telem_parser()
particles = self.parser.get_records(18)
particle_to_yml(particles, os.path.join(RESOURCE_PATH, 'node59p1_0.flort.yml'))
self.assert_particles(particles, "node59p1_0.flort.yml", RESOURCE_PATH)
# confirm no exceptions occurred
self.assertEqual(self.exception_callback_value, [])
def test_long_stream_recov(self):
"""
test that a longer file can be read and compare the end particles
"""
self.stream_handle = open(os.path.join(RESOURCE_PATH, 'FLO15908.DAT'))
self.build_recov_parser()
result = self.parser.get_records(96)
for particle in result:
log.debug(particle.generate())
# compare returned particles at the start of the file
self.assertEqual(result[0], self.particle_a_recov)
self.assertEqual(result[1], self.particle_b_recov)
self.assertEqual(result[2], self.particle_c_recov)
# compare returned particles at the end of the file
self.assertEqual(result[-2], self.particle_long_before_last)
self.assertEqual(result[-1], self.particle_long_last)
# make sure there were no exceptions
self.assertEqual(self.exception_callback_value, [])
def test_against_yml_recov(self):
"""
Read test data and pull out recovered data particles and compare against yml
"""
self.stream_handle = open(os.path.join(RESOURCE_PATH, 'FLO15908.DAT'))
self.build_recov_parser()
        # get 96 particles
particles = self.parser.get_records(96)
particle_to_yml(particles, os.path.join(RESOURCE_PATH, 'FLO15908.yml'))
self.assert_particles(particles, "FLO15908.yml", RESOURCE_PATH)
# confirm no exceptions occurred
self.assertEqual(self.exception_callback_value, [])
def test_bad_header(self):
"""
The file used in this test has a header with 'D0' instead of 'FL' in the first record.
(A dosta_abcdjm_sio record was copied in for the test.)
This results in 5 particles being retrieved instead of 6, and also result in the exception
callback being called.
"""
log.debug('===== START TEST BAD HEADER =====')
num_particles_to_request = 6
num_expected_particles = 5
self.stream_handle = open(os.path.join(RESOURCE_PATH, 'FLO_bad_header.DAT'))
self.build_recov_parser()
particles = self.parser.get_records(num_particles_to_request)
        self.assertEqual(len(particles), num_expected_particles)
particle_to_yml(particles, os.path.join(RESOURCE_PATH, 'flo_bad_header.yml'))
self.assert_particles(particles, "flo_bad_header.yml", RESOURCE_PATH)
log.debug('Exceptions : %s', self.exception_callback_value)
        self.assertTrue(isinstance(self.exception_callback_value[0], UnexpectedDataException))
log.debug('===== END TEST BAD HEADER =====')
|
StarcoderdataPython
|
3585539
|
# csv_specs_generator.py
import numpy as np
import pandas as pd
import torch
import random
import functools
import os
from trajectories_trans_tools import *
import argparse
parser = argparse.ArgumentParser()
parser.add_argument("--path_to_data", help="Path to the original Trajnet data")
parser.add_argument("--output_file", help="output csv file path and name")
args = parser.parse_args()
path = args.path_to_data
def compute_relative_angle(trajectories):
'''
Compute relative orientation of the trajectories passed in parameter with respect to a vector facing up (having an angle of 90 degrees with the x-axis )
Parameters
----------
    trajectories : pytorch tensor of size (nb_trajectories, nb_frames, 4); columns 2 and 3 hold the x/y positions
Returns
-------
mean_angles : Mean rotation of the trajectory with respect to a vector facing up across all frames for every trajectory
max_angles : Maximum rotation of the trajectory with respect to a vector facing up across all frames for every trajectory
is_static : List of boolean values of size nb_trajectory which determines if a pedestrian does not move during the observed nb_frames
'''
speeds = compute_speeds(trajectories[:, :, [2, 3]])[:, 1:, :]
# Remove static positions
cond = (torch.sqrt(speeds[:, :, 0]**2 + speeds[:, :, 1]**2)) > 1
trajectories_without_stops = []
is_static = []
for idx, traj in enumerate(speeds):
is_static_t = False
# Check if the pedestrian does not move
if (len(cond[idx, :].nonzero().size()) == 0):
trajectories_without_stops.append(en_cuda(torch.Tensor([[0, 1]])))
is_static_t = True
else:
trajectories_without_stops.append(
traj[cond[idx, :].nonzero().squeeze(1), :])
is_static.append(is_static_t)
mean_angles = []
max_angles = []
# Compute angle wrt starting position
for idx in range(len(trajectories_without_stops)):
angles = torch.abs(torch.atan2(trajectories_without_stops[idx][
:, 1], trajectories_without_stops[idx][:, 0]) - (0.5 * np.pi))
angles[angles > np.pi] = 2 * np.pi - angles[angles > np.pi]
mean_angles.append(torch.mean(angles))
max_angles.append(torch.max(angles))
return np.degrees(np.array(mean_angles)), np.degrees(np.array(max_angles)), is_static
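# Example (assumption): for a trajectory tensor `t` of shape (N, F, 4) whose
# last two columns are x/y positions, compute_relative_angle(t) returns the
# per-trajectory mean/max deviation in degrees plus a static-pedestrian mask.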
# Dataframe to save
df_infos = []
for dataset in os.listdir(path):
if(not dataset.startswith('.') and dataset not in ['validation']):
print('Reading {} dataset'.format(dataset))
dts = []
for file in os.listdir(path + '/' + dataset):
if(not file.startswith('.')):
print('\tReading {}'.format(file))
# Load tracklets and transform them
res = generate_tracklets(path + '/' + dataset + '/' + file)
rotated = []
nb_critical_neigbors, nb_neigbors, mean_contact, mean_nb_neighbors, mean_nb_critical_neighbors = [], [], [], [], []
# For every transformed tracklet append the corresponding infos
# to the dataframe to Save (generate_tracklets doc for more
# infos)
for result in transform_tracklets_trajectories(res, compute_neighbors=False):
rotated.append(result[0].unsqueeze(0))
nb_critical_neigbors.append(
result[2]['nb_critical_neighb'])
nb_neigbors.append(len(result[2]['neighb']))
mean_nb_neighbors.append(
result[2]['mean_nb_neighbors_per_frame'])
mean_nb_critical_neighbors.append(
result[2]['mean_nb_critical_neighbors_per_frame'])
mean_contact.append(result[2]['mean_contact'])
rotated = torch.cat(rotated, 0)
# Compute the mean and max deviation of the trajectories with respect to the vector (0,1)
# (Which has a similar orientation to the vector between the first point and second point of every trajectory)
mean, max_, is_static = compute_relative_angle(rotated)
# tracklet specs
df_tmp = pd.DataFrame(columns=['Dataset', 'File', 'Track_ID', 'Mean_rotation', 'nb_neighbors', 'nb_critical_neighbors',
'mean_nb_neighbors_per_frame', 'mean_nb_critical_neighbors_per_frame', 'mean_contact', 'is_static'])
df_tmp['Mean_rotation'] = mean
df_tmp['Dataset'] = dataset
df_tmp['File'] = file
df_tmp['Track_ID'] = rotated[:, 0, 1].cpu().numpy()
df_tmp['nb_neighbors'] = nb_neigbors
df_tmp['mean_contact'] = mean_contact
df_tmp['nb_critical_neighbors'] = nb_critical_neigbors
df_tmp['is_static'] = is_static
df_tmp['mean_nb_neighbors_per_frame'] = mean_nb_neighbors
df_tmp[
'mean_nb_critical_neighbors_per_frame'] = mean_nb_critical_neighbors
df_infos.append(df_tmp)
# Save final Dataframe
df = pd.concat(df_infos).reset_index().drop(['index'], axis=1)
df.to_csv(args.output_file)
print('CSV file saved')
|
StarcoderdataPython
|
122636
|
from game_states import *
pygame.init()
FPS = 60
DISPLAY_WIDTH = 600
DISPLAY_HEIGHT = 600
DISPLAY = pygame.display.set_mode([DISPLAY_WIDTH, DISPLAY_HEIGHT])
clock = pygame.time.Clock()
def main_loop():
# TODO: Add a dedicated state manager for this.
game_state = Game_State(DISPLAY_WIDTH, DISPLAY_HEIGHT)
while True:
# Temporary fix. The player's model for some reason does not update as
# it should and smears across the screen. This fixes that, though in
# a messy way.
DISPLAY.fill(BLACK)
pressed_buttons = pygame.key.get_pressed()
game_state.handle_events(pressed_buttons, DISPLAY_WIDTH, DISPLAY_HEIGHT)
game_state.update()
game_state.render(DISPLAY)
pygame.display.update()
clock.tick(FPS)
main_loop()
|
StarcoderdataPython
|
3587615
|
#!/usr/bin/env python2
"""
Reads PDFs in the parent directory, creates directories based on their
names, splits the PDFs and exports the pages into directories based on the original filename.
@Author: <NAME>
Required:
- pip install pyPdf
Inspired by:
- http://stackoverflow.com/questions/490195/split-a-multi-page-pdf-file-into-multiple-pdf-files-with-python
- http://stackoverflow.com/questions/273192/how-to-check-if-a-directory-exists-and-create-it-if-necessary/14364249#14364249
"""
import os
from pyPdf import PdfFileWriter, PdfFileReader
# List all the filenames in the parent directory and filter to PDFs only.
os.chdir('..')
files = os.listdir('.')
files = filter(lambda x: x.lower().endswith('.pdf'), files)
for f in files:
print 'Processing {}'.format(f)
# Remove file extension.
folder = f[:-4] + '_pages'
# Create directory for split output if it does not exist.
try:
os.makedirs(folder)
except OSError:
if not os.path.isdir(folder):
raise
# Read in PDF.
inputpdf = PdfFileReader(open(f, "rb"))
# Export pages of PDF.
for i in xrange(inputpdf.numPages):
output = PdfFileWriter()
output.addPage(inputpdf.getPage(i))
outPath = os.path.join(folder, "{}.pdf".format(i+1))
with open(outPath, "wb") as outputStream:
output.write(outputStream)
print '{} pages created'.format(inputpdf.numPages)
print
|
StarcoderdataPython
|
4871246
|
# NeuroDataDesign/lids-bloby
import sys
import numpy as np
import matplotlib.pyplot as plt
import os
import progressbar
from image_processing import (
ImageStack,
read_tif
)
from detectors import (
DoG,
find_negative_curvative_points,
blob_descriptors,
post_prune,
is_well_connected
)
#REMOVE THIS CODE LATER
import sys
sys.path.append('../clarity-f17s18/src/util/')
from ImageDrawer import ImageDrawer
import tifffile as tiff
class BlobDetector():
print_level = 1
@classmethod
def detect_3d_blobs(cls, fname, batch_process=False, inverted=0, output_dir='./output/'):
# Read in images.
# If batch is true then image is broken up for faster processing
print_level = cls.print_level
img_stack = read_tif(fname, batch=batch_process, print_level=print_level)
# Compute SIFT features
DoG_stack = []
detected_blobs = []
for i in range(img_stack.stack_size):
if print_level:
if img_stack.stack_size == 1:
print("Computing DoG for image")
else:
print("Computing DoG for image {}".format(i+1))
DoG_stack = DoG(img_stack.images[i], dark=inverted, print_level=print_level)
# Find concave points
U = set()
bar = progressbar.ProgressBar()
concave_point_bar = bar(DoG_stack)
if not print_level:
concave_point_bar = DoG_stack
else:
print("Computing concave points")
for sigma, DoG_img in concave_point_bar:
indices = find_negative_curvative_points(DoG_img)
for idx in range(indices.shape[0]):
U.add(tuple(indices[idx,:].astype(int)))
if print_level:
print("{} concave points found".format(len(U)))
# Compute blob descriptors
        # TODO: calculating the blob descriptors is taking way too long. We need to truncate U
stack_iter = zip(DoG_stack, img_stack.images)
if print_level:
bar = progressbar.ProgressBar()
stack_iter = bar([x for x in stack_iter])
print("Computing blob descriptors")
blob_candidates_T = {}
for (sigma, DoG_img), intensity_img in stack_iter:
blob_candidates_T[sigma] = blob_descriptors(DoG_img, intensity_img, sigma, U)
# Auto post-pruning using GMM
detected_blobs = post_prune(blob_candidates_T)
outfile_path = output_dir + 'detected_blob_centers_stack_{}.csv'.format(i+1)
print("Writing detected blobs to {} ...".format(outfile_path))
outfile = open(outfile_path, 'w')
for blob in detected_blobs:
outfile.write(','.join(str(x) for x in blob) + '\n')
outfile.close()
print("Done")
if __name__ == "__main__":
file_path = './img/blurred_147_cells.tif'
BlobDetector.detect_3d_blobs(file_path, batch_process=True)
|
StarcoderdataPython
|
1688197
|
<reponame>PwC-FaST/fast-webapp
# Generated by Django 2.1.2 on 2019-01-02 16:42
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
('core', '0001_initial'),
('farming', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='FarmParcelCropNeeds',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('target_yield', models.FloatField()),
('is_active', models.BooleanField(default=True)),
('estimated_nitrogen_needed', models.FloatField(blank=True, null=True)),
('estimated_phosphorus_needed', models.FloatField(blank=True, null=True)),
('estimated_potassium_needed', models.FloatField(blank=True, null=True)),
('priority_order', models.IntegerField(blank=True, null=True)),
('farm_parcel', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='crop_needs', to='farming.FarmParcel')),
],
),
migrations.CreateModel(
name='FarmParcelNutrientPlanResult',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('manure_quantity', models.FloatField(default=0)),
('chemical_quantity', models.FloatField(default=0)),
('chemical_type', models.CharField(blank=True, max_length=25, null=True)),
('farm_parcel_crop_needs', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, related_name='nutrient_plan_result', to='nmp.FarmParcelCropNeeds')),
],
),
migrations.CreateModel(
name='ImportedOrExportedLivestockManure',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('purity', models.FloatField()),
('nitrogen_content', models.FloatField()),
('phosphorus_content', models.FloatField()),
('potassium_content', models.FloatField()),
('total_quantity', models.FloatField()),
('livestock_species', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='imported_or_exported_livestock_manures', to='farming.LivestockSpecies')),
],
),
migrations.CreateModel(
name='Plan',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('created_at', models.DateTimeField(auto_now_add=True)),
('updated_at', models.DateTimeField(auto_now=True)),
('name', models.CharField(max_length=50)),
('is_active', models.BooleanField(default=False)),
('created_by', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='nmp_plans_created', to='core.FaSTUser')),
('farm', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='nmp_plans', to='farming.Farm')),
('updated_by', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='nmp_plans_updated', to='core.FaSTUser')),
],
),
migrations.CreateModel(
name='ProducedLivestockManure',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('purity', models.FloatField()),
('nitrogen_content', models.FloatField()),
('phosphorus_content', models.FloatField()),
('potassium_content', models.FloatField()),
('number_of_heads', models.IntegerField()),
('storage_days', models.IntegerField()),
('liters_per_head_per_day', models.FloatField()),
('livestock_species', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='produced_livestock_manures', to='farming.LivestockSpecies')),
('plan', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='produced_livestock_manures', to='nmp.Plan')),
],
),
migrations.AddField(
model_name='importedorexportedlivestockmanure',
name='plan',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='imported_or_exported_livestock_manures', to='nmp.Plan'),
),
migrations.AddField(
model_name='farmparcelcropneeds',
name='plan',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='farm_parcel_crop_needs', to='nmp.Plan'),
),
]
|
StarcoderdataPython
|
8041778
|
<filename>src/smart_enums/option.py
"""The `Option` type represents an optional value. An Option is either Some, which contains a value or Nothing, which doesn't.
"""
from typing import Any
class Some:
    content: Any
def __init__(self, content: Any) -> None:
self.content: Any = content
class Nothing:
pass
class Option:
Some: Some
Nothing: Nothing
def __init__(self, option: Any) -> None:
if isinstance(option, Some):
self.Some = option
elif isinstance(option, Nothing):
self.Nothing = option
elif hasattr(option, "__name__") and option.__name__ == "Nothing":
self.Nothing = option
else:
raise NotImplementedError(
"Option only takes a Some or Nothing object")
def is_some(self) -> bool:
"""Returns true if the `Option` contains the `Some` enum.
Returns:
bool: Returns true if the Some value contains a value.
"""
if hasattr(self, "Some"):
return True
return False
def is_nothing(self) -> bool:
"""Returns true if the `Option` contains the `Nothing` enum.
Returns:
bool: Returns true if the `Option` is a `Nothing` value.
"""
if hasattr(self, "Nothing"):
return True
return False
def get_content(self) -> Any:
"""Returns the content of the `Some` value.
Returns:
Any: Returns the content of `Some`
"""
if hasattr(self, "Some"):
return self.Some.content
return None
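# Illustrative usage sketch (not part of the original module): wrap a value in
# Some, or use Nothing for the empty case, then query the resulting Option.
if __name__ == "__main__":
    opt = Option(Some(42))
    assert opt.is_some()
    assert opt.get_content() == 42
    empty = Option(Nothing())
    assert empty.is_nothing()
    assert empty.get_content() is None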
|
StarcoderdataPython
|
4847006
|
from collections import OrderedDict
import numpy as np
from ..Operation import Operation
inputs = OrderedDict(x_y_arrays=[])
outputs = OrderedDict(x_ymean=None)
class ArrayYMean(Operation):
"""
Average the second column of one or more n-by-2 arrays
"""
def __init__(self):
super(ArrayYMean, self).__init__(inputs,outputs)
self.input_doc['x_y_arrays'] = 'list of n-by-2 arrays'
self.output_doc['x_ymean'] = 'n-by-2 array of x and mean(y)'
def run(self):
x_y_arrays = self.inputs['x_y_arrays']
x_ymean = None
if len(x_y_arrays) > 0:
x_ymean = np.zeros(x_y_arrays[0].shape)
x_ymean[:,0] = x_y_arrays[0][:,0]
x_ymean[:,1] = np.mean([xy[:,1] for xy in x_y_arrays],axis=0)
self.outputs['x_ymean'] = x_ymean
return self.outputs
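# Illustrative usage sketch, assuming the Operation base class stores the
# provided `inputs` dict on the instance (run() reads self.inputs directly):
#     op = ArrayYMean()
#     op.inputs['x_y_arrays'] = [np.array([[0., 1.], [1., 3.]]),
#                                np.array([[0., 3.], [1., 5.]])]
#     out = op.run()
#     # out['x_ymean'] -> array([[0., 2.], [1., 4.]])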
|
StarcoderdataPython
|
4996463
|
#!/usr/bin/env python
from argparse import ArgumentParser
from subprocess import Popen
app = "britecore-test"
def build(*args, **kwargs):
cmd = "docker build -t %s:latest ." % app
execute_cmd(cmd)
def start(*args, **kwargs):
cmd = "docker run --name %s --rm -p 8080:8080 %s:latest" % (app, app)
execute_cmd(cmd)
def stop(*args, **kwargs):
cmd = "docker rm -f -v %s" % app
execute_cmd(cmd)
def ssh(*args, **kwargs):
cmd = "docker exec -it %s /bin/bash" % app
execute_cmd(cmd)
def run_tests(*args, **kwargs):
cmd = "docker exec -it %s /app/run_tests.sh" % app
execute_cmd(cmd)
def watch_sass(*args, **kwargs):
sass_cmd = "sass --watch /app/web/static/css:/app/web/static/css"
cmd = "docker exec -it %s sh -c '%s'" % (app, sass_cmd)
execute_cmd(cmd)
def execute_cmd(cmd):
res = Popen(cmd, shell=True)
output, error = res.communicate()
if res.returncode != 0 and error is not None:
        print(error)
if __name__ == "__main__":
parser = ArgumentParser(description="Docker Helper")
subparser = parser.add_subparsers(title="Available commands", dest="command")
sp_build_docker = subparser.add_parser("build")
sp_start_docker = subparser.add_parser("start")
sp_stop_docker = subparser.add_parser("stop")
sp_ssh_docker = subparser.add_parser("ssh")
sp_run_tests_docker = subparser.add_parser("run_tests")
sp_watch_sass_docker = subparser.add_parser("watch_sass")
args = parser.parse_args()
params = dict(vars(args))
try:
locals()[args.command](**params)
except Exception as e:
print(e)
|
StarcoderdataPython
|
225001
|
<filename>get-of-metrics/usr/bin/get-of-metrics.py
#!/usr/bin/python3
import threading
import json
import logging
import paramiko
import argparse
from time import sleep
from prometheus_client import start_http_server
from prometheus_client.core import REGISTRY, CounterMetricFamily
from datetime import datetime
from re import finditer, sub
data = ''
ALIAS = "alias"
HOST = "host"
HOSTS = "hosts"
USER_NAME = "user"
USER_PASSWORD = "password"
DELAY_TIME = "delay"
PORT = "port"
RX_PACKETS = 'rx_packets'
TX_PACKETS = 'tx_packets'
RX_BYTES = 'rx_bytes'
TX_BYTES = 'tx_bytes'
RX_ERRORS = 'rx_errors'
TX_ERRORS = 'tx_errors'
RX_DROPS = 'rx_drops'
TX_DROPS = 'tx_drops'
RX_FRAME_ERR = 'rx_frame_err'
RX_OVER_ERR = 'rx_over_err'
RX_CRC_ERR = 'rx_crc_err'
COLLISIONS = 'collisions'
NODE_NAME = 'node_name'
DEVICE = 'device'
DESCRIPTION = 'Custom metrics'
class Collector(object):
def __init__(self, alias_name=''):
self.alias_name = alias_name
self.log = logging.getLogger(alias_name)
self.log.addHandler(logging.StreamHandler())
self.log.setLevel(logging.INFO)
def collect(self):
# metric list to be exposed
metrics = {
TX_PACKETS: CounterMetricFamily(TX_PACKETS, DESCRIPTION, labels=[NODE_NAME, DEVICE]),
RX_PACKETS: CounterMetricFamily(RX_PACKETS, DESCRIPTION, labels=[NODE_NAME, DEVICE]),
RX_BYTES: CounterMetricFamily(RX_BYTES, DESCRIPTION, labels=[NODE_NAME, DEVICE]),
TX_BYTES: CounterMetricFamily(TX_BYTES, DESCRIPTION, labels=[NODE_NAME, DEVICE]),
RX_ERRORS: CounterMetricFamily(RX_ERRORS, DESCRIPTION, labels=[NODE_NAME, DEVICE]),
TX_ERRORS: CounterMetricFamily(TX_ERRORS, DESCRIPTION, labels=[NODE_NAME, DEVICE]),
RX_DROPS: CounterMetricFamily(RX_DROPS, DESCRIPTION, labels=[NODE_NAME, DEVICE]),
TX_DROPS: CounterMetricFamily(TX_DROPS, DESCRIPTION, labels=[NODE_NAME, DEVICE]),
RX_FRAME_ERR: CounterMetricFamily(RX_FRAME_ERR, DESCRIPTION, labels=[NODE_NAME, DEVICE]),
RX_OVER_ERR: CounterMetricFamily(RX_OVER_ERR, DESCRIPTION, labels=[NODE_NAME, DEVICE]),
RX_CRC_ERR: CounterMetricFamily(RX_CRC_ERR, DESCRIPTION, labels=[NODE_NAME, DEVICE]),
COLLISIONS: CounterMetricFamily(COLLISIONS, DESCRIPTION, labels=[NODE_NAME, DEVICE])
}
# Regex (Regular Expression) allows us to find and group the parts of the content that meet certain rules.
# The finditer in the re library scans left-to-right, and matches are returned in the order found
# As a result, key and value are obtained with match.group(1) and match.group(2).
# Also, finditer saves time and memory. To see how the regex expression reacts to the .prom file content
# check the link : regex101.com/r/biJY82/3
# This regex expression finds "key = value" strings and groups them.
# "[]" matches a single character present in the set such [\w. ].
# "\w" matches any word character (equal to [a-zA-Z0-9_]).
# "+" matches between one and unlimited times, as many times as possible, giving back as needed (greedy).
# "\s" matches any whitespace character (equal to [\r\n\t\f\v ]).
# (?!word|word|..) matches the words in the set.
regex = r"\s(?!mac|config|state|speed)(\w+)\s=\s([\w.]+)"
# logs the output data from switch
self.save_output()
try:
matches = finditer(regex, data)
port = 'port'
for match in matches:
key = match.group(1)
value = match.group(2)
if key == 'index':
port = 'port%s' % value
# otherwise, it writes the metrics in the .prom file
else:
metrics[key].add_metric([self.alias_name, port], float(value))
for _ in metrics:
yield metrics[_]
except Exception as e:
connect_error_msg1 = 'Regex Error:'
self.save_log(connect_error_msg1, data)
self.save_log(connect_error_msg1, e)
self.log.info('%s %s' %(connect_error_msg1, e))
# save_log, to record the error that occurs in the functions
def save_log(self, err_msg1, err_msg2):
try:
error_log_file = open('/var/log/get-of-metrics/logs/regex_errors_%s.log' % self.alias_name, 'a+')
error_log_file.write('%s %s %s\n' % (str(datetime.now()), err_msg1, err_msg2))
finally:
error_log_file.close()
# save_output to record the last output from switch
def save_output(self):
try:
output_log_file = open('/var/log/get-of-metrics/logs/output_log_%s.log' % self.alias_name, 'w+')
output_log_file.write('%s:\n%s\n' % (str(datetime.now()), data))
finally:
output_log_file.close()
# parse_args function allows us to control the script and get the parameters in commandline
def parse_args():
parent_parser = argparse.ArgumentParser(add_help=False)
parent_parser.add_argument("-t", dest=DELAY_TIME, required=False, help="<Optional> Enter a delay time. Every time "
"it waits for the next scraping. Default "
"value is 5 seconds ",
default=5, type=float)
    argument_parser = argparse.ArgumentParser(
        description="This Python script scrapes and parses port statistics from Broadcom switches for "
                    "Prometheus and Node Exporter. Based on github.com/Broadcom-Switch/of-dpa. "
                    "Saves the files as _*alias_name*_.prom in the specified directory or, if no "
                    "directory is specified, in the same directory where the script is placed. "
                    "Alias, host, username and user password must be entered to run the script. "
                    "It waits for a delay between scrapes; the default delay is 5 seconds. "
                    "The directory must be created before the script runs, because Node Exporter reads the "
                    "directory you defined in the Node Exporter config file.", parents=[parent_parser])
argument_parser.add_argument("-a", dest=ALIAS, required=True, help="<Required> Enter a alias name",
type=str)
argument_parser.add_argument("-i", dest=HOST, required=True, help="<Required> Enter a host ip or host name",
type=str)
argument_parser.add_argument("-u", dest=USER_NAME, required=True, help="<Required> Enter the root username",
type=str)
argument_parser.add_argument("-p", dest=USER_PASSWORD, required=True,
help="<Required> Enter the user password",
type=str)
args = vars(argument_parser.parse_args())
return args
class GetMetrics:
def __init__(self, alias_name, ip, user_name, user_password, delay_time):
self.alias_name = alias_name
self.ip = ip
self.user_name = user_name
        self.user_password = user_password
self.delay_time = delay_time
self.ssh = paramiko.SSHClient()
self.log = logging.getLogger(str(ip))
self.log.addHandler(logging.StreamHandler())
self.log.setLevel(logging.INFO)
# connect function, to establish connection and reconnection. If in the first connection, an error occurs script
# will stop running. If the connection lost while script running. It tries to reconnect with 60 seconds intervals.
# set_connect is set to 1 to say this is the first connection. With this way, if connection lost, it will enter
# the reconnection phase while the script running.
def connect(self, set_connect):
status_code = 3
try:
# in reconnection, close the session
if set_connect == 0:
self.log.info("Connection manually closed")
self.ssh.close()
# connects to the server via ssh
self.ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
self.ssh.connect(self.ip, username=self.user_name, password=self.user_password)
except paramiko.AuthenticationException:
connect_error_msg1 = 'Connect Error: Failed to connect'
connect_error_msg2 = 'due to wrong username/password'
self.log.info("Failed to connect to %s due to wrong username/password" % self.ip)
status_code = 1
self.ssh.close()
except Exception as e:
self.log.info('Not connected to %s' % self.ip)
connect_error_msg1 = 'Connect Error:'
connect_error_msg2 = str(e)
self.ssh.close()
if set_connect == 1:
status_code = 1
else:
status_code = 0
connect_error_msg1 = 'Reconnect Error:'
self.log.info("Server is down. Reconnecting...")
sleep(60)
finally:
if status_code == 1:
self.save_log(connect_error_msg1, connect_error_msg2)
exit(status_code)
elif status_code == 0:
self.save_log(connect_error_msg1, connect_error_msg2)
else:
if set_connect == 1:
self.log.info("Connected to %s" % self.ip)
self.log.info("Scraping the metrics has been initialized...")
elif set_connect == 0:
self.log.info("Reconnected")
# collect function, executing the shell code and extracting the output
def collect(self, set_connect):
SHELL_CODE = 'client_port_table_dump --stats'
try:
# the data is in std_out
std_in, std_out, std_err = self.ssh.exec_command(SHELL_CODE, timeout=10)
            # wait for the given time; the delay must come here, before reading, so the data is available
# https://stackoverflow.com/a/32758464/14091937
sleep(self.delay_time)
            # if the command produced output, return it
if (out := ''.join(std_out.readlines())) != "None":
return out
# if not, there is a problem.
else:
err = ''.join(std_err.readlines())
connect_error_msg1 = 'Collect Error: %s' % str(err)
connect_error_msg2 = 'stdError Return Code'
self.save_log(connect_error_msg1, connect_error_msg2)
except Exception as e:
connect_error_msg1 = 'Collect Error:'
connect_error_msg2 = str(e)
self.save_log(connect_error_msg1, connect_error_msg2)
# reconnection
self.connect(0)
# save_log, to record the error that occurs in the functions
def save_log(self, err_msg1, err_msg2):
try:
error_log_file = open('/var/log/get-of-metrics/errors_%s.log' % self.alias_name, 'a+')
error_log_file.write('%s %s %s\n' % (str(datetime.now()), err_msg1, err_msg2))
finally:
error_log_file.close()
# execute function to execute the all the function in the exact order and checks the connection and output
def execute(self):
global data
self.connect(1)
# constantly registers the metrics constantly and works in their own threads
REGISTRY.register(Collector(self.alias_name))
while True:
data = self.collect(0)
sleep(self.delay_time)
# the main function to execute the all the function in the exact order and checks the connection and output
if __name__ == "__main__":
# Start up the server to expose the metrics.
start_http_server(8080)
connection_list = parse_args()
GetMetrics(connection_list[ALIAS], connection_list[HOST], connection_list[USER_NAME], \
connection_list[USER_PASSWORD], connection_list[DELAY_TIME]).execute()
|
StarcoderdataPython
|
1658851
|
class range:
def __init__(self, a, b=None):
if b:
self.index = a
self.end = b
else:
self.index = 0
self.end = a
def __iter__(self):
return self
def __next__(self):
if self.index < self.end:
index = self.index
self.index += 1
return index
raise StopIteration
def range(a, b=None):
    if b:
        index, end = a, b
    else:
        index, end = 0, a
    out = list()
    while index < end:
        out.append(index)
        index += 1
    return out
def repr(obj):
if obj is object:
return "<class 'object'>"
return obj.__repr__()
def print(*args):
out = JSArray()
for arg in args:
if jscode('arg.__class__ !== undefined'):
r = jstype(repr(arg))
jscode('out.push(r)')
elif jscode('arg.__metaclass__ !== undefined'):
name = jscode('arg.__name__')
jscode("""out.push("<class '"+ name +"'>")""")
else:
jscode('out.push(arg)')
jscode('console.log.apply(console, out)')
class map:
def __init__(self, func, iterable):
self.func = func
self.iterable = iter(iterable)
def __iter__(self):
return self
def __next__(self):
n = next(self.iterable)
r = self.func(n)
return r
def __repr__(self):
return '<builtins.map xxx>'
def jstype(obj):
return obj.__jstype__()
def hash(obj):
return obj.__hash__()
def iter(obj):
return obj.__iter__()
def next(obj):
if jscode('obj.next !== undefined'):
r = jscode('obj.next()')
if jscode('r.done'):
raise StopIteration
else:
return jscode('r.value')
return obj.__next__()
def len(obj):
return obj.__len__()
def abs(obj):
return obj.__abs__()
def all(iterable):
for element in iterable:
if not element:
return False
return True
def any(iterable):
for element in iterable:
if element:
return True
return False
def callable(obj):
if jscode("{}.toString.call(obj) == '[object Function]'"):
return True
if jscode('obj.__metaclass__ !== undefined'):
return True
if jscode("lookup(obj, '__call__')"):
return True
return False
def classmethod(func):
func.classmethod = True
return func
def staticmethod(func):
func.staticmethod = True
return func
class enumerate:
def __init__(self, iterator):
self.iterator = iter(iterator)
self.index = 0
def __repr__(self):
return '<enumerate object at 0x1234567890>'
def __iter__(self):
return self
def __next__(self):
index = self.index
self.index = self.index + 1
return (index, next(self.iterator))
def getattr(obj, attr, d):
r = lookup(obj, attr)
if jscode('r === undefined'):
if jscode('d === undefined'):
raise AttributeError
else:
return d
else:
return r
|
StarcoderdataPython
|
8086375
|
import requests
from bs4 import BeautifulSoup
import json
def get_pinned(github_user):
URL = f"https://github.com/{github_user}"
page = requests.get(URL)
soup = BeautifulSoup(page.content, "html.parser")
pinned_data = soup.find_all("div", {"class": "pinned-item-list-item-content"})
pinned_posts = []
for post in pinned_data:
pinned_posts.append(post.find("a")["href"])
return pinned_posts
def get_projects(github_user, query):
URL = f"https://github.com/{github_user}?tab=repositories&q={query}&type=source"
page = requests.get(URL)
soup = BeautifulSoup(page.content, "html.parser")
projects = soup.body.find("ul", {"data-filterable-for": "your-repos-filter"})
if not projects:
return []
projects = projects.find_all("li")
projects_parsed = []
for project in projects:
project_data = {}
title = project.find("h3").a
project_data["name"] = title.text.strip().replace("-", " ").capitalize()
project_data["link"] = title["href"]
project_data["tags"] = [query]
impact = project.find("div", class_="f6 color-text-secondary mt-2")
if impact:
impact = impact.find_all("a")
for data in impact:
project_data[data["href"].split("/")[-1]] = int(data.text.strip())
if "stargazers" not in project_data:
project_data["stargazers"] = 0
if "members" not in project_data:
project_data["members"] = 0
project_data["score"] = project_data["stargazers"] + project_data["members"] * 5
else:
project_data["score"] = 0
projects_parsed.append(project_data)
return projects_parsed
def get_youtube_data(youtube_username):
initial_data = "var ytInitialData = "
final_data = ";"
url = f"https://www.youtube.com/{youtube_username}/videos"
page = requests.get(url)
soup = BeautifulSoup(page.content, "html.parser")
scripts = soup.body.find_all("script")
videos_data = []
for script in scripts:
data = script.encode_contents().decode(errors="replace")
if initial_data not in data:
continue
data = data.replace(initial_data, "").replace(final_data, "")
tab_renderers = json.loads(data)["contents"]
tab_renderers = tab_renderers["twoColumnBrowseResultsRenderer"]["tabs"]
for tab in tab_renderers:
if "tabRenderer" not in tab:
continue
if tab["tabRenderer"]["title"] != "Videos":
continue
videos = tab["tabRenderer"]["content"]["sectionListRenderer"]
videos = videos["contents"][0]["itemSectionRenderer"]
videos = videos["contents"][0]["gridRenderer"]["items"]
for video in videos:
if "gridVideoRenderer" not in video:
continue
video = video["gridVideoRenderer"]
published = ""
if "publishedTimeText" in video:
published = video["publishedTimeText"]["simpleText"]
view_count_text = ""
if "simpleText" in video["viewCountText"]:
view_count_text = video["viewCountText"]["simpleText"]
video_data = {
"thumbnail": video["thumbnail"]["thumbnails"][-1]["url"],
"title": video["title"]["runs"][0]["text"],
"published": published,
"viewCountText": view_count_text,
"url": f"https://www.youtube.com/watch?v={video['videoId']}",
}
videos_data.append(video_data)
return videos_data
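# Illustrative usage sketch ("octocat" is a placeholder username; the calls
# below perform live HTTP requests against github.com):
if __name__ == "__main__":
    print(get_pinned("octocat"))
    print(get_projects("octocat", "python"))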
|
StarcoderdataPython
|
5036123
|
#!/usr/bin/env python
#
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
#
# <NAME>
# California Institute of Technology
# (C) 1998-2005 All Rights Reserved
#
# <LicenseText>
#
# ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
#
from .AbstractNode import AbstractNode
class Composition(AbstractNode):
def onBlock(self, body):
self._setOperand(body)
return
def onCone(self, body):
self._setOperand(body)
return
def onCylinder(self, body):
self._setOperand(body)
return
def onPrism(self, body):
self._setOperand(body)
return
def onPyramid(self, body):
self._setOperand(body)
return
def onSphere(self, body):
self._setOperand(body)
return
def onTorus(self, body):
self._setOperand(body)
return
def onGeneralizedCone(self, body):
self._setOperand(body)
return
def onDifference(self, body):
self._setOperand(body)
return
def onIntersection(self, body):
self._setOperand(body)
return
def onUnion(self, body):
self._setOperand(body)
return
def onDilation(self, body):
self._setOperand(body)
return
def onReflection(self, body):
self._setOperand(body)
return
def onReversal(self, body):
self._setOperand(body)
return
def onRotation(self, body):
self._setOperand(body)
return
def onTranslation(self, body):
self._setOperand(body)
return
# version
__id__ = "$Id: Composition.py,v 1.1.1.1 2005/03/08 16:13:45 aivazis Exp $"
# End of file
|
StarcoderdataPython
|
5193325
|
from mountequist.impostors.http import Http
from mountequist.impostors.https import Https
|
StarcoderdataPython
|
1960892
|
<filename>src/features/.ipynb_checkpoints/word2vec_features-checkpoint.py
import pickle
import numpy as np
from sklearn.preprocessing import MultiLabelBinarizer
from src.eval_metrics import *
from sklearn.model_selection import train_test_split
with open('data/processed/movies_with_overviews.pkl','rb') as f:
final_movies_set=pickle.load(f)
from gensim import models
model2 = models.KeyedVectors.load_word2vec_format('data/external/GoogleNews-vectors-negative300.bin', binary=True)
from nltk.tokenize import RegexpTokenizer
from stop_words import get_stop_words
tokenizer = RegexpTokenizer(r'\w+')
# create English stop words list
en_stop = get_stop_words('en')
movie_mean_wordvec=np.zeros((len(final_movies_set),300))
genres=[]
rows_to_delete=[]
for i in range(len(final_movies_set)):
mov=final_movies_set[i]
movie_genres=mov['genre_ids']
genres.append(movie_genres)
overview=mov['overview']
tokens = tokenizer.tokenize(overview)
stopped_tokens = [k for k in tokens if not k in en_stop]
count_in_vocab=0
s=0
if len(stopped_tokens)==0:
rows_to_delete.append(i)
genres.pop(-1)
# print overview
# print "sample ",i,"had no nonstops"
else:
for tok in stopped_tokens:
if tok.lower() in model2.vocab:
count_in_vocab+=1
s+=model2[tok.lower()]
if count_in_vocab!=0:
movie_mean_wordvec[i]=s/float(count_in_vocab)
else:
rows_to_delete.append(i)
genres.pop(-1)
# print overview
# print "sample ",i,"had no word2vec"
mask2=[]
for row in range(len(movie_mean_wordvec)):
if row in rows_to_delete:
mask2.append(False)
else:
mask2.append(True)
X=movie_mean_wordvec[mask2]
mlb=MultiLabelBinarizer()
Y=mlb.fit_transform(genres)
textual_features=(X,Y)
with open('data/processed/textual_features.pkl','wb') as f:
pickle.dump(textual_features,f)
with open('models/mlb.pkl','wb') as f:
pickle.dump(mlb,f)
|
StarcoderdataPython
|
6679747
|
global_context = {}
|
StarcoderdataPython
|
3342080
|
<reponame>brightway-lca/brightway2-regional
from bw2data import geomapping
from voluptuous import Invalid
from bw2regional.intersection import Intersection
from bw2regional.tests import BW2RegionalTest
class IntersectionTestCase(BW2RegionalTest):
def test_add_geomappings(self):
inter = Intersection(("foo", "bar"))
inter.register()
self.assertFalse(("foo", "bar") in geomapping)
self.assertFalse("baz" in geomapping)
inter.write([[("foo", "bar"), "baz", 42]])
self.assertTrue(("foo", "bar") in geomapping)
self.assertTrue("baz" in geomapping)
def test_validation(self):
inter = Intersection(("foo", "bar"))
self.assertTrue(inter.validate([]))
self.assertTrue(inter.validate([[1, 2, 3]]))
self.assertTrue(inter.validate([["foo", "bar", 3.0]]))
with self.assertRaises(Invalid):
inter.validate(())
with self.assertRaises(Invalid):
inter.validate([[1, 2]])
with self.assertRaises(Invalid):
inter.validate([[1, 2, {"amount": 3.0}]])
|
StarcoderdataPython
|
5043240
|
from subsystems.lightsubsystem import LightSubsystem
import typing
from commands2 import CommandBase
class RelayControl(CommandBase):
def __init__(self, controller: LightSubsystem,
controlPercent: typing.Callable[[], float]) -> None:
CommandBase.__init__(self)
self.control = controller
self.controlPercentCommand = controlPercent
self.setOutputPercent = lambda percent: self.control.light.set(percent)
self.addRequirements([self.control])
self.setName(__class__.__name__)
def execute(self) -> None:
self.setOutputPercent(self.controlPercentCommand())
def end(self, interrupted: bool) -> None:
self.setOutputPercent(0.0)
|
StarcoderdataPython
|
8037974
|
<gh_stars>10-100
import json
import os
import shutil
from datetime import date
from pathlib import Path
import pytest
from ruamel.yaml import YAML
from typing import List, Dict, Any
from vcr import VCR
from vcr.persisters.filesystem import FilesystemPersister
from simple_smartsheet import Smartsheet, AsyncSmartsheet
from simple_smartsheet.models import Column, Sheet, Row, ColumnType
SMARTSHEET_TOKEN = os.getenv("SMARTSHEET_API_TOKEN", "")
yaml = YAML(typ="safe")
yaml.default_flow_style = False
class MyPersister(FilesystemPersister):
@classmethod
def load_cassette(cls, cassette_path, serializer):
path = Path(cassette_path)
if path.is_file():
path.unlink()
raise ValueError("Cassette was deleted")
def pytest_addoption(parser):
"""Add custom command line options"""
parser.addoption(
"--delete-cassettes", action="store_true", help="Delete all cassettes",
)
parser.addoption(
"--disable-vcr", action="store_true", help="Disable VCR",
)
@pytest.fixture(scope="session")
def custom_cassette_dir(pytestconfig):
return Path(pytestconfig.rootdir) / "tests/sandbox/crud/cassettes"
@pytest.fixture(scope="session")
def custom_vcr(pytestconfig, custom_cassette_dir):
"""Session VCR fixture for fixture setup/teardown"""
record_mode = pytestconfig.getoption("--record-mode")
config = {
"cassette_library_dir": str(custom_cassette_dir),
"decode_compressed_response": True,
"filter_headers": [("authorization", "[REDACTED]")],
"record_mode": record_mode,
}
if pytestconfig.getoption("--disable-vcr"):
config["record_mode"] = "new_episodes"
config["before_record_response"] = lambda *args, **kwargs: None
vcr = VCR(**config)
# if record_mode == "all":
# vcr.register_persister(MyPersister)
return vcr
@pytest.fixture(scope="module")
def vcr_config(pytestconfig):
"""Overwriting pytest-recording 'vcr-config' fixture"""
config = {
"decode_compressed_response": True,
"filter_headers": [("authorization", "[REDACTED]")],
}
if pytestconfig.getoption("--disable-vcr"):
config["record_mode"] = "new_episodes"
config["before_record_response"] = lambda *args, **kwargs: None
return config
# @pytest.fixture
# def vcr(vcr, pytestconfig):
# """Overwriting pytest-recording 'vcr' fixture"""
# record_mode = pytestconfig.getoption("--record-mode")
# if record_mode == "all":
# vcr.register_persister(MyPersister)
# return vcr
def columns_gen() -> List[Column]:
return [
Column(primary=True, title="Full Name", type=ColumnType.TEXT_NUMBER),
Column(title="Email address", type=ColumnType.TEXT_NUMBER),
Column(title="Company", type=ColumnType.TEXT_NUMBER),
Column(title="Number of children", type=ColumnType.TEXT_NUMBER),
Column(
title="Maintains",
type=ColumnType.MULTI_PICKLIST,
options=["simple-smartsheet", "nornir", "napalm", "netmiko", "pydantic"],
),
Column(title="Birth date", type=ColumnType.DATE),
Column(title="Married", type=ColumnType.CHECKBOX),
]
# noinspection PyArgumentList
@pytest.fixture
def placeholder_sheet() -> Sheet:
return Sheet(name="[TEST] Placeholder", columns=columns_gen())
def rows_data_gen() -> List[Dict[str, Any]]:
return [
{
"Full Name": "<NAME>",
"Email address": "<EMAIL>",
"Company": "ACME",
"Number of children": 2,
"Married": True,
"Maintains": ["simple-smartsheet", "nornir"],
},
{
"Full Name": "<NAME>",
"Email address": "<EMAIL>.com",
"Company": "Globex",
"Maintains": ["napalm", "nornir"],
},
{
"Full Name": "<NAME>",
"Email address": "<EMAIL>",
"Company": "ACME",
"Number of children": 1,
"Birth date": date(1990, 1, 1),
"Married": False,
"Maintains": ["napalm", "netmiko", "nornir"],
},
]
def additional_row_data_gen() -> Dict[str, Any]:
return {
"Full Name": "<NAME>",
"Email address": "<EMAIL>",
"Company": "Globex",
"Number of children": 3,
"Birth date": date(1980, 1, 1),
"Married": True,
"Maintains": ["pydantic"],
}
@pytest.fixture
def rows_data() -> List[Dict[str, Any]]:
return rows_data_gen()
@pytest.fixture
def additional_row_data() -> Dict[str, Any]:
return additional_row_data_gen()
@pytest.fixture
def additional_rows_data() -> List[Dict[str, Any]]:
return [
{
"Full Name": "<NAME>",
"Email address": "<EMAIL>",
"Company": "Globex",
"Number of children": 3,
"Birth date": date(1980, 1, 1),
"Married": True,
"Maintains": ["pydantic"],
},
{
"Full Name": "<NAME>",
"Email address": "<EMAIL>",
"Company": "ACME",
"Number of children": 2,
"Birth date": date(1985, 1, 1),
"Married": True,
},
]
@pytest.fixture
def mocked_sheet(pytestconfig) -> Sheet:
path = Path(pytestconfig.rootdir) / "tests/sandbox/data/mocked_sheet.json"
with open(path) as f:
data = json.load(f)
sheet = Sheet.load(data)
return sheet
def fix_cassette(path: Path):
# noinspection PyTypeChecker
with open(path, "r+") as f:
data = yaml.load(f)
changed = False
for i, req_resp in enumerate(data["interactions"]):
request = req_resp["request"]
url = request["uri"]
method = request["method"]
response = req_resp["response"]
tests_sheets_data = []
if method == "GET" and url in {
"https://api.smartsheet.com/2.0/sheets?includeAll=true",
"https://api.smartsheet.com/2.0/reports?includeAll=true",
}:
body = json.loads(response["body"]["string"])
num_sheets = body["totalCount"]
sheets = body["data"]
for sheet_data in sheets:
sheet_name = sheet_data["name"]
if sheet_name.startswith("[TEST]"):
tests_sheets_data.append(sheet_data)
if len(tests_sheets_data) != num_sheets:
body["totalCount"] = len(tests_sheets_data)
body["data"] = tests_sheets_data
changed = True
f.seek(0)
response["body"]["string"] = json.dumps(body)
f.truncate()
if changed:
f.seek(0)
yaml.dump(data, f)
def remove_all_rw_objects(custom_vcr):
with custom_vcr.use_cassette("remove_all_rw_objects.yaml"):
with Smartsheet(SMARTSHEET_TOKEN) as smartsheet:
for sheet in smartsheet.sheets.list():
if sheet.name.startswith("[TEST]") and not any(
pattern in sheet.name for pattern in ("[TEST] Report",)
):
smartsheet.sheets.delete(id=sheet.id)
def create_sheets_for_reports(vcr):
with vcr.use_cassette("setup_sheets_for_reports.yaml"):
with Smartsheet(SMARTSHEET_TOKEN) as smartsheet:
report_sheet1 = Sheet(name="[TEST] Report Sheet 1", columns=columns_gen())
result = smartsheet.sheets.create(report_sheet1)
report_sheet1 = result.obj
rows = [
Row(to_top=True, cells=report_sheet1.make_cells(row_data))
for row_data in rows_data_gen()
]
smartsheet.sheets.add_rows(report_sheet1.id, rows)
report_sheet2 = Sheet(name="[TEST] Report Sheet 2", columns=columns_gen())
result = smartsheet.sheets.create(report_sheet2)
report_sheet2 = result.obj
row = Row(
to_top=True, cells=report_sheet2.make_cells(additional_row_data_gen())
)
smartsheet.sheets.add_row(report_sheet2.id, row)
def create_session_objects(vcr):
with vcr.use_cassette("create_session_objects.yaml"):
with Smartsheet(SMARTSHEET_TOKEN) as smartsheet:
read_only_sheet = Sheet(
name="[TEST] Read-only Sheet", columns=columns_gen()
)
result = smartsheet.sheets.create(read_only_sheet)
read_only_sheet = result.obj
rows = [
Row(to_top=True, cells=read_only_sheet.make_cells(row_data))
for row_data in rows_data_gen()
]
smartsheet.sheets.add_rows(read_only_sheet.id, rows)
@pytest.fixture(scope="session", autouse=True)
def setup_teardown(request, custom_vcr, custom_cassette_dir):
os.environ["SIMPLE_SMARTSHEET_STRICT_VALIDATION"] = "1"
delete_cassettes = request.config.getoption("--delete-cassettes")
if delete_cassettes:
shutil.rmtree(custom_cassette_dir)
custom_cassette_dir.mkdir(parents=True, exist_ok=True)
remove_all_rw_objects(custom_vcr)
create_session_objects(custom_vcr)
yield
@pytest.fixture
def smartsheet():
with Smartsheet(SMARTSHEET_TOKEN) as smartsheet:
yield smartsheet
@pytest.fixture
async def async_smartsheet():
async with AsyncSmartsheet(SMARTSHEET_TOKEN) as smartsheet:
yield smartsheet
|
StarcoderdataPython
|
8194477
|
<reponame>mycolab/ncbi-blast<gh_stars>0
#!/usr/bin/env python
# $Id: python-config.py 503831 2016-06-08 14:54:36Z ucko $
from distutils import sysconfig
import sys
def lookup(want):
if want == 'VERSION':
return sysconfig.get_config_var('VERSION')
elif want == 'INCLUDE':
return ('-I%s -I%s' % (sysconfig.get_python_inc(),
sysconfig.get_python_inc(True)))
elif want == 'LIBPATH':
return ' '.join(sysconfig.get_config_vars('LIBDIR', 'LIBPL'))
elif want == 'DEPS':
return ' '.join(sysconfig.get_config_vars('LIBS', 'SYSLIBS'))
elif want == 'LDVERSION':
return (sysconfig.get_config_var('LDVERSION')
or sysconfig.get_config_var('VERSION'))
elif want == 'LIBS':
return '-lpython' + lookup('LDVERSION') + ' ' + lookup('DEPS')
else:
raise RuntimeError('Unsupported mode ' + want)
print(lookup(sys.argv[1].lstrip('-').upper()))
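# Illustrative invocations (assuming the script is saved as python-config.py):
#   python python-config.py --include   # prints -I<include dirs>
#   python python-config.py --libs      # prints -lpythonX.Y plus dependency libs
#   python python-config.py --version   # prints the Python version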
|
StarcoderdataPython
|
1870113
|
# coding=utf8
# Copyright 2018 JDCLOUD.COM
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# NOTE: This class is auto generated by the jdcloud code generator program.
class CCProtectionConfigSpec(object):
def __init__(self, level, ccThreshold=None, hostQps=None, hostUrlQps=None, ipHostQps=None, ipHostUrlQps=None):
"""
:param level: 防护等级, 0: 正常, 1: 宽松, 2: 紧急, 3: 自定义
:param ccThreshold: (Optional) HTTP 请求数阈值, 防护等级为自定义时必传
:param hostQps: (Optional) Host 的防护阈值, 防护等级为自定义时必传
:param hostUrlQps: (Optional) Host + Url 的防护阈值, 防护等级为自定义时必传
:param ipHostQps: (Optional) 每个源 IP 对 Host 的防护阈值, 防护等级为自定义时必传
:param ipHostUrlQps: (Optional) 每个源 IP 对 Host + Url 的防护阈值, 防护等级为自定义时必传
"""
self.level = level
self.ccThreshold = ccThreshold
self.hostQps = hostQps
self.hostUrlQps = hostUrlQps
self.ipHostQps = ipHostQps
self.ipHostUrlQps = ipHostUrlQps
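# Illustrative usage sketch: a custom protection level (3) must supply every
# threshold explicitly; the numeric values below are placeholders.
if __name__ == "__main__":
    spec = CCProtectionConfigSpec(level=3, ccThreshold=1000, hostQps=500,
                                  hostUrlQps=200, ipHostQps=50, ipHostUrlQps=20)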
|
StarcoderdataPython
|
29087
|
from django.core.serializers.json import DjangoJSONEncoder
class CallableJSONEncoder(DjangoJSONEncoder):
def default(self, obj):
if callable(obj):
return obj()
return super().default(obj)
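# Illustrative usage sketch: callables are invoked at serialization time.
if __name__ == "__main__":
    import json
    print(json.dumps({"value": lambda: 42}, cls=CallableJSONEncoder))  # {"value": 42}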
|
StarcoderdataPython
|
1773680
|
#!/usr/bin/env python
############################################
#
# Login to supercomputer and cd to current work directory
# Apr11 ; greatly simplified by using read_pm functions
# Apr4 ; add option for custom ip addr
# Mar19 ; translated to python by genki
# next logs will be on the git commits
#
############################################
import argparse
import os
import subprocess
import read_data_pm as readpm
parser = argparse.ArgumentParser()
parser.add_argument('--cluster', '-c', type=str)
parser.add_argument('--username', '-u', type=str)
parser.add_argument('--dir', '-d', type=str)
parser.add_argument('--ip', '-i', type=str)
parser.add_argument('--shell', '-s', type=str, choices=['tcsh', 'bash'], default='bash')
args = parser.parse_args()
cname = args.cluster if args.cluster else readpm.specify_cluster()
uname = args.username if args.username else readpm.get_uname(cname)
datapm = readpm.read_perl_module_hashes(readpm.DATAPM)
c1 = datapm['clusters']['unified']
c2 = datapm['clusters'][cname]
path = args.dir if args.dir else readpm.c2path(c1, c2, uname, cname)
sshcd = [ 'ssh', '-t', readpm.c2ip(c2, uname), 'cd {}; {}'.format(path, args.shell) ]
print(' '.join(sshcd))
subprocess.call(sshcd)
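# Illustrative invocation (cluster name, username and shell are placeholders):
#   ./login.py -c mycluster -u alice -s bash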
|
StarcoderdataPython
|
1996783
|
# #!/usr/bin/env python
#
# """
# @package ion.agents.platform.rsn.test.oms_simple
# @file ion/agents/platform/rsn/test/oms_simple.py
# @author <NAME>
# @brief Program that connects to the real RSN OMS endpoint to do basic
# verification of the operations. Note that VPN is required.
# Also, port 5000 on the localhost (via corresponding fully-qualified
# domain name as returned by socket.getfqdn()) needs to be accessible
# from OMS for the event notification to be received here.
#
# For usage, call:
# bin/python ion/agents/platform/rsn/test/oms_simple.py --help
#
# @see https://confluence.oceanobservatories.org/display/CIDev/RSN+OMS+endpoint+implementation+verification
# @see https://confluence.oceanobservatories.org/display/syseng/CIAD+MI+SV+CI-OMS+interface
# """
#
# __author__ = '<NAME>'
# __license__ = 'Apache 2.0'
#
#
# from ion.agents.platform.rsn.oms_event_listener import OmsEventListener
# from ion.agents.platform.responses import InvalidResponse
# from pyon.util.breakpoint import breakpoint
#
# import xmlrpclib
# import sys
# import pprint
# import socket
#
#
# DEFAULT_RSN_OMS_URI = "http://alice:[email protected]:9021/"
# DEFAULT_MAX_WAIT = 70
#
# INVALID_PLATFORM_ID = InvalidResponse.PLATFORM_ID
#
# # use full-qualified domain name as the external host for the registration
# HTTP_SERVER_HOST = socket.getfqdn()
# HTTP_SERVER_PORT = 5000
#
# EVENT_LISTENER_URL = "http://%s:%d/oms" % (HTTP_SERVER_HOST, HTTP_SERVER_PORT)
#
# # max time to wait to receive the test event
# max_wait = 0
#
# # launch IPython shell?
# launch_breakpoint = False
#
# tried = {}
#
#
# def launch_listener(): # pragma: no cover
# def notify_driver_event(evt):
# print("notify_driver_event received: %s" % str(evt.event_instance))
#
# print 'launching listener, port=%d ...' % HTTP_SERVER_PORT
# oms_event_listener = OmsEventListener("dummy_plat_id", notify_driver_event)
# oms_event_listener.keep_notifications()
# oms_event_listener.start_http_server(host='', port=HTTP_SERVER_PORT)
# print 'listener launched'
# return oms_event_listener
#
#
# def main(uri): # pragma: no cover
# oms_event_listener = launch_listener()
#
# print '\nconnecting to %r ...' % uri
# proxy = xmlrpclib.ServerProxy(uri, allow_none=True)
# print 'connection established.'
#
# pp = pprint.PrettyPrinter()
#
# def show_listeners():
# from datetime import datetime
# from ion.agents.platform.util import ntp_2_ion_ts
#
# event_listeners = proxy.event.get_registered_event_listeners()
# print("Event listeners (%d):" % len(event_listeners))
# for a, b in sorted(event_listeners.iteritems(),
# lambda a, b: int(a[1] - b[1])):
# time = datetime.fromtimestamp(float(ntp_2_ion_ts(b)) / 1000)
# print(" %s %s" % (time, a))
# print
#
# def format_val(value):
# prefix = "\t\t"
# print "\n%s%s" % (prefix, pp.pformat(value).replace("\n", "\n" + prefix))
#
# def format_err(msg):
# prefix = "\t\t"
# print "\n%s%s" % (prefix, msg.replace("\n", "\n" + prefix))
#
# def get_method(handler_name, method_name):
# """
# Gets the method from the proxy.
# @param handler_name Name of the handler; can be None to indicate get
# method directly from proxy.
# @param method_name Method's name
#
# @return callable; None if any error getting the method
# """
#
# # get method:
# if handler_name:
# # get handler:
# try:
# handler = getattr(proxy, handler_name)
# except Exception as e:
# print "error getting handler %s: %s: %s" % (handler_name, type(e), str(e))
# return None
# try:
# method = getattr(handler, method_name)
# return method
# except Exception as e:
# print "error method %s.%s: %s: %s" % (handler_name, method_name, type(e), str(e))
# return None
# else:
# try:
# method = getattr(proxy, method_name)
# return method
# except Exception as e:
# print "error getting proxy's method %s: %s: %s" % (method_name, type(e), str(e))
# return None
#
# def run(full_method_name, *args):
# """
# Runs a method against the proxy.
#
# @param full_method_name
# @param args
# """
# global tried
#
# tried[full_method_name] = ""
#
# handler_name, method_name = full_method_name.split(".")
#
# # get the method
# method = get_method(handler_name, method_name)
# if method is None:
# tried[full_method_name] = "could not get handler or method"
# return
#
# sargs = ", ".join(["%r" % a for a in args])
#
# sys.stdout.write("\n%s(%s) -> " % (full_method_name, sargs))
# sys.stdout.flush()
#
# # run method
# retval, reterr = None, None
# try:
# retval = method(*args)
# tried[full_method_name] = "OK"
# # print "%r" % retval
# format_val(retval)
# except xmlrpclib.Fault as e:
# if e.faultCode == 8001:
# reterr = "-- NOT FOUND (fault %s)" % e.faultCode
# else:
# reterr = "-- Fault %d: %s" % (e.faultCode, e.faultString)
# # raise
# # print "Exception: %s: %s" % (type(e), str(e))
# # tried[full_method_name] = str(e)
#
# tried[full_method_name] = reterr
# format_err(reterr)
#
# return retval, reterr
#
# def verify_entry_in_dict(retval, reterr, entry):
# if reterr is not None:
# return retval, reterr
#
# if not isinstance(retval, dict):
# reterr = "-- expecting a dict with entry %r" % entry
# elif entry not in retval:
# reterr = "-- expecting a dict with entry %r" % entry
# else:
# retval = retval[entry]
#
# print("full_method_name = %s" % full_method_name)
# if reterr:
# tried[full_method_name] = reterr
# format_err(reterr)
#
# return retval, reterr
#
# def verify_test_event_notified(retval, reterr, event):
# print("waiting for a max of %d secs for test event to be notified..." % max_wait)
# import time
#
# wait_until = time.time() + max_wait
# got_it = False
# while not got_it and time.time() <= wait_until:
# time.sleep(1)
# for evt in oms_event_listener.notifications:
# if event['message'] == evt['message']:
# got_it = True
# break
#
# # print("Received external events: %s" % oms_event_listener.notifications)
# if not got_it:
# reterr = "error: didn't get expected test event notification within %d " \
# "secs. (Got %d event notifications.)" % (
# max_wait, len(oms_event_listener.notifications))
#
# print("full_method_name = %s" % full_method_name)
# if reterr:
# tried[full_method_name] = reterr
# format_err(reterr)
#
# return retval, reterr
#
# show_listeners()
#
# if launch_breakpoint:
# breakpoint(locals())
#
# print "Basic verification of the operations:\n"
#
# #----------------------------------------------------------------------
# full_method_name = "hello.ping"
# retval, reterr = run(full_method_name)
# if retval and retval.lower() != "pong":
# error = "expecting 'pong'"
# tried[full_method_name] = error
# format_err(error)
#
# #----------------------------------------------------------------------
# full_method_name = "config.get_platform_types"
# retval, reterr = run(full_method_name)
# if retval and not isinstance(retval, dict):
# error = "expecting a dict"
# tried[full_method_name] = error
# format_err(error)
#
# platform_id = "dummy_platform_id"
#
# #----------------------------------------------------------------------
# full_method_name = "config.get_platform_map"
# retval, reterr = run(full_method_name)
# if retval is not None:
# if isinstance(retval, list):
# if len(retval):
# if isinstance(retval[0], (tuple, list)):
# platform_id = retval[0][0]
# else:
# reterr = "expecting a list of tuples or lists"
# else:
# reterr = "expecting a non-empty list"
# else:
# reterr = "expecting a list"
# if reterr:
# tried[full_method_name] = reterr
# format_err(reterr)
#
# #----------------------------------------------------------------------
# full_method_name = "config.get_platform_metadata"
# retval, reterr = run(full_method_name, platform_id)
# retval, reterr = verify_entry_in_dict(retval, reterr, platform_id)
#
# #----------------------------------------------------------------------
# full_method_name = "attr.get_platform_attributes"
# retval, reterr = run(full_method_name, platform_id)
# retval, reterr = verify_entry_in_dict(retval, reterr, platform_id)
#
# #----------------------------------------------------------------------
# full_method_name = "attr.get_platform_attribute_values"
# retval, reterr = run(full_method_name, platform_id, [])
# retval, reterr = verify_entry_in_dict(retval, reterr, platform_id)
#
# #----------------------------------------------------------------------
# full_method_name = "attr.set_platform_attribute_values"
# retval, reterr = run(full_method_name, platform_id, {})
# retval, reterr = verify_entry_in_dict(retval, reterr, platform_id)
#
# port_id = "dummy_port_id"
#
# #----------------------------------------------------------------------
# full_method_name = "port.get_platform_ports"
# retval, reterr = run(full_method_name, platform_id)
# retval, reterr = verify_entry_in_dict(retval, reterr, platform_id)
# if retval is not None:
# if isinstance(retval, dict):
# if len(retval):
# port_id = retval.keys()[0]
# else:
# reterr = "empty dict of ports for platform %r" % platform_id
# else:
# reterr = "expecting a dict {%r: ...}. got: %s" % (platform_id, type(retval))
# if reterr:
# tried[full_method_name] = reterr
# format_err(reterr)
#
# instrument_id = "dummy_instrument_id"
#
# if reterr is None:
# full_method_name = "port.get_platform_ports"
# retval, reterr = run(full_method_name, "dummy_platform_id")
# orig_retval = retval
# retval, reterr = verify_entry_in_dict(retval, reterr, "dummy_platform_id")
# if retval != INVALID_PLATFORM_ID:
# reterr = "expecting dict {%r: %r}. got: %r" % (
# "dummy_platform_id", INVALID_PLATFORM_ID, orig_retval)
# tried[full_method_name] = reterr
# format_err(reterr)
#
# instrument_id = "dummy_instrument_id"
#
# #----------------------------------------------------------------------
# full_method_name = "instr.connect_instrument"
# retval, reterr = run(full_method_name, platform_id, port_id, instrument_id, {})
# retval, reterr = verify_entry_in_dict(retval, reterr, platform_id)
# retval, reterr = verify_entry_in_dict(retval, reterr, port_id)
# retval, reterr = verify_entry_in_dict(retval, reterr, instrument_id)
#
# connect_instrument_error = reterr
#
# #----------------------------------------------------------------------
# full_method_name = "instr.get_connected_instruments"
# retval, reterr = run(full_method_name, platform_id, port_id)
# retval, reterr = verify_entry_in_dict(retval, reterr, platform_id)
# retval, reterr = verify_entry_in_dict(retval, reterr, port_id)
# # note, in case of error in instr.connect_instrument, don't expect the
# # instrument_id to be reported:
# if connect_instrument_error is None:
# retval, reterr = verify_entry_in_dict(retval, reterr, instrument_id)
#
# #----------------------------------------------------------------------
# full_method_name = "instr.disconnect_instrument"
# retval, reterr = run(full_method_name, platform_id, port_id, instrument_id)
# retval, reterr = verify_entry_in_dict(retval, reterr, platform_id)
# retval, reterr = verify_entry_in_dict(retval, reterr, port_id)
# retval, reterr = verify_entry_in_dict(retval, reterr, instrument_id)
#
# #----------------------------------------------------------------------
# full_method_name = "port.turn_on_platform_port"
# retval, reterr = run(full_method_name, platform_id, port_id)
#
# #----------------------------------------------------------------------
# full_method_name = "port.turn_off_platform_port"
# retval, reterr = run(full_method_name, platform_id, port_id)
#
# #----------------------------------------------------------------------
# url = EVENT_LISTENER_URL
#
# #----------------------------------------------------------------------
# full_method_name = "event.register_event_listener"
# retval, reterr = run(full_method_name, url)
# retval, reterr = verify_entry_in_dict(retval, reterr, url)
#
# #----------------------------------------------------------------------
# full_method_name = "event.get_registered_event_listeners"
# retval, reterr = run(full_method_name)
# urls = retval
# retval, reterr = verify_entry_in_dict(retval, reterr, url)
#
# #----------------------------------------------------------------------
# full_method_name = "event.unregister_event_listener"
# if isinstance(urls, dict):
# # this part just as a convenience to unregister listeners that were
# # left registered by some error in a prior interaction.
# prefix = "http://127.0.0.1:" # or some other needed prefix
# for url2 in urls:
# if url2.find(prefix) >= 0:
# retval, reterr = run(full_method_name, url2)
# retval, reterr = verify_entry_in_dict(retval, reterr, url2)
# if reterr is not None:
# break
# if reterr is None:
# retval, reterr = run(full_method_name, url)
# retval, reterr = verify_entry_in_dict(retval, reterr, url)
#
# #----------------------------------------------------------------------
# full_method_name = "config.get_checksum"
# retval, reterr = run(full_method_name, platform_id)
#
# # the following to specifically verify reception of test event
# if max_wait:
# full_method_name = "event.register_event_listener"
# retval, reterr = run(full_method_name, EVENT_LISTENER_URL)
# retval, reterr = verify_entry_in_dict(retval, reterr, EVENT_LISTENER_URL)
#
# full_method_name = "event.generate_test_event"
# event = {
# 'message' : "fake event triggered from CI using OMS' generate_test_event",
# 'platform_id' : "fake_platform_id",
# 'severity' : "3",
# 'group ' : "power",
# }
# retval, reterr = run(full_method_name, event)
#
# if max_wait:
# verify_test_event_notified(retval, reterr, event)
#
# full_method_name = "event.unregister_event_listener"
# retval, reterr = run(full_method_name, EVENT_LISTENER_URL)
# retval, reterr = verify_entry_in_dict(retval, reterr, EVENT_LISTENER_URL)
# elif not reterr:
# ok_but = "OK (but verification of event reception was not performed)"
# tried[full_method_name] = ok_but
# format_err(ok_but)
#
# show_listeners()
#
# #######################################################################
# print("\nSummary of basic verification:")
# okeys = 0
# for full_method_name, result in sorted(tried.iteritems()):
# print("%20s %-40s: %s" % ("", full_method_name, result))
# if result.startswith("OK"):
# okeys += 1
# print("OK methods %d out of %s" % (okeys, len(tried)))
#
#
# if __name__ == "__main__": # pragma: no cover
#
# import argparse
#
# parser = argparse.ArgumentParser(description="Basic CI-OMS verification program")
# parser.add_argument("-u", "--uri",
# help="RSN OMS URI (default: %s)" % DEFAULT_RSN_OMS_URI,
# default=DEFAULT_RSN_OMS_URI)
# parser.add_argument("-w", "--wait",
# help="Max wait time for test event (default: %d)" % DEFAULT_MAX_WAIT,
# default=DEFAULT_MAX_WAIT)
# parser.add_argument("-b", "--breakpoint",
# help="Launch IPython shell at beginning",
# action='store_const', const=True)
#
# opts = parser.parse_args()
#
# uri = opts.uri
# max_wait = int(opts.wait)
# launch_breakpoint = bool(opts.breakpoint)
#
# main(uri)
|
StarcoderdataPython
|
1765850
|
import cv2
import numpy as np
img = cv2.imread("4.2 face.png")
# load our cascade file
face_cascade = cv2.CascadeClassifier("4.3 frontalface.xml")
gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
# 1.3 is the scale factor; 7 is minNeighbors, the number of neighboring
# detections a candidate must collect before it is accepted as a face
faces = face_cascade.detectMultiScale(gray, 1.3, 7)
# each entry in faces has 4 values:
# x and y are the top-left starting point, w and h are the width and height
for (x, y, w, h) in faces:
cv2.rectangle(img, (x, y), (x + w, y + h), (0, 0, 255), 2)
cv2.imshow("Image", img)
cv2.waitKey(0)
cv2.destroyAllWindows()
|
StarcoderdataPython
|
9739927
|
"""The Modified Differential Multiplier Method (MDMM) for PyTorch."""
from .mdmm import (ConstraintReturn, Constraint, EqConstraint, MaxConstraint, MaxConstraintHard,
MinConstraint, MinConstraintHard, BoundConstraintHard, MDMMReturn, MDMM)
__version__ = '0.1.3'
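# A hypothetical usage sketch (not part of this module). The constraint
# and container classes come from the exports above; the make_optimizer
# wiring is an assumption and should be checked against the package
# README before use:
#
#   import mdmm
#   constraint = mdmm.MaxConstraint(lambda: weights.abs().mean(), 1.0)
#   mdmm_module = mdmm.MDMM([constraint])
#   opt = mdmm_module.make_optimizer(model.parameters(), lr=1e-3)
#   mdmm_return = mdmm_module(loss)      # an MDMMReturn
#   mdmm_return.value.backward()
#   opt.step()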
|
StarcoderdataPython
|
4870823
|
from ..core.ControllerService import ControllerService
class GCPCredentialsControllerService(ControllerService):
def __init__(self, name=None, credentials_location=None, json_path=None, raw_json=None):
super(GCPCredentialsControllerService, self).__init__(name=name)
self.service_class = 'GCPCredentialsControllerService'
if credentials_location is not None:
self.properties['Credentials Location'] = credentials_location
if json_path is not None:
self.properties['Service Account JSON File'] = json_path
if raw_json is not None:
self.properties['Service Account JSON'] = raw_json
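# A minimal usage sketch (an illustration, not from the original module):
# configure the service from a key file on disk. The 'Credentials
# Location' value below is a guess at the expected enum string and should
# be checked against the NiFi documentation.
#
#   creds = GCPCredentialsControllerService(
#       name='gcp-creds',
#       credentials_location='Service Account JSON File',
#       json_path='/path/to/service-account.json',
#   )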
|
StarcoderdataPython
|
1804461
|
from django.db import models
from django.utils.translation import gettext_lazy as _
# class MyModel(models.Model):
#
# title = models.CharField(_('Title'), max_length=100, help_text=_('Description'))
#
# class Meta:
# verbose_name = _('Model')
# verbose_name_plural = _('Models')
#
# def __str__(self):
# return self.title
|
StarcoderdataPython
|
8066411
|
<reponame>git-wwts/pysyte
"""Some keyboard handling code"""
import sys
from pysyte.oss import getch
def _get_chosen_chars(chooser):
while True:
key = getch.get_as_key()
try:
if chooser(key):
return key
except AttributeError:
print(key)
continue
def get_digit():
return _get_chosen_chars(lambda x: x.isdigit())
def get_letter():
return _get_chosen_chars(lambda x: x.isupper() or x.islower())
def quit_on_q():
try:
key = getch.get_as_key()
if key in "qQ":
sys.exit()
return key
except KeyboardInterrupt:
sys.exit()
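if __name__ == "__main__":  # pragma: no cover
    # Small demo sketch (not in the original): exercise the helpers above.
    print("press a digit:")
    print(get_digit())
    print("press a letter:")
    print(get_letter())
    print("press any key (q/Q exits):")
    print(quit_on_q())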
|
StarcoderdataPython
|
6448
|
<gh_stars>0
# MINLP written by GAMS Convert at 01/15/21 11:37:33
#
# Equation counts
# Total E G L N X C B
# 1486 571 111 804 0 0 0 0
#
# Variable counts
# x b i s1s s2s sc si
# Total cont binary integer sos1 sos2 scont sint
# 865 685 180 0 0 0 0 0
# FX 0 0 0 0 0 0 0 0
#
# Nonzero counts
# Total const NL DLL
# 3373 3193 180 0
#
# Reformulation has removed 1 variable and 1 equation
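#
# Layout of the generated model below: nonnegative flow variables
# x2..x595, binary unit-selection variables b596..b775, free auxiliary
# variables x776..x865, then the maximization objective and the
# constraint blocks (linear balances, nonlinear conversions, big-M
# linking bounds).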
from pyomo.environ import *
model = m = ConcreteModel()
m.x2 = Var(within=Reals,bounds=(0,40),initialize=0)
m.x3 = Var(within=Reals,bounds=(0,40),initialize=0)
m.x4 = Var(within=Reals,bounds=(0,40),initialize=0)
m.x5 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x6 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x7 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x8 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x9 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x10 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x11 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x12 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x13 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x14 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x15 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x16 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x17 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x18 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x19 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x20 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x21 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x22 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x23 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x24 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x25 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x26 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x27 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x28 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x29 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x30 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x31 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x32 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x33 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x34 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x35 = Var(within=Reals,bounds=(0,30),initialize=0)
m.x36 = Var(within=Reals,bounds=(0,30),initialize=0)
m.x37 = Var(within=Reals,bounds=(0,30),initialize=0)
m.x38 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x39 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x40 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x41 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x42 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x43 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x44 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x45 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x46 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x47 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x48 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x49 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x50 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x51 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x52 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x53 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x54 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x55 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x56 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x57 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x58 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x59 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x60 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x61 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x62 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x63 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x64 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x65 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x66 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x67 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x68 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x69 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x70 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x71 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x72 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x73 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x74 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x75 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x76 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x77 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x78 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x79 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x80 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x81 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x82 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x83 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x84 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x85 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x86 = Var(within=Reals,bounds=(0,20),initialize=0)
m.x87 = Var(within=Reals,bounds=(0,20),initialize=0)
m.x88 = Var(within=Reals,bounds=(0,20),initialize=0)
m.x89 = Var(within=Reals,bounds=(0,20),initialize=0)
m.x90 = Var(within=Reals,bounds=(0,20),initialize=0)
m.x91 = Var(within=Reals,bounds=(0,20),initialize=0)
m.x92 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x93 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x94 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x95 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x96 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x97 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x98 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x99 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x100 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x101 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x102 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x103 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x104 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x105 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x106 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x107 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x108 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x109 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x110 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x111 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x112 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x113 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x114 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x115 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x116 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x117 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x118 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x119 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x120 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x121 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x122 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x123 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x124 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x125 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x126 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x127 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x128 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x129 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x130 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x131 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x132 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x133 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x134 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x135 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x136 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x137 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x138 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x139 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x140 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x141 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x142 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x143 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x144 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x145 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x146 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x147 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x148 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x149 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x150 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x151 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x152 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x153 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x154 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x155 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x156 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x157 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x158 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x159 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x160 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x161 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x162 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x163 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x164 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x165 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x166 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x167 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x168 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x169 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x170 = Var(within=Reals,bounds=(0,30),initialize=0)
m.x171 = Var(within=Reals,bounds=(0,30),initialize=0)
m.x172 = Var(within=Reals,bounds=(0,30),initialize=0)
m.x173 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x174 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x175 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x176 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x177 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x178 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x179 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x180 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x181 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x182 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x183 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x184 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x185 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x186 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x187 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x188 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x189 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x190 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x191 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x192 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x193 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x194 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x195 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x196 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x197 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x198 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x199 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x200 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x201 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x202 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x203 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x204 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x205 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x206 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x207 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x208 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x209 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x210 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x211 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x212 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x213 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x214 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x215 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x216 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x217 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x218 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x219 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x220 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x221 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x222 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x223 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x224 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x225 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x226 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x227 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x228 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x229 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x230 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x231 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x232 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x233 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x234 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x235 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x236 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x237 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x238 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x239 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x240 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x241 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x242 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x243 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x244 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x245 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x246 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x247 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x248 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x249 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x250 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x251 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x252 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x253 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x254 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x255 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x256 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x257 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x258 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x259 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x260 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x261 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x262 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x263 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x264 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x265 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x266 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x267 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x268 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x269 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x270 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x271 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x272 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x273 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x274 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x275 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x276 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x277 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x278 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x279 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x280 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x281 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x282 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x283 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x284 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x285 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x286 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x287 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x288 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x289 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x290 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x291 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x292 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x293 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x294 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x295 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x296 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x297 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x298 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x299 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x300 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x301 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x302 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x303 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x304 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x305 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x306 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x307 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x308 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x309 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x310 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x311 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x312 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x313 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x314 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x315 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x316 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x317 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x318 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x319 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x320 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x321 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x322 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x323 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x324 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x325 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x326 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x327 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x328 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x329 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x330 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x331 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x332 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x333 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x334 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x335 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x336 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x337 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x338 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x339 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x340 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x341 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x342 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x343 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x344 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x345 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x346 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x347 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x348 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x349 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x350 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x351 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x352 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x353 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x354 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x355 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x356 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x357 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x358 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x359 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x360 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x361 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x362 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x363 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x364 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x365 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x366 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x367 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x368 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x369 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x370 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x371 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x372 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x373 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x374 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x375 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x376 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x377 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x378 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x379 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x380 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x381 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x382 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x383 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x384 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x385 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x386 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x387 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x388 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x389 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x390 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x391 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x392 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x393 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x394 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x395 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x396 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x397 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x398 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x399 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x400 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x401 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x402 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x403 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x404 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x405 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x406 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x407 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x408 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x409 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x410 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x411 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x412 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x413 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x414 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x415 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x416 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x417 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x418 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x419 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x420 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x421 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x422 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x423 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x424 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x425 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x426 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x427 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x428 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x429 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x430 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x431 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x432 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x433 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x434 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x435 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x436 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x437 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x438 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x439 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x440 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x441 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x442 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x443 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x444 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x445 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x446 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x447 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x448 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x449 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x450 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x451 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x452 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x453 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x454 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x455 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x456 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x457 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x458 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x459 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x460 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x461 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x462 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x463 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x464 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x465 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x466 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x467 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x468 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x469 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x470 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x471 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x472 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x473 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x474 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x475 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x476 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x477 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x478 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x479 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x480 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x481 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x482 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x483 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x484 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x485 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x486 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x487 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x488 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x489 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x490 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x491 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x492 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x493 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x494 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x495 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x496 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x497 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x498 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x499 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x500 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x501 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x502 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x503 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x504 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x505 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x506 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x507 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x508 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x509 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x510 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x511 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x512 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x513 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x514 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x515 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x516 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x517 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x518 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x519 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x520 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x521 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x522 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x523 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x524 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x525 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x526 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x527 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x528 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x529 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x530 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x531 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x532 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x533 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x534 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x535 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x536 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x537 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x538 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x539 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x540 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x541 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x542 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x543 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x544 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x545 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x546 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x547 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x548 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x549 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x550 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x551 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x552 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x553 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x554 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x555 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x556 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x557 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x558 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x559 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x560 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x561 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x562 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x563 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x564 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x565 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x566 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x567 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x568 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x569 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x570 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x571 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x572 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x573 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x574 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x575 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x576 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x577 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x578 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x579 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x580 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x581 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x582 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x583 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x584 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x585 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x586 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x587 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x588 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x589 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x590 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x591 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x592 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x593 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x594 = Var(within=Reals,bounds=(0,None),initialize=0)
m.x595 = Var(within=Reals,bounds=(0,None),initialize=0)
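# Binary unit-selection variables (b596..b775)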
m.b596 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b597 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b598 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b599 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b600 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b601 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b602 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b603 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b604 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b605 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b606 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b607 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b608 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b609 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b610 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b611 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b612 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b613 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b614 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b615 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b616 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b617 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b618 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b619 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b620 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b621 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b622 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b623 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b624 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b625 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b626 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b627 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b628 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b629 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b630 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b631 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b632 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b633 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b634 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b635 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b636 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b637 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b638 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b639 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b640 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b641 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b642 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b643 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b644 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b645 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b646 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b647 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b648 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b649 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b650 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b651 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b652 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b653 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b654 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b655 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b656 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b657 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b658 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b659 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b660 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b661 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b662 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b663 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b664 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b665 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b666 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b667 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b668 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b669 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b670 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b671 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b672 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b673 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b674 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b675 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b676 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b677 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b678 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b679 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b680 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b681 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b682 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b683 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b684 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b685 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b686 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b687 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b688 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b689 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b690 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b691 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b692 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b693 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b694 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b695 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b696 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b697 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b698 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b699 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b700 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b701 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b702 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b703 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b704 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b705 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b706 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b707 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b708 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b709 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b710 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b711 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b712 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b713 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b714 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b715 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b716 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b717 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b718 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b719 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b720 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b721 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b722 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b723 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b724 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b725 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b726 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b727 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b728 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b729 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b730 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b731 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b732 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b733 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b734 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b735 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b736 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b737 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b738 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b739 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b740 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b741 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b742 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b743 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b744 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b745 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b746 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b747 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b748 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b749 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b750 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b751 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b752 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b753 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b754 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b755 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b756 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b757 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b758 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b759 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b760 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b761 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b762 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b763 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b764 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b765 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b766 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b767 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b768 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b769 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b770 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b771 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b772 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b773 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b774 = Var(within=Binary,bounds=(0,1),initialize=0)
m.b775 = Var(within=Binary,bounds=(0,1),initialize=0)
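# Free (unbounded) auxiliary variables (x776..x865)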
m.x776 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x777 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x778 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x779 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x780 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x781 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x782 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x783 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x784 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x785 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x786 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x787 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x788 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x789 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x790 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x791 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x792 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x793 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x794 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x795 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x796 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x797 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x798 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x799 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x800 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x801 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x802 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x803 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x804 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x805 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x806 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x807 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x808 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x809 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x810 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x811 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x812 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x813 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x814 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x815 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x816 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x817 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x818 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x819 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x820 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x821 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x822 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x823 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x824 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x825 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x826 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x827 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x828 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x829 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x830 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x831 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x832 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x833 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x834 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x835 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x836 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x837 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x838 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x839 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x840 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x841 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x842 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x843 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x844 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x845 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x846 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x847 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x848 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x849 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x850 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x851 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x852 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x853 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x854 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x855 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x856 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x857 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x858 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x859 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x860 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x861 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x862 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x863 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x864 = Var(within=Reals,bounds=(None,None),initialize=0)
m.x865 = Var(within=Reals,bounds=(None,None),initialize=0)
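# Objective: maximize product revenues minus feed/flow costs and the
# fixed charges attached to the binary unit selections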
m.obj = Objective(expr= - m.x2 - m.x3 - m.x4 + 5*m.x20 + 10*m.x21 + 5*m.x22 - 2*m.x35 - m.x36 - 2*m.x37 - 10*m.x86
- 5*m.x87 - 5*m.x88 - 5*m.x89 - 5*m.x90 - 5*m.x91 + 40*m.x110 + 30*m.x111 + 15*m.x112
+ 15*m.x113 + 20*m.x114 + 25*m.x115 + 10*m.x116 + 30*m.x117 + 40*m.x118 + 30*m.x119 + 20*m.x120
+ 20*m.x121 + 35*m.x122 + 50*m.x123 + 20*m.x124 + 20*m.x125 + 30*m.x126 + 35*m.x127 + 25*m.x128
+ 50*m.x129 + 10*m.x130 + 15*m.x131 + 20*m.x132 + 20*m.x133 + 30*m.x155 + 40*m.x156 + 40*m.x157
- m.x170 - m.x171 - m.x172 + 80*m.x194 + 90*m.x195 + 120*m.x196 + 285*m.x197 + 390*m.x198
+ 350*m.x199 + 290*m.x200 + 405*m.x201 + 190*m.x202 + 280*m.x203 + 400*m.x204 + 430*m.x205
+ 290*m.x206 + 300*m.x207 + 240*m.x208 + 350*m.x209 + 250*m.x210 + 300*m.x211 - 5*m.b686
- 4*m.b687 - 6*m.b688 - 8*m.b689 - 7*m.b690 - 6*m.b691 - 6*m.b692 - 9*m.b693 - 4*m.b694
- 10*m.b695 - 9*m.b696 - 5*m.b697 - 6*m.b698 - 10*m.b699 - 6*m.b700 - 7*m.b701 - 7*m.b702
- 4*m.b703 - 4*m.b704 - 3*m.b705 - 2*m.b706 - 5*m.b707 - 6*m.b708 - 7*m.b709 - 2*m.b710
- 5*m.b711 - 2*m.b712 - 4*m.b713 - 7*m.b714 - 4*m.b715 - 3*m.b716 - 9*m.b717 - 3*m.b718
- 7*m.b719 - 2*m.b720 - 9*m.b721 - 3*m.b722 - m.b723 - 9*m.b724 - 2*m.b725 - 6*m.b726 - 3*m.b727
- 4*m.b728 - 8*m.b729 - m.b730 - 2*m.b731 - 5*m.b732 - 2*m.b733 - 3*m.b734 - 4*m.b735 - 3*m.b736
- 5*m.b737 - 7*m.b738 - 6*m.b739 - 2*m.b740 - 8*m.b741 - 4*m.b742 - m.b743 - 4*m.b744 - m.b745
- 2*m.b746 - 5*m.b747 - 2*m.b748 - 9*m.b749 - 2*m.b750 - 9*m.b751 - 5*m.b752 - 8*m.b753
- 4*m.b754 - 2*m.b755 - 3*m.b756 - 8*m.b757 - 10*m.b758 - 6*m.b759 - 3*m.b760 - 4*m.b761
- 8*m.b762 - 7*m.b763 - 7*m.b764 - 3*m.b765 - 9*m.b766 - 4*m.b767 - 8*m.b768 - 6*m.b769
- 2*m.b770 - m.b771 - 3*m.b772 - 8*m.b773 - 3*m.b774 - 4*m.b775, sense=maximize)
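# Linear mass-balance constraints at stream splitters and mixers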
m.c2 = Constraint(expr= m.x2 - m.x5 - m.x8 == 0)
m.c3 = Constraint(expr= m.x3 - m.x6 - m.x9 == 0)
m.c4 = Constraint(expr= m.x4 - m.x7 - m.x10 == 0)
m.c5 = Constraint(expr= - m.x11 - m.x14 + m.x17 == 0)
m.c6 = Constraint(expr= - m.x12 - m.x15 + m.x18 == 0)
m.c7 = Constraint(expr= - m.x13 - m.x16 + m.x19 == 0)
m.c8 = Constraint(expr= m.x17 - m.x20 - m.x23 == 0)
m.c9 = Constraint(expr= m.x18 - m.x21 - m.x24 == 0)
m.c10 = Constraint(expr= m.x19 - m.x22 - m.x25 == 0)
m.c11 = Constraint(expr= m.x23 - m.x26 - m.x29 - m.x32 == 0)
m.c12 = Constraint(expr= m.x24 - m.x27 - m.x30 - m.x33 == 0)
m.c13 = Constraint(expr= m.x25 - m.x28 - m.x31 - m.x34 == 0)
m.c14 = Constraint(expr= m.x38 - m.x47 - m.x50 == 0)
m.c15 = Constraint(expr= m.x39 - m.x48 - m.x51 == 0)
m.c16 = Constraint(expr= m.x40 - m.x49 - m.x52 == 0)
m.c17 = Constraint(expr= m.x44 - m.x53 - m.x56 - m.x59 == 0)
m.c18 = Constraint(expr= m.x45 - m.x54 - m.x57 - m.x60 == 0)
m.c19 = Constraint(expr= m.x46 - m.x55 - m.x58 - m.x61 == 0)
m.c20 = Constraint(expr= m.x68 - m.x80 - m.x83 == 0)
m.c21 = Constraint(expr= m.x69 - m.x81 - m.x84 == 0)
m.c22 = Constraint(expr= m.x70 - m.x82 - m.x85 == 0)
m.c23 = Constraint(expr= - m.x71 - m.x89 + m.x92 == 0)
m.c24 = Constraint(expr= - m.x72 - m.x90 + m.x93 == 0)
m.c25 = Constraint(expr= - m.x73 - m.x91 + m.x94 == 0)
m.c26 = Constraint(expr= m.x74 - m.x95 - m.x98 == 0)
m.c27 = Constraint(expr= m.x75 - m.x96 - m.x99 == 0)
m.c28 = Constraint(expr= m.x76 - m.x97 - m.x100 == 0)
m.c29 = Constraint(expr= m.x77 - m.x101 - m.x104 - m.x107 == 0)
m.c30 = Constraint(expr= m.x78 - m.x102 - m.x105 - m.x108 == 0)
m.c31 = Constraint(expr= m.x79 - m.x103 - m.x106 - m.x109 == 0)
m.c32 = Constraint(expr= m.x134 - m.x137 == 0)
m.c33 = Constraint(expr= m.x135 - m.x138 == 0)
m.c34 = Constraint(expr= m.x136 - m.x139 == 0)
m.c35 = Constraint(expr= m.x137 - m.x140 - m.x143 == 0)
m.c36 = Constraint(expr= m.x138 - m.x141 - m.x144 == 0)
m.c37 = Constraint(expr= m.x139 - m.x142 - m.x145 == 0)
m.c38 = Constraint(expr= - m.x146 - m.x149 + m.x152 == 0)
m.c39 = Constraint(expr= - m.x147 - m.x150 + m.x153 == 0)
m.c40 = Constraint(expr= - m.x148 - m.x151 + m.x154 == 0)
m.c41 = Constraint(expr= m.x152 - m.x155 - m.x158 == 0)
m.c42 = Constraint(expr= m.x153 - m.x156 - m.x159 == 0)
m.c43 = Constraint(expr= m.x154 - m.x157 - m.x160 == 0)
m.c44 = Constraint(expr= m.x158 - m.x161 - m.x164 - m.x167 == 0)
m.c45 = Constraint(expr= m.x159 - m.x162 - m.x165 - m.x168 == 0)
m.c46 = Constraint(expr= m.x160 - m.x163 - m.x166 - m.x169 == 0)
m.c47 = Constraint(expr= m.x173 - m.x182 - m.x185 == 0)
m.c48 = Constraint(expr= m.x174 - m.x183 - m.x186 == 0)
m.c49 = Constraint(expr= m.x175 - m.x184 - m.x187 == 0)
m.c50 = Constraint(expr= m.x179 - m.x188 - m.x191 - m.x194 == 0)
m.c51 = Constraint(expr= m.x180 - m.x189 - m.x192 - m.x195 == 0)
m.c52 = Constraint(expr= m.x181 - m.x190 - m.x193 - m.x196 == 0)
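# Nonlinear conversion constraints; the (0.001 + 0.999*b) factors keep
# the log terms well-defined when a unit's binary is switched off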
m.c53 = Constraint(expr=(m.x224/(0.001 + 0.999*m.b596) - log(1 + m.x212/(0.001 + 0.999*m.b596)))*(0.001 + 0.999*m.b596)
<= 0)
m.c54 = Constraint(expr=(m.x225/(0.001 + 0.999*m.b597) - log(1 + m.x213/(0.001 + 0.999*m.b597)))*(0.001 + 0.999*m.b597)
<= 0)
m.c55 = Constraint(expr=(m.x226/(0.001 + 0.999*m.b598) - log(1 + m.x214/(0.001 + 0.999*m.b598)))*(0.001 + 0.999*m.b598)
<= 0)
m.c56 = Constraint(expr= m.x215 == 0)
m.c57 = Constraint(expr= m.x216 == 0)
m.c58 = Constraint(expr= m.x217 == 0)
m.c59 = Constraint(expr= m.x227 == 0)
m.c60 = Constraint(expr= m.x228 == 0)
m.c61 = Constraint(expr= m.x229 == 0)
m.c62 = Constraint(expr= m.x5 - m.x212 - m.x215 == 0)
m.c63 = Constraint(expr= m.x6 - m.x213 - m.x216 == 0)
m.c64 = Constraint(expr= m.x7 - m.x214 - m.x217 == 0)
m.c65 = Constraint(expr= m.x11 - m.x224 - m.x227 == 0)
m.c66 = Constraint(expr= m.x12 - m.x225 - m.x228 == 0)
m.c67 = Constraint(expr= m.x13 - m.x226 - m.x229 == 0)
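# Big-M bounds linking stream flows to the unit-selection binaries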
m.c68 = Constraint(expr= m.x212 - 40*m.b596 <= 0)
m.c69 = Constraint(expr= m.x213 - 40*m.b597 <= 0)
m.c70 = Constraint(expr= m.x214 - 40*m.b598 <= 0)
m.c71 = Constraint(expr= m.x215 + 40*m.b596 <= 40)
m.c72 = Constraint(expr= m.x216 + 40*m.b597 <= 40)
m.c73 = Constraint(expr= m.x217 + 40*m.b598 <= 40)
m.c74 = Constraint(expr= m.x224 - 3.71357206670431*m.b596 <= 0)
m.c75 = Constraint(expr= m.x225 - 3.71357206670431*m.b597 <= 0)
m.c76 = Constraint(expr= m.x226 - 3.71357206670431*m.b598 <= 0)
m.c77 = Constraint(expr= m.x227 + 3.71357206670431*m.b596 <= 3.71357206670431)
m.c78 = Constraint(expr= m.x228 + 3.71357206670431*m.b597 <= 3.71357206670431)
m.c79 = Constraint(expr= m.x229 + 3.71357206670431*m.b598 <= 3.71357206670431)
m.c80 = Constraint(expr=(m.x230/(0.001 + 0.999*m.b599) - 1.2*log(1 + m.x218/(0.001 + 0.999*m.b599)))*(0.001 + 0.999*
m.b599) <= 0)
m.c81 = Constraint(expr=(m.x231/(0.001 + 0.999*m.b600) - 1.2*log(1 + m.x219/(0.001 + 0.999*m.b600)))*(0.001 + 0.999*
m.b600) <= 0)
m.c82 = Constraint(expr=(m.x232/(0.001 + 0.999*m.b601) - 1.2*log(1 + m.x220/(0.001 + 0.999*m.b601)))*(0.001 + 0.999*
m.b601) <= 0)
m.c83 = Constraint(expr= m.x221 == 0)
m.c84 = Constraint(expr= m.x222 == 0)
m.c85 = Constraint(expr= m.x223 == 0)
m.c86 = Constraint(expr= m.x233 == 0)
m.c87 = Constraint(expr= m.x234 == 0)
m.c88 = Constraint(expr= m.x235 == 0)
m.c89 = Constraint(expr= m.x8 - m.x218 - m.x221 == 0)
m.c90 = Constraint(expr= m.x9 - m.x219 - m.x222 == 0)
m.c91 = Constraint(expr= m.x10 - m.x220 - m.x223 == 0)
m.c92 = Constraint(expr= m.x14 - m.x230 - m.x233 == 0)
m.c93 = Constraint(expr= m.x15 - m.x231 - m.x234 == 0)
m.c94 = Constraint(expr= m.x16 - m.x232 - m.x235 == 0)
m.c95 = Constraint(expr= m.x218 - 40*m.b599 <= 0)
m.c96 = Constraint(expr= m.x219 - 40*m.b600 <= 0)
m.c97 = Constraint(expr= m.x220 - 40*m.b601 <= 0)
m.c98 = Constraint(expr= m.x221 + 40*m.b599 <= 40)
m.c99 = Constraint(expr= m.x222 + 40*m.b600 <= 40)
m.c100 = Constraint(expr= m.x223 + 40*m.b601 <= 40)
m.c101 = Constraint(expr= m.x230 - 4.45628648004517*m.b599 <= 0)
m.c102 = Constraint(expr= m.x231 - 4.45628648004517*m.b600 <= 0)
m.c103 = Constraint(expr= m.x232 - 4.45628648004517*m.b601 <= 0)
m.c104 = Constraint(expr= m.x233 + 4.45628648004517*m.b599 <= 4.45628648004517)
m.c105 = Constraint(expr= m.x234 + 4.45628648004517*m.b600 <= 4.45628648004517)
m.c106 = Constraint(expr= m.x235 + 4.45628648004517*m.b601 <= 4.45628648004517)
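# Linear (fixed-conversion) units reuse the same disaggregation but replace the
# log inequality with an equality on the active copies, e.g. x260 = 0.75*x236
# below; the zero-fixing rows and indicator bounds play the same roles as above.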
m.c107 = Constraint(expr= - 0.75*m.x236 + m.x260 == 0)
m.c108 = Constraint(expr= - 0.75*m.x237 + m.x261 == 0)
m.c109 = Constraint(expr= - 0.75*m.x238 + m.x262 == 0)
m.c110 = Constraint(expr= m.x239 == 0)
m.c111 = Constraint(expr= m.x240 == 0)
m.c112 = Constraint(expr= m.x241 == 0)
m.c113 = Constraint(expr= m.x263 == 0)
m.c114 = Constraint(expr= m.x264 == 0)
m.c115 = Constraint(expr= m.x265 == 0)
m.c116 = Constraint(expr= m.x26 - m.x236 - m.x239 == 0)
m.c117 = Constraint(expr= m.x27 - m.x237 - m.x240 == 0)
m.c118 = Constraint(expr= m.x28 - m.x238 - m.x241 == 0)
m.c119 = Constraint(expr= m.x38 - m.x260 - m.x263 == 0)
m.c120 = Constraint(expr= m.x39 - m.x261 - m.x264 == 0)
m.c121 = Constraint(expr= m.x40 - m.x262 - m.x265 == 0)
m.c122 = Constraint(expr= m.x236 - 4.45628648004517*m.b602 <= 0)
m.c123 = Constraint(expr= m.x237 - 4.45628648004517*m.b603 <= 0)
m.c124 = Constraint(expr= m.x238 - 4.45628648004517*m.b604 <= 0)
m.c125 = Constraint(expr= m.x239 + 4.45628648004517*m.b602 <= 4.45628648004517)
m.c126 = Constraint(expr= m.x240 + 4.45628648004517*m.b603 <= 4.45628648004517)
m.c127 = Constraint(expr= m.x241 + 4.45628648004517*m.b604 <= 4.45628648004517)
m.c128 = Constraint(expr= m.x260 - 3.34221486003388*m.b602 <= 0)
m.c129 = Constraint(expr= m.x261 - 3.34221486003388*m.b603 <= 0)
m.c130 = Constraint(expr= m.x262 - 3.34221486003388*m.b604 <= 0)
m.c131 = Constraint(expr= m.x263 + 3.34221486003388*m.b602 <= 3.34221486003388)
m.c132 = Constraint(expr= m.x264 + 3.34221486003388*m.b603 <= 3.34221486003388)
m.c133 = Constraint(expr= m.x265 + 3.34221486003388*m.b604 <= 3.34221486003388)
m.c134 = Constraint(expr=(m.x266/(0.001 + 0.999*m.b605) - 1.5*log(1 + m.x242/(0.001 + 0.999*m.b605)))*(0.001 + 0.999*
m.b605) <= 0)
m.c135 = Constraint(expr=(m.x267/(0.001 + 0.999*m.b606) - 1.5*log(1 + m.x243/(0.001 + 0.999*m.b606)))*(0.001 + 0.999*
m.b606) <= 0)
m.c136 = Constraint(expr=(m.x268/(0.001 + 0.999*m.b607) - 1.5*log(1 + m.x244/(0.001 + 0.999*m.b607)))*(0.001 + 0.999*
m.b607) <= 0)
m.c137 = Constraint(expr= m.x245 == 0)
m.c138 = Constraint(expr= m.x246 == 0)
m.c139 = Constraint(expr= m.x247 == 0)
m.c140 = Constraint(expr= m.x272 == 0)
m.c141 = Constraint(expr= m.x273 == 0)
m.c142 = Constraint(expr= m.x274 == 0)
m.c143 = Constraint(expr= m.x29 - m.x242 - m.x245 == 0)
m.c144 = Constraint(expr= m.x30 - m.x243 - m.x246 == 0)
m.c145 = Constraint(expr= m.x31 - m.x244 - m.x247 == 0)
m.c146 = Constraint(expr= m.x41 - m.x266 - m.x272 == 0)
m.c147 = Constraint(expr= m.x42 - m.x267 - m.x273 == 0)
m.c148 = Constraint(expr= m.x43 - m.x268 - m.x274 == 0)
m.c149 = Constraint(expr= m.x242 - 4.45628648004517*m.b605 <= 0)
m.c150 = Constraint(expr= m.x243 - 4.45628648004517*m.b606 <= 0)
m.c151 = Constraint(expr= m.x244 - 4.45628648004517*m.b607 <= 0)
m.c152 = Constraint(expr= m.x245 + 4.45628648004517*m.b605 <= 4.45628648004517)
m.c153 = Constraint(expr= m.x246 + 4.45628648004517*m.b606 <= 4.45628648004517)
m.c154 = Constraint(expr= m.x247 + 4.45628648004517*m.b607 <= 4.45628648004517)
m.c155 = Constraint(expr= m.x266 - 2.54515263975353*m.b605 <= 0)
m.c156 = Constraint(expr= m.x267 - 2.54515263975353*m.b606 <= 0)
m.c157 = Constraint(expr= m.x268 - 2.54515263975353*m.b607 <= 0)
m.c158 = Constraint(expr= m.x272 + 2.54515263975353*m.b605 <= 2.54515263975353)
m.c159 = Constraint(expr= m.x273 + 2.54515263975353*m.b606 <= 2.54515263975353)
m.c160 = Constraint(expr= m.x274 + 2.54515263975353*m.b607 <= 2.54515263975353)
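# A two-feed unit: the active output copy is pinned to both feeds
# (x278 = x248 and x278 = 0.5*x254), which forces a fixed 1:2 feed ratio
# x248 : x254 whenever the corresponding binary selects the unit.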
m.c161 = Constraint(expr= - m.x248 + m.x278 == 0)
m.c162 = Constraint(expr= - m.x249 + m.x279 == 0)
m.c163 = Constraint(expr= - m.x250 + m.x280 == 0)
m.c164 = Constraint(expr= - 0.5*m.x254 + m.x278 == 0)
m.c165 = Constraint(expr= - 0.5*m.x255 + m.x279 == 0)
m.c166 = Constraint(expr= - 0.5*m.x256 + m.x280 == 0)
m.c167 = Constraint(expr= m.x251 == 0)
m.c168 = Constraint(expr= m.x252 == 0)
m.c169 = Constraint(expr= m.x253 == 0)
m.c170 = Constraint(expr= m.x257 == 0)
m.c171 = Constraint(expr= m.x258 == 0)
m.c172 = Constraint(expr= m.x259 == 0)
m.c173 = Constraint(expr= m.x281 == 0)
m.c174 = Constraint(expr= m.x282 == 0)
m.c175 = Constraint(expr= m.x283 == 0)
m.c176 = Constraint(expr= m.x32 - m.x248 - m.x251 == 0)
m.c177 = Constraint(expr= m.x33 - m.x249 - m.x252 == 0)
m.c178 = Constraint(expr= m.x34 - m.x250 - m.x253 == 0)
m.c179 = Constraint(expr= m.x35 - m.x254 - m.x257 == 0)
m.c180 = Constraint(expr= m.x36 - m.x255 - m.x258 == 0)
m.c181 = Constraint(expr= m.x37 - m.x256 - m.x259 == 0)
m.c182 = Constraint(expr= m.x44 - m.x278 - m.x281 == 0)
m.c183 = Constraint(expr= m.x45 - m.x279 - m.x282 == 0)
m.c184 = Constraint(expr= m.x46 - m.x280 - m.x283 == 0)
m.c185 = Constraint(expr= m.x248 - 4.45628648004517*m.b608 <= 0)
m.c186 = Constraint(expr= m.x249 - 4.45628648004517*m.b609 <= 0)
m.c187 = Constraint(expr= m.x250 - 4.45628648004517*m.b610 <= 0)
m.c188 = Constraint(expr= m.x251 + 4.45628648004517*m.b608 <= 4.45628648004517)
m.c189 = Constraint(expr= m.x252 + 4.45628648004517*m.b609 <= 4.45628648004517)
m.c190 = Constraint(expr= m.x253 + 4.45628648004517*m.b610 <= 4.45628648004517)
m.c191 = Constraint(expr= m.x254 - 30*m.b608 <= 0)
m.c192 = Constraint(expr= m.x255 - 30*m.b609 <= 0)
m.c193 = Constraint(expr= m.x256 - 30*m.b610 <= 0)
m.c194 = Constraint(expr= m.x257 + 30*m.b608 <= 30)
m.c195 = Constraint(expr= m.x258 + 30*m.b609 <= 30)
m.c196 = Constraint(expr= m.x259 + 30*m.b610 <= 30)
m.c197 = Constraint(expr= m.x278 - 15*m.b608 <= 0)
m.c198 = Constraint(expr= m.x279 - 15*m.b609 <= 0)
m.c199 = Constraint(expr= m.x280 - 15*m.b610 <= 0)
m.c200 = Constraint(expr= m.x281 + 15*m.b608 <= 15)
m.c201 = Constraint(expr= m.x282 + 15*m.b609 <= 15)
m.c202 = Constraint(expr= m.x283 + 15*m.b610 <= 15)
m.c203 = Constraint(expr=(m.x314/(0.001 + 0.999*m.b611) - 1.25*log(1 + m.x284/(0.001 + 0.999*m.b611)))*(0.001 + 0.999*
m.b611) <= 0)
m.c204 = Constraint(expr=(m.x315/(0.001 + 0.999*m.b612) - 1.25*log(1 + m.x285/(0.001 + 0.999*m.b612)))*(0.001 + 0.999*
m.b612) <= 0)
m.c205 = Constraint(expr=(m.x316/(0.001 + 0.999*m.b613) - 1.25*log(1 + m.x286/(0.001 + 0.999*m.b613)))*(0.001 + 0.999*
m.b613) <= 0)
m.c206 = Constraint(expr= m.x287 == 0)
m.c207 = Constraint(expr= m.x288 == 0)
m.c208 = Constraint(expr= m.x289 == 0)
m.c209 = Constraint(expr= m.x320 == 0)
m.c210 = Constraint(expr= m.x321 == 0)
m.c211 = Constraint(expr= m.x322 == 0)
m.c212 = Constraint(expr= m.x47 - m.x284 - m.x287 == 0)
m.c213 = Constraint(expr= m.x48 - m.x285 - m.x288 == 0)
m.c214 = Constraint(expr= m.x49 - m.x286 - m.x289 == 0)
m.c215 = Constraint(expr= m.x62 - m.x314 - m.x320 == 0)
m.c216 = Constraint(expr= m.x63 - m.x315 - m.x321 == 0)
m.c217 = Constraint(expr= m.x64 - m.x316 - m.x322 == 0)
m.c218 = Constraint(expr= m.x284 - 3.34221486003388*m.b611 <= 0)
m.c219 = Constraint(expr= m.x285 - 3.34221486003388*m.b612 <= 0)
m.c220 = Constraint(expr= m.x286 - 3.34221486003388*m.b613 <= 0)
m.c221 = Constraint(expr= m.x287 + 3.34221486003388*m.b611 <= 3.34221486003388)
m.c222 = Constraint(expr= m.x288 + 3.34221486003388*m.b612 <= 3.34221486003388)
m.c223 = Constraint(expr= m.x289 + 3.34221486003388*m.b613 <= 3.34221486003388)
m.c224 = Constraint(expr= m.x314 - 1.83548069293539*m.b611 <= 0)
m.c225 = Constraint(expr= m.x315 - 1.83548069293539*m.b612 <= 0)
m.c226 = Constraint(expr= m.x316 - 1.83548069293539*m.b613 <= 0)
m.c227 = Constraint(expr= m.x320 + 1.83548069293539*m.b611 <= 1.83548069293539)
m.c228 = Constraint(expr= m.x321 + 1.83548069293539*m.b612 <= 1.83548069293539)
m.c229 = Constraint(expr= m.x322 + 1.83548069293539*m.b613 <= 1.83548069293539)
m.c230 = Constraint(expr=(m.x326/(0.001 + 0.999*m.b614) - 0.9*log(1 + m.x290/(0.001 + 0.999*m.b614)))*(0.001 + 0.999*
m.b614) <= 0)
m.c231 = Constraint(expr=(m.x327/(0.001 + 0.999*m.b615) - 0.9*log(1 + m.x291/(0.001 + 0.999*m.b615)))*(0.001 + 0.999*
m.b615) <= 0)
m.c232 = Constraint(expr=(m.x328/(0.001 + 0.999*m.b616) - 0.9*log(1 + m.x292/(0.001 + 0.999*m.b616)))*(0.001 + 0.999*
m.b616) <= 0)
m.c233 = Constraint(expr= m.x293 == 0)
m.c234 = Constraint(expr= m.x294 == 0)
m.c235 = Constraint(expr= m.x295 == 0)
m.c236 = Constraint(expr= m.x332 == 0)
m.c237 = Constraint(expr= m.x333 == 0)
m.c238 = Constraint(expr= m.x334 == 0)
m.c239 = Constraint(expr= m.x50 - m.x290 - m.x293 == 0)
m.c240 = Constraint(expr= m.x51 - m.x291 - m.x294 == 0)
m.c241 = Constraint(expr= m.x52 - m.x292 - m.x295 == 0)
m.c242 = Constraint(expr= m.x65 - m.x326 - m.x332 == 0)
m.c243 = Constraint(expr= m.x66 - m.x327 - m.x333 == 0)
m.c244 = Constraint(expr= m.x67 - m.x328 - m.x334 == 0)
m.c245 = Constraint(expr= m.x290 - 3.34221486003388*m.b614 <= 0)
m.c246 = Constraint(expr= m.x291 - 3.34221486003388*m.b615 <= 0)
m.c247 = Constraint(expr= m.x292 - 3.34221486003388*m.b616 <= 0)
m.c248 = Constraint(expr= m.x293 + 3.34221486003388*m.b614 <= 3.34221486003388)
m.c249 = Constraint(expr= m.x294 + 3.34221486003388*m.b615 <= 3.34221486003388)
m.c250 = Constraint(expr= m.x295 + 3.34221486003388*m.b616 <= 3.34221486003388)
m.c251 = Constraint(expr= m.x326 - 1.32154609891348*m.b614 <= 0)
m.c252 = Constraint(expr= m.x327 - 1.32154609891348*m.b615 <= 0)
m.c253 = Constraint(expr= m.x328 - 1.32154609891348*m.b616 <= 0)
m.c254 = Constraint(expr= m.x332 + 1.32154609891348*m.b614 <= 1.32154609891348)
m.c255 = Constraint(expr= m.x333 + 1.32154609891348*m.b615 <= 1.32154609891348)
m.c256 = Constraint(expr= m.x334 + 1.32154609891348*m.b616 <= 1.32154609891348)
m.c257 = Constraint(expr=(m.x338/(0.001 + 0.999*m.b617) - log(1 + m.x269/(0.001 + 0.999*m.b617)))*(0.001 + 0.999*m.b617)
<= 0)
m.c258 = Constraint(expr=(m.x339/(0.001 + 0.999*m.b618) - log(1 + m.x270/(0.001 + 0.999*m.b618)))*(0.001 + 0.999*m.b618)
<= 0)
m.c259 = Constraint(expr=(m.x340/(0.001 + 0.999*m.b619) - log(1 + m.x271/(0.001 + 0.999*m.b619)))*(0.001 + 0.999*m.b619)
<= 0)
m.c260 = Constraint(expr= m.x275 == 0)
m.c261 = Constraint(expr= m.x276 == 0)
m.c262 = Constraint(expr= m.x277 == 0)
m.c263 = Constraint(expr= m.x341 == 0)
m.c264 = Constraint(expr= m.x342 == 0)
m.c265 = Constraint(expr= m.x343 == 0)
m.c266 = Constraint(expr= m.x41 - m.x269 - m.x275 == 0)
m.c267 = Constraint(expr= m.x42 - m.x270 - m.x276 == 0)
m.c268 = Constraint(expr= m.x43 - m.x271 - m.x277 == 0)
m.c269 = Constraint(expr= m.x68 - m.x338 - m.x341 == 0)
m.c270 = Constraint(expr= m.x69 - m.x339 - m.x342 == 0)
m.c271 = Constraint(expr= m.x70 - m.x340 - m.x343 == 0)
m.c272 = Constraint(expr= m.x269 - 2.54515263975353*m.b617 <= 0)
m.c273 = Constraint(expr= m.x270 - 2.54515263975353*m.b618 <= 0)
m.c274 = Constraint(expr= m.x271 - 2.54515263975353*m.b619 <= 0)
m.c275 = Constraint(expr= m.x275 + 2.54515263975353*m.b617 <= 2.54515263975353)
m.c276 = Constraint(expr= m.x276 + 2.54515263975353*m.b618 <= 2.54515263975353)
m.c277 = Constraint(expr= m.x277 + 2.54515263975353*m.b619 <= 2.54515263975353)
m.c278 = Constraint(expr= m.x338 - 1.26558121681553*m.b617 <= 0)
m.c279 = Constraint(expr= m.x339 - 1.26558121681553*m.b618 <= 0)
m.c280 = Constraint(expr= m.x340 - 1.26558121681553*m.b619 <= 0)
m.c281 = Constraint(expr= m.x341 + 1.26558121681553*m.b617 <= 1.26558121681553)
m.c282 = Constraint(expr= m.x342 + 1.26558121681553*m.b618 <= 1.26558121681553)
m.c283 = Constraint(expr= m.x343 + 1.26558121681553*m.b619 <= 1.26558121681553)
m.c284 = Constraint(expr= - 0.9*m.x296 + m.x344 == 0)
m.c285 = Constraint(expr= - 0.9*m.x297 + m.x345 == 0)
m.c286 = Constraint(expr= - 0.9*m.x298 + m.x346 == 0)
m.c287 = Constraint(expr= m.x299 == 0)
m.c288 = Constraint(expr= m.x300 == 0)
m.c289 = Constraint(expr= m.x301 == 0)
m.c290 = Constraint(expr= m.x347 == 0)
m.c291 = Constraint(expr= m.x348 == 0)
m.c292 = Constraint(expr= m.x349 == 0)
m.c293 = Constraint(expr= m.x53 - m.x296 - m.x299 == 0)
m.c294 = Constraint(expr= m.x54 - m.x297 - m.x300 == 0)
m.c295 = Constraint(expr= m.x55 - m.x298 - m.x301 == 0)
m.c296 = Constraint(expr= m.x71 - m.x344 - m.x347 == 0)
m.c297 = Constraint(expr= m.x72 - m.x345 - m.x348 == 0)
m.c298 = Constraint(expr= m.x73 - m.x346 - m.x349 == 0)
m.c299 = Constraint(expr= m.x296 - 15*m.b620 <= 0)
m.c300 = Constraint(expr= m.x297 - 15*m.b621 <= 0)
m.c301 = Constraint(expr= m.x298 - 15*m.b622 <= 0)
m.c302 = Constraint(expr= m.x299 + 15*m.b620 <= 15)
m.c303 = Constraint(expr= m.x300 + 15*m.b621 <= 15)
m.c304 = Constraint(expr= m.x301 + 15*m.b622 <= 15)
m.c305 = Constraint(expr= m.x344 - 13.5*m.b620 <= 0)
m.c306 = Constraint(expr= m.x345 - 13.5*m.b621 <= 0)
m.c307 = Constraint(expr= m.x346 - 13.5*m.b622 <= 0)
m.c308 = Constraint(expr= m.x347 + 13.5*m.b620 <= 13.5)
m.c309 = Constraint(expr= m.x348 + 13.5*m.b621 <= 13.5)
m.c310 = Constraint(expr= m.x349 + 13.5*m.b622 <= 13.5)
m.c311 = Constraint(expr= - 0.6*m.x302 + m.x350 == 0)
m.c312 = Constraint(expr= - 0.6*m.x303 + m.x351 == 0)
m.c313 = Constraint(expr= - 0.6*m.x304 + m.x352 == 0)
m.c314 = Constraint(expr= m.x305 == 0)
m.c315 = Constraint(expr= m.x306 == 0)
m.c316 = Constraint(expr= m.x307 == 0)
m.c317 = Constraint(expr= m.x353 == 0)
m.c318 = Constraint(expr= m.x354 == 0)
m.c319 = Constraint(expr= m.x355 == 0)
m.c320 = Constraint(expr= m.x56 - m.x302 - m.x305 == 0)
m.c321 = Constraint(expr= m.x57 - m.x303 - m.x306 == 0)
m.c322 = Constraint(expr= m.x58 - m.x304 - m.x307 == 0)
m.c323 = Constraint(expr= m.x74 - m.x350 - m.x353 == 0)
m.c324 = Constraint(expr= m.x75 - m.x351 - m.x354 == 0)
m.c325 = Constraint(expr= m.x76 - m.x352 - m.x355 == 0)
m.c326 = Constraint(expr= m.x302 - 15*m.b623 <= 0)
m.c327 = Constraint(expr= m.x303 - 15*m.b624 <= 0)
m.c328 = Constraint(expr= m.x304 - 15*m.b625 <= 0)
m.c329 = Constraint(expr= m.x305 + 15*m.b623 <= 15)
m.c330 = Constraint(expr= m.x306 + 15*m.b624 <= 15)
m.c331 = Constraint(expr= m.x307 + 15*m.b625 <= 15)
m.c332 = Constraint(expr= m.x350 - 9*m.b623 <= 0)
m.c333 = Constraint(expr= m.x351 - 9*m.b624 <= 0)
m.c334 = Constraint(expr= m.x352 - 9*m.b625 <= 0)
m.c335 = Constraint(expr= m.x353 + 9*m.b623 <= 9)
m.c336 = Constraint(expr= m.x354 + 9*m.b624 <= 9)
m.c337 = Constraint(expr= m.x355 + 9*m.b625 <= 9)
m.c338 = Constraint(expr=(m.x356/(0.001 + 0.999*m.b626) - 1.1*log(1 + m.x308/(0.001 + 0.999*m.b626)))*(0.001 + 0.999*
m.b626) <= 0)
m.c339 = Constraint(expr=(m.x357/(0.001 + 0.999*m.b627) - 1.1*log(1 + m.x309/(0.001 + 0.999*m.b627)))*(0.001 + 0.999*
m.b627) <= 0)
m.c340 = Constraint(expr=(m.x358/(0.001 + 0.999*m.b628) - 1.1*log(1 + m.x310/(0.001 + 0.999*m.b628)))*(0.001 + 0.999*
m.b628) <= 0)
m.c341 = Constraint(expr= m.x311 == 0)
m.c342 = Constraint(expr= m.x312 == 0)
m.c343 = Constraint(expr= m.x313 == 0)
m.c344 = Constraint(expr= m.x359 == 0)
m.c345 = Constraint(expr= m.x360 == 0)
m.c346 = Constraint(expr= m.x361 == 0)
m.c347 = Constraint(expr= m.x59 - m.x308 - m.x311 == 0)
m.c348 = Constraint(expr= m.x60 - m.x309 - m.x312 == 0)
m.c349 = Constraint(expr= m.x61 - m.x310 - m.x313 == 0)
m.c350 = Constraint(expr= m.x77 - m.x356 - m.x359 == 0)
m.c351 = Constraint(expr= m.x78 - m.x357 - m.x360 == 0)
m.c352 = Constraint(expr= m.x79 - m.x358 - m.x361 == 0)
m.c353 = Constraint(expr= m.x308 - 15*m.b626 <= 0)
m.c354 = Constraint(expr= m.x309 - 15*m.b627 <= 0)
m.c355 = Constraint(expr= m.x310 - 15*m.b628 <= 0)
m.c356 = Constraint(expr= m.x311 + 15*m.b626 <= 15)
m.c357 = Constraint(expr= m.x312 + 15*m.b627 <= 15)
m.c358 = Constraint(expr= m.x313 + 15*m.b628 <= 15)
m.c359 = Constraint(expr= m.x356 - 3.04984759446376*m.b626 <= 0)
m.c360 = Constraint(expr= m.x357 - 3.04984759446376*m.b627 <= 0)
m.c361 = Constraint(expr= m.x358 - 3.04984759446376*m.b628 <= 0)
m.c362 = Constraint(expr= m.x359 + 3.04984759446376*m.b626 <= 3.04984759446376)
m.c363 = Constraint(expr= m.x360 + 3.04984759446376*m.b627 <= 3.04984759446376)
m.c364 = Constraint(expr= m.x361 + 3.04984759446376*m.b628 <= 3.04984759446376)
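# This unit ties one output copy to two relations at once: x416 = 0.9*x317 and
# x416 = x374, so stream x374 must equal 90% of x317 whenever the unit is on.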
m.c365 = Constraint(expr= - 0.9*m.x317 + m.x416 == 0)
m.c366 = Constraint(expr= - 0.9*m.x318 + m.x417 == 0)
m.c367 = Constraint(expr= - 0.9*m.x319 + m.x418 == 0)
m.c368 = Constraint(expr= - m.x374 + m.x416 == 0)
m.c369 = Constraint(expr= - m.x375 + m.x417 == 0)
m.c370 = Constraint(expr= - m.x376 + m.x418 == 0)
m.c371 = Constraint(expr= m.x323 == 0)
m.c372 = Constraint(expr= m.x324 == 0)
m.c373 = Constraint(expr= m.x325 == 0)
m.c374 = Constraint(expr= m.x377 == 0)
m.c375 = Constraint(expr= m.x378 == 0)
m.c376 = Constraint(expr= m.x379 == 0)
m.c377 = Constraint(expr= m.x419 == 0)
m.c378 = Constraint(expr= m.x420 == 0)
m.c379 = Constraint(expr= m.x421 == 0)
m.c380 = Constraint(expr= m.x62 - m.x317 - m.x323 == 0)
m.c381 = Constraint(expr= m.x63 - m.x318 - m.x324 == 0)
m.c382 = Constraint(expr= m.x64 - m.x319 - m.x325 == 0)
m.c383 = Constraint(expr= m.x86 - m.x374 - m.x377 == 0)
m.c384 = Constraint(expr= m.x87 - m.x375 - m.x378 == 0)
m.c385 = Constraint(expr= m.x88 - m.x376 - m.x379 == 0)
m.c386 = Constraint(expr= m.x110 - m.x416 - m.x419 == 0)
m.c387 = Constraint(expr= m.x111 - m.x417 - m.x420 == 0)
m.c388 = Constraint(expr= m.x112 - m.x418 - m.x421 == 0)
m.c389 = Constraint(expr= m.x317 - 1.83548069293539*m.b629 <= 0)
m.c390 = Constraint(expr= m.x318 - 1.83548069293539*m.b630 <= 0)
m.c391 = Constraint(expr= m.x319 - 1.83548069293539*m.b631 <= 0)
m.c392 = Constraint(expr= m.x323 + 1.83548069293539*m.b629 <= 1.83548069293539)
m.c393 = Constraint(expr= m.x324 + 1.83548069293539*m.b630 <= 1.83548069293539)
m.c394 = Constraint(expr= m.x325 + 1.83548069293539*m.b631 <= 1.83548069293539)
m.c395 = Constraint(expr= m.x374 - 20*m.b629 <= 0)
m.c396 = Constraint(expr= m.x375 - 20*m.b630 <= 0)
m.c397 = Constraint(expr= m.x376 - 20*m.b631 <= 0)
m.c398 = Constraint(expr= m.x377 + 20*m.b629 <= 20)
m.c399 = Constraint(expr= m.x378 + 20*m.b630 <= 20)
m.c400 = Constraint(expr= m.x379 + 20*m.b631 <= 20)
m.c401 = Constraint(expr= m.x416 - 20*m.b629 <= 0)
m.c402 = Constraint(expr= m.x417 - 20*m.b630 <= 0)
m.c403 = Constraint(expr= m.x418 - 20*m.b631 <= 0)
m.c404 = Constraint(expr= m.x419 + 20*m.b629 <= 20)
m.c405 = Constraint(expr= m.x420 + 20*m.b630 <= 20)
m.c406 = Constraint(expr= m.x421 + 20*m.b631 <= 20)
m.c407 = Constraint(expr=(m.x422/(0.001 + 0.999*m.b632) - log(1 + m.x329/(0.001 + 0.999*m.b632)))*(0.001 + 0.999*m.b632)
<= 0)
m.c408 = Constraint(expr=(m.x423/(0.001 + 0.999*m.b633) - log(1 + m.x330/(0.001 + 0.999*m.b633)))*(0.001 + 0.999*m.b633)
<= 0)
m.c409 = Constraint(expr=(m.x424/(0.001 + 0.999*m.b634) - log(1 + m.x331/(0.001 + 0.999*m.b634)))*(0.001 + 0.999*m.b634)
<= 0)
m.c410 = Constraint(expr= m.x335 == 0)
m.c411 = Constraint(expr= m.x336 == 0)
m.c412 = Constraint(expr= m.x337 == 0)
m.c413 = Constraint(expr= m.x425 == 0)
m.c414 = Constraint(expr= m.x426 == 0)
m.c415 = Constraint(expr= m.x427 == 0)
m.c416 = Constraint(expr= m.x65 - m.x329 - m.x335 == 0)
m.c417 = Constraint(expr= m.x66 - m.x330 - m.x336 == 0)
m.c418 = Constraint(expr= m.x67 - m.x331 - m.x337 == 0)
m.c419 = Constraint(expr= m.x113 - m.x422 - m.x425 == 0)
m.c420 = Constraint(expr= m.x114 - m.x423 - m.x426 == 0)
m.c421 = Constraint(expr= m.x115 - m.x424 - m.x427 == 0)
m.c422 = Constraint(expr= m.x329 - 1.32154609891348*m.b632 <= 0)
m.c423 = Constraint(expr= m.x330 - 1.32154609891348*m.b633 <= 0)
m.c424 = Constraint(expr= m.x331 - 1.32154609891348*m.b634 <= 0)
m.c425 = Constraint(expr= m.x335 + 1.32154609891348*m.b632 <= 1.32154609891348)
m.c426 = Constraint(expr= m.x336 + 1.32154609891348*m.b633 <= 1.32154609891348)
m.c427 = Constraint(expr= m.x337 + 1.32154609891348*m.b634 <= 1.32154609891348)
m.c428 = Constraint(expr= m.x422 - 0.842233385663186*m.b632 <= 0)
m.c429 = Constraint(expr= m.x423 - 0.842233385663186*m.b633 <= 0)
m.c430 = Constraint(expr= m.x424 - 0.842233385663186*m.b634 <= 0)
m.c431 = Constraint(expr= m.x425 + 0.842233385663186*m.b632 <= 0.842233385663186)
m.c432 = Constraint(expr= m.x426 + 0.842233385663186*m.b633 <= 0.842233385663186)
m.c433 = Constraint(expr= m.x427 + 0.842233385663186*m.b634 <= 0.842233385663186)
m.c434 = Constraint(expr=(m.x428/(0.001 + 0.999*m.b635) - 0.7*log(1 + m.x362/(0.001 + 0.999*m.b635)))*(0.001 + 0.999*
m.b635) <= 0)
m.c435 = Constraint(expr=(m.x429/(0.001 + 0.999*m.b636) - 0.7*log(1 + m.x363/(0.001 + 0.999*m.b636)))*(0.001 + 0.999*
m.b636) <= 0)
m.c436 = Constraint(expr=(m.x430/(0.001 + 0.999*m.b637) - 0.7*log(1 + m.x364/(0.001 + 0.999*m.b637)))*(0.001 + 0.999*
m.b637) <= 0)
m.c437 = Constraint(expr= m.x365 == 0)
m.c438 = Constraint(expr= m.x366 == 0)
m.c439 = Constraint(expr= m.x367 == 0)
m.c440 = Constraint(expr= m.x431 == 0)
m.c441 = Constraint(expr= m.x432 == 0)
m.c442 = Constraint(expr= m.x433 == 0)
m.c443 = Constraint(expr= m.x80 - m.x362 - m.x365 == 0)
m.c444 = Constraint(expr= m.x81 - m.x363 - m.x366 == 0)
m.c445 = Constraint(expr= m.x82 - m.x364 - m.x367 == 0)
m.c446 = Constraint(expr= m.x116 - m.x428 - m.x431 == 0)
m.c447 = Constraint(expr= m.x117 - m.x429 - m.x432 == 0)
m.c448 = Constraint(expr= m.x118 - m.x430 - m.x433 == 0)
m.c449 = Constraint(expr= m.x362 - 1.26558121681553*m.b635 <= 0)
m.c450 = Constraint(expr= m.x363 - 1.26558121681553*m.b636 <= 0)
m.c451 = Constraint(expr= m.x364 - 1.26558121681553*m.b637 <= 0)
m.c452 = Constraint(expr= m.x365 + 1.26558121681553*m.b635 <= 1.26558121681553)
m.c453 = Constraint(expr= m.x366 + 1.26558121681553*m.b636 <= 1.26558121681553)
m.c454 = Constraint(expr= m.x367 + 1.26558121681553*m.b637 <= 1.26558121681553)
m.c455 = Constraint(expr= m.x428 - 0.572481933717686*m.b635 <= 0)
m.c456 = Constraint(expr= m.x429 - 0.572481933717686*m.b636 <= 0)
m.c457 = Constraint(expr= m.x430 - 0.572481933717686*m.b637 <= 0)
m.c458 = Constraint(expr= m.x431 + 0.572481933717686*m.b635 <= 0.572481933717686)
m.c459 = Constraint(expr= m.x432 + 0.572481933717686*m.b636 <= 0.572481933717686)
m.c460 = Constraint(expr= m.x433 + 0.572481933717686*m.b637 <= 0.572481933717686)
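# A nonlinear unit with two feed streams: the hull inequality on the output
# copies (x434..x436) is stated once per feed, against x368..x370 and again
# against x380..x382, so the output is capped by 0.65*log(1 + feed) for each
# feed independently.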
m.c461 = Constraint(expr=(m.x434/(0.001 + 0.999*m.b638) - 0.65*log(1 + m.x368/(0.001 + 0.999*m.b638)))*(0.001 + 0.999*
m.b638) <= 0)
m.c462 = Constraint(expr=(m.x435/(0.001 + 0.999*m.b639) - 0.65*log(1 + m.x369/(0.001 + 0.999*m.b639)))*(0.001 + 0.999*
m.b639) <= 0)
m.c463 = Constraint(expr=(m.x436/(0.001 + 0.999*m.b640) - 0.65*log(1 + m.x370/(0.001 + 0.999*m.b640)))*(0.001 + 0.999*
m.b640) <= 0)
m.c464 = Constraint(expr=(m.x434/(0.001 + 0.999*m.b638) - 0.65*log(1 + m.x380/(0.001 + 0.999*m.b638)))*(0.001 + 0.999*
m.b638) <= 0)
m.c465 = Constraint(expr=(m.x435/(0.001 + 0.999*m.b639) - 0.65*log(1 + m.x381/(0.001 + 0.999*m.b639)))*(0.001 + 0.999*
m.b639) <= 0)
m.c466 = Constraint(expr=(m.x436/(0.001 + 0.999*m.b640) - 0.65*log(1 + m.x382/(0.001 + 0.999*m.b640)))*(0.001 + 0.999*
m.b640) <= 0)
m.c467 = Constraint(expr= m.x371 == 0)
m.c468 = Constraint(expr= m.x372 == 0)
m.c469 = Constraint(expr= m.x373 == 0)
m.c470 = Constraint(expr= m.x383 == 0)
m.c471 = Constraint(expr= m.x384 == 0)
m.c472 = Constraint(expr= m.x385 == 0)
m.c473 = Constraint(expr= m.x437 == 0)
m.c474 = Constraint(expr= m.x438 == 0)
m.c475 = Constraint(expr= m.x439 == 0)
m.c476 = Constraint(expr= m.x83 - m.x368 - m.x371 == 0)
m.c477 = Constraint(expr= m.x84 - m.x369 - m.x372 == 0)
m.c478 = Constraint(expr= m.x85 - m.x370 - m.x373 == 0)
m.c479 = Constraint(expr= m.x92 - m.x380 - m.x383 == 0)
m.c480 = Constraint(expr= m.x93 - m.x381 - m.x384 == 0)
m.c481 = Constraint(expr= m.x94 - m.x382 - m.x385 == 0)
m.c482 = Constraint(expr= m.x119 - m.x434 - m.x437 == 0)
m.c483 = Constraint(expr= m.x120 - m.x435 - m.x438 == 0)
m.c484 = Constraint(expr= m.x121 - m.x436 - m.x439 == 0)
m.c485 = Constraint(expr= m.x368 - 1.26558121681553*m.b638 <= 0)
m.c486 = Constraint(expr= m.x369 - 1.26558121681553*m.b639 <= 0)
m.c487 = Constraint(expr= m.x370 - 1.26558121681553*m.b640 <= 0)
m.c488 = Constraint(expr= m.x371 + 1.26558121681553*m.b638 <= 1.26558121681553)
m.c489 = Constraint(expr= m.x372 + 1.26558121681553*m.b639 <= 1.26558121681553)
m.c490 = Constraint(expr= m.x373 + 1.26558121681553*m.b640 <= 1.26558121681553)
m.c491 = Constraint(expr= m.x380 - 33.5*m.b638 <= 0)
m.c492 = Constraint(expr= m.x381 - 33.5*m.b639 <= 0)
m.c493 = Constraint(expr= m.x382 - 33.5*m.b640 <= 0)
m.c494 = Constraint(expr= m.x383 + 33.5*m.b638 <= 33.5)
m.c495 = Constraint(expr= m.x384 + 33.5*m.b639 <= 33.5)
m.c496 = Constraint(expr= m.x385 + 33.5*m.b640 <= 33.5)
m.c497 = Constraint(expr= m.x434 - 2.30162356062425*m.b638 <= 0)
m.c498 = Constraint(expr= m.x435 - 2.30162356062425*m.b639 <= 0)
m.c499 = Constraint(expr= m.x436 - 2.30162356062425*m.b640 <= 0)
m.c500 = Constraint(expr= m.x437 + 2.30162356062425*m.b638 <= 2.30162356062425)
m.c501 = Constraint(expr= m.x438 + 2.30162356062425*m.b639 <= 2.30162356062425)
m.c502 = Constraint(expr= m.x439 + 2.30162356062425*m.b640 <= 2.30162356062425)
m.c503 = Constraint(expr= - m.x386 + m.x440 == 0)
m.c504 = Constraint(expr= - m.x387 + m.x441 == 0)
m.c505 = Constraint(expr= - m.x388 + m.x442 == 0)
m.c506 = Constraint(expr= m.x389 == 0)
m.c507 = Constraint(expr= m.x390 == 0)
m.c508 = Constraint(expr= m.x391 == 0)
m.c509 = Constraint(expr= m.x443 == 0)
m.c510 = Constraint(expr= m.x444 == 0)
m.c511 = Constraint(expr= m.x445 == 0)
m.c512 = Constraint(expr= m.x95 - m.x386 - m.x389 == 0)
m.c513 = Constraint(expr= m.x96 - m.x387 - m.x390 == 0)
m.c514 = Constraint(expr= m.x97 - m.x388 - m.x391 == 0)
m.c515 = Constraint(expr= m.x122 - m.x440 - m.x443 == 0)
m.c516 = Constraint(expr= m.x123 - m.x441 - m.x444 == 0)
m.c517 = Constraint(expr= m.x124 - m.x442 - m.x445 == 0)
m.c518 = Constraint(expr= m.x386 - 9*m.b641 <= 0)
m.c519 = Constraint(expr= m.x387 - 9*m.b642 <= 0)
m.c520 = Constraint(expr= m.x388 - 9*m.b643 <= 0)
m.c521 = Constraint(expr= m.x389 + 9*m.b641 <= 9)
m.c522 = Constraint(expr= m.x390 + 9*m.b642 <= 9)
m.c523 = Constraint(expr= m.x391 + 9*m.b643 <= 9)
m.c524 = Constraint(expr= m.x440 - 9*m.b641 <= 0)
m.c525 = Constraint(expr= m.x441 - 9*m.b642 <= 0)
m.c526 = Constraint(expr= m.x442 - 9*m.b643 <= 0)
m.c527 = Constraint(expr= m.x443 + 9*m.b641 <= 9)
m.c528 = Constraint(expr= m.x444 + 9*m.b642 <= 9)
m.c529 = Constraint(expr= m.x445 + 9*m.b643 <= 9)
m.c530 = Constraint(expr= - m.x392 + m.x446 == 0)
m.c531 = Constraint(expr= - m.x393 + m.x447 == 0)
m.c532 = Constraint(expr= - m.x394 + m.x448 == 0)
m.c533 = Constraint(expr= m.x395 == 0)
m.c534 = Constraint(expr= m.x396 == 0)
m.c535 = Constraint(expr= m.x397 == 0)
m.c536 = Constraint(expr= m.x449 == 0)
m.c537 = Constraint(expr= m.x450 == 0)
m.c538 = Constraint(expr= m.x451 == 0)
m.c539 = Constraint(expr= m.x98 - m.x392 - m.x395 == 0)
m.c540 = Constraint(expr= m.x99 - m.x393 - m.x396 == 0)
m.c541 = Constraint(expr= m.x100 - m.x394 - m.x397 == 0)
m.c542 = Constraint(expr= m.x125 - m.x446 - m.x449 == 0)
m.c543 = Constraint(expr= m.x126 - m.x447 - m.x450 == 0)
m.c544 = Constraint(expr= m.x127 - m.x448 - m.x451 == 0)
m.c545 = Constraint(expr= m.x392 - 9*m.b644 <= 0)
m.c546 = Constraint(expr= m.x393 - 9*m.b645 <= 0)
m.c547 = Constraint(expr= m.x394 - 9*m.b646 <= 0)
m.c548 = Constraint(expr= m.x395 + 9*m.b644 <= 9)
m.c549 = Constraint(expr= m.x396 + 9*m.b645 <= 9)
m.c550 = Constraint(expr= m.x397 + 9*m.b646 <= 9)
m.c551 = Constraint(expr= m.x446 - 9*m.b644 <= 0)
m.c552 = Constraint(expr= m.x447 - 9*m.b645 <= 0)
m.c553 = Constraint(expr= m.x448 - 9*m.b646 <= 0)
m.c554 = Constraint(expr= m.x449 + 9*m.b644 <= 9)
m.c555 = Constraint(expr= m.x450 + 9*m.b645 <= 9)
m.c556 = Constraint(expr= m.x451 + 9*m.b646 <= 9)
m.c557 = Constraint(expr=(m.x452/(0.001 + 0.999*m.b647) - 0.75*log(1 + m.x398/(0.001 + 0.999*m.b647)))*(0.001 + 0.999*
m.b647) <= 0)
m.c558 = Constraint(expr=(m.x453/(0.001 + 0.999*m.b648) - 0.75*log(1 + m.x399/(0.001 + 0.999*m.b648)))*(0.001 + 0.999*
m.b648) <= 0)
m.c559 = Constraint(expr=(m.x454/(0.001 + 0.999*m.b649) - 0.75*log(1 + m.x400/(0.001 + 0.999*m.b649)))*(0.001 + 0.999*
m.b649) <= 0)
m.c560 = Constraint(expr= m.x401 == 0)
m.c561 = Constraint(expr= m.x402 == 0)
m.c562 = Constraint(expr= m.x403 == 0)
m.c563 = Constraint(expr= m.x455 == 0)
m.c564 = Constraint(expr= m.x456 == 0)
m.c565 = Constraint(expr= m.x457 == 0)
m.c566 = Constraint(expr= m.x101 - m.x398 - m.x401 == 0)
m.c567 = Constraint(expr= m.x102 - m.x399 - m.x402 == 0)
m.c568 = Constraint(expr= m.x103 - m.x400 - m.x403 == 0)
m.c569 = Constraint(expr= m.x128 - m.x452 - m.x455 == 0)
m.c570 = Constraint(expr= m.x129 - m.x453 - m.x456 == 0)
m.c571 = Constraint(expr= m.x130 - m.x454 - m.x457 == 0)
m.c572 = Constraint(expr= m.x398 - 3.04984759446376*m.b647 <= 0)
m.c573 = Constraint(expr= m.x399 - 3.04984759446376*m.b648 <= 0)
m.c574 = Constraint(expr= m.x400 - 3.04984759446376*m.b649 <= 0)
m.c575 = Constraint(expr= m.x401 + 3.04984759446376*m.b647 <= 3.04984759446376)
m.c576 = Constraint(expr= m.x402 + 3.04984759446376*m.b648 <= 3.04984759446376)
m.c577 = Constraint(expr= m.x403 + 3.04984759446376*m.b649 <= 3.04984759446376)
m.c578 = Constraint(expr= m.x452 - 1.04900943706034*m.b647 <= 0)
m.c579 = Constraint(expr= m.x453 - 1.04900943706034*m.b648 <= 0)
m.c580 = Constraint(expr= m.x454 - 1.04900943706034*m.b649 <= 0)
m.c581 = Constraint(expr= m.x455 + 1.04900943706034*m.b647 <= 1.04900943706034)
m.c582 = Constraint(expr= m.x456 + 1.04900943706034*m.b648 <= 1.04900943706034)
m.c583 = Constraint(expr= m.x457 + 1.04900943706034*m.b649 <= 1.04900943706034)
m.c584 = Constraint(expr=(m.x458/(0.001 + 0.999*m.b650) - 0.8*log(1 + m.x404/(0.001 + 0.999*m.b650)))*(0.001 + 0.999*
m.b650) <= 0)
m.c585 = Constraint(expr=(m.x459/(0.001 + 0.999*m.b651) - 0.8*log(1 + m.x405/(0.001 + 0.999*m.b651)))*(0.001 + 0.999*
m.b651) <= 0)
m.c586 = Constraint(expr=(m.x460/(0.001 + 0.999*m.b652) - 0.8*log(1 + m.x406/(0.001 + 0.999*m.b652)))*(0.001 + 0.999*
m.b652) <= 0)
m.c587 = Constraint(expr= m.x407 == 0)
m.c588 = Constraint(expr= m.x408 == 0)
m.c589 = Constraint(expr= m.x409 == 0)
m.c590 = Constraint(expr= m.x461 == 0)
m.c591 = Constraint(expr= m.x462 == 0)
m.c592 = Constraint(expr= m.x463 == 0)
m.c593 = Constraint(expr= m.x104 - m.x404 - m.x407 == 0)
m.c594 = Constraint(expr= m.x105 - m.x405 - m.x408 == 0)
m.c595 = Constraint(expr= m.x106 - m.x406 - m.x409 == 0)
m.c596 = Constraint(expr= m.x131 - m.x458 - m.x461 == 0)
m.c597 = Constraint(expr= m.x132 - m.x459 - m.x462 == 0)
m.c598 = Constraint(expr= m.x133 - m.x460 - m.x463 == 0)
m.c599 = Constraint(expr= m.x404 - 3.04984759446376*m.b650 <= 0)
m.c600 = Constraint(expr= m.x405 - 3.04984759446376*m.b651 <= 0)
m.c601 = Constraint(expr= m.x406 - 3.04984759446376*m.b652 <= 0)
m.c602 = Constraint(expr= m.x407 + 3.04984759446376*m.b650 <= 3.04984759446376)
m.c603 = Constraint(expr= m.x408 + 3.04984759446376*m.b651 <= 3.04984759446376)
m.c604 = Constraint(expr= m.x409 + 3.04984759446376*m.b652 <= 3.04984759446376)
m.c605 = Constraint(expr= m.x458 - 1.11894339953103*m.b650 <= 0)
m.c606 = Constraint(expr= m.x459 - 1.11894339953103*m.b651 <= 0)
m.c607 = Constraint(expr= m.x460 - 1.11894339953103*m.b652 <= 0)
m.c608 = Constraint(expr= m.x461 + 1.11894339953103*m.b650 <= 1.11894339953103)
m.c609 = Constraint(expr= m.x462 + 1.11894339953103*m.b651 <= 1.11894339953103)
m.c610 = Constraint(expr= m.x463 + 1.11894339953103*m.b652 <= 1.11894339953103)
m.c611 = Constraint(expr=(m.x464/(0.001 + 0.999*m.b653) - 0.85*log(1 + m.x410/(0.001 + 0.999*m.b653)))*(0.001 + 0.999*
m.b653) <= 0)
m.c612 = Constraint(expr=(m.x465/(0.001 + 0.999*m.b654) - 0.85*log(1 + m.x411/(0.001 + 0.999*m.b654)))*(0.001 + 0.999*
m.b654) <= 0)
m.c613 = Constraint(expr=(m.x466/(0.001 + 0.999*m.b655) - 0.85*log(1 + m.x412/(0.001 + 0.999*m.b655)))*(0.001 + 0.999*
m.b655) <= 0)
m.c614 = Constraint(expr= m.x413 == 0)
m.c615 = Constraint(expr= m.x414 == 0)
m.c616 = Constraint(expr= m.x415 == 0)
m.c617 = Constraint(expr= m.x467 == 0)
m.c618 = Constraint(expr= m.x468 == 0)
m.c619 = Constraint(expr= m.x469 == 0)
m.c620 = Constraint(expr= m.x107 - m.x410 - m.x413 == 0)
m.c621 = Constraint(expr= m.x108 - m.x411 - m.x414 == 0)
m.c622 = Constraint(expr= m.x109 - m.x412 - m.x415 == 0)
m.c623 = Constraint(expr= m.x134 - m.x464 - m.x467 == 0)
m.c624 = Constraint(expr= m.x135 - m.x465 - m.x468 == 0)
m.c625 = Constraint(expr= m.x136 - m.x466 - m.x469 == 0)
m.c626 = Constraint(expr= m.x410 - 3.04984759446376*m.b653 <= 0)
m.c627 = Constraint(expr= m.x411 - 3.04984759446376*m.b654 <= 0)
m.c628 = Constraint(expr= m.x412 - 3.04984759446376*m.b655 <= 0)
m.c629 = Constraint(expr= m.x413 + 3.04984759446376*m.b653 <= 3.04984759446376)
m.c630 = Constraint(expr= m.x414 + 3.04984759446376*m.b654 <= 3.04984759446376)
m.c631 = Constraint(expr= m.x415 + 3.04984759446376*m.b655 <= 3.04984759446376)
m.c632 = Constraint(expr= m.x464 - 1.18887736200171*m.b653 <= 0)
m.c633 = Constraint(expr= m.x465 - 1.18887736200171*m.b654 <= 0)
m.c634 = Constraint(expr= m.x466 - 1.18887736200171*m.b655 <= 0)
m.c635 = Constraint(expr= m.x467 + 1.18887736200171*m.b653 <= 1.18887736200171)
m.c636 = Constraint(expr= m.x468 + 1.18887736200171*m.b654 <= 1.18887736200171)
m.c637 = Constraint(expr= m.x469 + 1.18887736200171*m.b655 <= 1.18887736200171)
m.c638 = Constraint(expr=(m.x482/(0.001 + 0.999*m.b656) - log(1 + m.x470/(0.001 + 0.999*m.b656)))*(0.001 + 0.999*m.b656)
<= 0)
m.c639 = Constraint(expr=(m.x483/(0.001 + 0.999*m.b657) - log(1 + m.x471/(0.001 + 0.999*m.b657)))*(0.001 + 0.999*m.b657)
<= 0)
m.c640 = Constraint(expr=(m.x484/(0.001 + 0.999*m.b658) - log(1 + m.x472/(0.001 + 0.999*m.b658)))*(0.001 + 0.999*m.b658)
<= 0)
m.c641 = Constraint(expr= m.x473 == 0)
m.c642 = Constraint(expr= m.x474 == 0)
m.c643 = Constraint(expr= m.x475 == 0)
m.c644 = Constraint(expr= m.x485 == 0)
m.c645 = Constraint(expr= m.x486 == 0)
m.c646 = Constraint(expr= m.x487 == 0)
m.c647 = Constraint(expr= m.x140 - m.x470 - m.x473 == 0)
m.c648 = Constraint(expr= m.x141 - m.x471 - m.x474 == 0)
m.c649 = Constraint(expr= m.x142 - m.x472 - m.x475 == 0)
m.c650 = Constraint(expr= m.x146 - m.x482 - m.x485 == 0)
m.c651 = Constraint(expr= m.x147 - m.x483 - m.x486 == 0)
m.c652 = Constraint(expr= m.x148 - m.x484 - m.x487 == 0)
m.c653 = Constraint(expr= m.x470 - 1.18887736200171*m.b656 <= 0)
m.c654 = Constraint(expr= m.x471 - 1.18887736200171*m.b657 <= 0)
m.c655 = Constraint(expr= m.x472 - 1.18887736200171*m.b658 <= 0)
m.c656 = Constraint(expr= m.x473 + 1.18887736200171*m.b656 <= 1.18887736200171)
m.c657 = Constraint(expr= m.x474 + 1.18887736200171*m.b657 <= 1.18887736200171)
m.c658 = Constraint(expr= m.x475 + 1.18887736200171*m.b658 <= 1.18887736200171)
m.c659 = Constraint(expr= m.x482 - 0.78338879230327*m.b656 <= 0)
m.c660 = Constraint(expr= m.x483 - 0.78338879230327*m.b657 <= 0)
m.c661 = Constraint(expr= m.x484 - 0.78338879230327*m.b658 <= 0)
m.c662 = Constraint(expr= m.x485 + 0.78338879230327*m.b656 <= 0.78338879230327)
m.c663 = Constraint(expr= m.x486 + 0.78338879230327*m.b657 <= 0.78338879230327)
m.c664 = Constraint(expr= m.x487 + 0.78338879230327*m.b658 <= 0.78338879230327)
m.c665 = Constraint(expr=(m.x488/(0.001 + 0.999*m.b659) - 1.2*log(1 + m.x476/(0.001 + 0.999*m.b659)))*(0.001 + 0.999*
m.b659) <= 0)
m.c666 = Constraint(expr=(m.x489/(0.001 + 0.999*m.b660) - 1.2*log(1 + m.x477/(0.001 + 0.999*m.b660)))*(0.001 + 0.999*
m.b660) <= 0)
m.c667 = Constraint(expr=(m.x490/(0.001 + 0.999*m.b661) - 1.2*log(1 + m.x478/(0.001 + 0.999*m.b661)))*(0.001 + 0.999*
m.b661) <= 0)
m.c668 = Constraint(expr= m.x479 == 0)
m.c669 = Constraint(expr= m.x480 == 0)
m.c670 = Constraint(expr= m.x481 == 0)
m.c671 = Constraint(expr= m.x491 == 0)
m.c672 = Constraint(expr= m.x492 == 0)
m.c673 = Constraint(expr= m.x493 == 0)
m.c674 = Constraint(expr= m.x143 - m.x476 - m.x479 == 0)
m.c675 = Constraint(expr= m.x144 - m.x477 - m.x480 == 0)
m.c676 = Constraint(expr= m.x145 - m.x478 - m.x481 == 0)
m.c677 = Constraint(expr= m.x149 - m.x488 - m.x491 == 0)
m.c678 = Constraint(expr= m.x150 - m.x489 - m.x492 == 0)
m.c679 = Constraint(expr= m.x151 - m.x490 - m.x493 == 0)
m.c680 = Constraint(expr= m.x476 - 1.18887736200171*m.b659 <= 0)
m.c681 = Constraint(expr= m.x477 - 1.18887736200171*m.b660 <= 0)
m.c682 = Constraint(expr= m.x478 - 1.18887736200171*m.b661 <= 0)
m.c683 = Constraint(expr= m.x479 + 1.18887736200171*m.b659 <= 1.18887736200171)
m.c684 = Constraint(expr= m.x480 + 1.18887736200171*m.b660 <= 1.18887736200171)
m.c685 = Constraint(expr= m.x481 + 1.18887736200171*m.b661 <= 1.18887736200171)
m.c686 = Constraint(expr= m.x488 - 0.940066550763924*m.b659 <= 0)
m.c687 = Constraint(expr= m.x489 - 0.940066550763924*m.b660 <= 0)
m.c688 = Constraint(expr= m.x490 - 0.940066550763924*m.b661 <= 0)
m.c689 = Constraint(expr= m.x491 + 0.940066550763924*m.b659 <= 0.940066550763924)
m.c690 = Constraint(expr= m.x492 + 0.940066550763924*m.b660 <= 0.940066550763924)
m.c691 = Constraint(expr= m.x493 + 0.940066550763924*m.b661 <= 0.940066550763924)
m.c692 = Constraint(expr= - 0.75*m.x494 + m.x518 == 0)
m.c693 = Constraint(expr= - 0.75*m.x495 + m.x519 == 0)
m.c694 = Constraint(expr= - 0.75*m.x496 + m.x520 == 0)
m.c695 = Constraint(expr= m.x497 == 0)
m.c696 = Constraint(expr= m.x498 == 0)
m.c697 = Constraint(expr= m.x499 == 0)
m.c698 = Constraint(expr= m.x521 == 0)
m.c699 = Constraint(expr= m.x522 == 0)
m.c700 = Constraint(expr= m.x523 == 0)
m.c701 = Constraint(expr= m.x161 - m.x494 - m.x497 == 0)
m.c702 = Constraint(expr= m.x162 - m.x495 - m.x498 == 0)
m.c703 = Constraint(expr= m.x163 - m.x496 - m.x499 == 0)
m.c704 = Constraint(expr= m.x173 - m.x518 - m.x521 == 0)
m.c705 = Constraint(expr= m.x174 - m.x519 - m.x522 == 0)
m.c706 = Constraint(expr= m.x175 - m.x520 - m.x523 == 0)
m.c707 = Constraint(expr= m.x494 - 0.940066550763924*m.b662 <= 0)
m.c708 = Constraint(expr= m.x495 - 0.940066550763924*m.b663 <= 0)
m.c709 = Constraint(expr= m.x496 - 0.940066550763924*m.b664 <= 0)
m.c710 = Constraint(expr= m.x497 + 0.940066550763924*m.b662 <= 0.940066550763924)
m.c711 = Constraint(expr= m.x498 + 0.940066550763924*m.b663 <= 0.940066550763924)
m.c712 = Constraint(expr= m.x499 + 0.940066550763924*m.b664 <= 0.940066550763924)
m.c713 = Constraint(expr= m.x518 - 0.705049913072943*m.b662 <= 0)
m.c714 = Constraint(expr= m.x519 - 0.705049913072943*m.b663 <= 0)
m.c715 = Constraint(expr= m.x520 - 0.705049913072943*m.b664 <= 0)
m.c716 = Constraint(expr= m.x521 + 0.705049913072943*m.b662 <= 0.705049913072943)
m.c717 = Constraint(expr= m.x522 + 0.705049913072943*m.b663 <= 0.705049913072943)
m.c718 = Constraint(expr= m.x523 + 0.705049913072943*m.b664 <= 0.705049913072943)
m.c719 = Constraint(expr=(m.x524/(0.001 + 0.999*m.b665) - 1.5*log(1 + m.x500/(0.001 + 0.999*m.b665)))*(0.001 + 0.999*
m.b665) <= 0)
m.c720 = Constraint(expr=(m.x525/(0.001 + 0.999*m.b666) - 1.5*log(1 + m.x501/(0.001 + 0.999*m.b666)))*(0.001 + 0.999*
m.b666) <= 0)
m.c721 = Constraint(expr=(m.x526/(0.001 + 0.999*m.b667) - 1.5*log(1 + m.x502/(0.001 + 0.999*m.b667)))*(0.001 + 0.999*
m.b667) <= 0)
m.c722 = Constraint(expr= m.x503 == 0)
m.c723 = Constraint(expr= m.x504 == 0)
m.c724 = Constraint(expr= m.x505 == 0)
m.c725 = Constraint(expr= m.x530 == 0)
m.c726 = Constraint(expr= m.x531 == 0)
m.c727 = Constraint(expr= m.x532 == 0)
m.c728 = Constraint(expr= m.x164 - m.x500 - m.x503 == 0)
m.c729 = Constraint(expr= m.x165 - m.x501 - m.x504 == 0)
m.c730 = Constraint(expr= m.x166 - m.x502 - m.x505 == 0)
m.c731 = Constraint(expr= m.x176 - m.x524 - m.x530 == 0)
m.c732 = Constraint(expr= m.x177 - m.x525 - m.x531 == 0)
m.c733 = Constraint(expr= m.x178 - m.x526 - m.x532 == 0)
m.c734 = Constraint(expr= m.x500 - 0.940066550763924*m.b665 <= 0)
m.c735 = Constraint(expr= m.x501 - 0.940066550763924*m.b666 <= 0)
m.c736 = Constraint(expr= m.x502 - 0.940066550763924*m.b667 <= 0)
m.c737 = Constraint(expr= m.x503 + 0.940066550763924*m.b665 <= 0.940066550763924)
m.c738 = Constraint(expr= m.x504 + 0.940066550763924*m.b666 <= 0.940066550763924)
m.c739 = Constraint(expr= m.x505 + 0.940066550763924*m.b667 <= 0.940066550763924)
m.c740 = Constraint(expr= m.x524 - 0.994083415506506*m.b665 <= 0)
m.c741 = Constraint(expr= m.x525 - 0.994083415506506*m.b666 <= 0)
m.c742 = Constraint(expr= m.x526 - 0.994083415506506*m.b667 <= 0)
m.c743 = Constraint(expr= m.x530 + 0.994083415506506*m.b665 <= 0.994083415506506)
m.c744 = Constraint(expr= m.x531 + 0.994083415506506*m.b666 <= 0.994083415506506)
m.c745 = Constraint(expr= m.x532 + 0.994083415506506*m.b667 <= 0.994083415506506)
m.c746 = Constraint(expr= - m.x506 + m.x536 == 0)
m.c747 = Constraint(expr= - m.x507 + m.x537 == 0)
m.c748 = Constraint(expr= - m.x508 + m.x538 == 0)
m.c749 = Constraint(expr= - 0.5*m.x512 + m.x536 == 0)
m.c750 = Constraint(expr= - 0.5*m.x513 + m.x537 == 0)
m.c751 = Constraint(expr= - 0.5*m.x514 + m.x538 == 0)
m.c752 = Constraint(expr= m.x509 == 0)
m.c753 = Constraint(expr= m.x510 == 0)
m.c754 = Constraint(expr= m.x511 == 0)
m.c755 = Constraint(expr= m.x515 == 0)
m.c756 = Constraint(expr= m.x516 == 0)
m.c757 = Constraint(expr= m.x517 == 0)
m.c758 = Constraint(expr= m.x539 == 0)
m.c759 = Constraint(expr= m.x540 == 0)
m.c760 = Constraint(expr= m.x541 == 0)
m.c761 = Constraint(expr= m.x167 - m.x506 - m.x509 == 0)
m.c762 = Constraint(expr= m.x168 - m.x507 - m.x510 == 0)
m.c763 = Constraint(expr= m.x169 - m.x508 - m.x511 == 0)
m.c764 = Constraint(expr= m.x170 - m.x512 - m.x515 == 0)
m.c765 = Constraint(expr= m.x171 - m.x513 - m.x516 == 0)
m.c766 = Constraint(expr= m.x172 - m.x514 - m.x517 == 0)
m.c767 = Constraint(expr= m.x179 - m.x536 - m.x539 == 0)
m.c768 = Constraint(expr= m.x180 - m.x537 - m.x540 == 0)
m.c769 = Constraint(expr= m.x181 - m.x538 - m.x541 == 0)
m.c770 = Constraint(expr= m.x506 - 0.940066550763924*m.b668 <= 0)
m.c771 = Constraint(expr= m.x507 - 0.940066550763924*m.b669 <= 0)
m.c772 = Constraint(expr= m.x508 - 0.940066550763924*m.b670 <= 0)
m.c773 = Constraint(expr= m.x509 + 0.940066550763924*m.b668 <= 0.940066550763924)
m.c774 = Constraint(expr= m.x510 + 0.940066550763924*m.b669 <= 0.940066550763924)
m.c775 = Constraint(expr= m.x511 + 0.940066550763924*m.b670 <= 0.940066550763924)
m.c776 = Constraint(expr= m.x512 - 30*m.b668 <= 0)
m.c777 = Constraint(expr= m.x513 - 30*m.b669 <= 0)
m.c778 = Constraint(expr= m.x514 - 30*m.b670 <= 0)
m.c779 = Constraint(expr= m.x515 + 30*m.b668 <= 30)
m.c780 = Constraint(expr= m.x516 + 30*m.b669 <= 30)
m.c781 = Constraint(expr= m.x517 + 30*m.b670 <= 30)
m.c782 = Constraint(expr= m.x536 - 15*m.b668 <= 0)
m.c783 = Constraint(expr= m.x537 - 15*m.b669 <= 0)
m.c784 = Constraint(expr= m.x538 - 15*m.b670 <= 0)
m.c785 = Constraint(expr= m.x539 + 15*m.b668 <= 15)
m.c786 = Constraint(expr= m.x540 + 15*m.b669 <= 15)
m.c787 = Constraint(expr= m.x541 + 15*m.b670 <= 15)
m.c788 = Constraint(expr=(m.x566/(0.001 + 0.999*m.b671) - 1.25*log(1 + m.x542/(0.001 + 0.999*m.b671)))*(0.001 + 0.999*
m.b671) <= 0)
m.c789 = Constraint(expr=(m.x567/(0.001 + 0.999*m.b672) - 1.25*log(1 + m.x543/(0.001 + 0.999*m.b672)))*(0.001 + 0.999*
m.b672) <= 0)
m.c790 = Constraint(expr=(m.x568/(0.001 + 0.999*m.b673) - 1.25*log(1 + m.x544/(0.001 + 0.999*m.b673)))*(0.001 + 0.999*
m.b673) <= 0)
m.c791 = Constraint(expr= m.x545 == 0)
m.c792 = Constraint(expr= m.x546 == 0)
m.c793 = Constraint(expr= m.x547 == 0)
m.c794 = Constraint(expr= m.x569 == 0)
m.c795 = Constraint(expr= m.x570 == 0)
m.c796 = Constraint(expr= m.x571 == 0)
m.c797 = Constraint(expr= m.x182 - m.x542 - m.x545 == 0)
m.c798 = Constraint(expr= m.x183 - m.x543 - m.x546 == 0)
m.c799 = Constraint(expr= m.x184 - m.x544 - m.x547 == 0)
m.c800 = Constraint(expr= m.x197 - m.x566 - m.x569 == 0)
m.c801 = Constraint(expr= m.x198 - m.x567 - m.x570 == 0)
m.c802 = Constraint(expr= m.x199 - m.x568 - m.x571 == 0)
m.c803 = Constraint(expr= m.x542 - 0.705049913072943*m.b671 <= 0)
m.c804 = Constraint(expr= m.x543 - 0.705049913072943*m.b672 <= 0)
m.c805 = Constraint(expr= m.x544 - 0.705049913072943*m.b673 <= 0)
m.c806 = Constraint(expr= m.x545 + 0.705049913072943*m.b671 <= 0.705049913072943)
m.c807 = Constraint(expr= m.x546 + 0.705049913072943*m.b672 <= 0.705049913072943)
m.c808 = Constraint(expr= m.x547 + 0.705049913072943*m.b673 <= 0.705049913072943)
m.c809 = Constraint(expr= m.x566 - 0.666992981045719*m.b671 <= 0)
m.c810 = Constraint(expr= m.x567 - 0.666992981045719*m.b672 <= 0)
m.c811 = Constraint(expr= m.x568 - 0.666992981045719*m.b673 <= 0)
m.c812 = Constraint(expr= m.x569 + 0.666992981045719*m.b671 <= 0.666992981045719)
m.c813 = Constraint(expr= m.x570 + 0.666992981045719*m.b672 <= 0.666992981045719)
m.c814 = Constraint(expr= m.x571 + 0.666992981045719*m.b673 <= 0.666992981045719)
m.c815 = Constraint(expr=(m.x572/(0.001 + 0.999*m.b674) - 0.9*log(1 + m.x548/(0.001 + 0.999*m.b674)))*(0.001 + 0.999*
m.b674) <= 0)
m.c816 = Constraint(expr=(m.x573/(0.001 + 0.999*m.b675) - 0.9*log(1 + m.x549/(0.001 + 0.999*m.b675)))*(0.001 + 0.999*
m.b675) <= 0)
m.c817 = Constraint(expr=(m.x574/(0.001 + 0.999*m.b676) - 0.9*log(1 + m.x550/(0.001 + 0.999*m.b676)))*(0.001 + 0.999*
m.b676) <= 0)
m.c818 = Constraint(expr= m.x551 == 0)
m.c819 = Constraint(expr= m.x552 == 0)
m.c820 = Constraint(expr= m.x553 == 0)
m.c821 = Constraint(expr= m.x575 == 0)
m.c822 = Constraint(expr= m.x576 == 0)
m.c823 = Constraint(expr= m.x577 == 0)
m.c824 = Constraint(expr= m.x185 - m.x548 - m.x551 == 0)
m.c825 = Constraint(expr= m.x186 - m.x549 - m.x552 == 0)
m.c826 = Constraint(expr= m.x187 - m.x550 - m.x553 == 0)
m.c827 = Constraint(expr= m.x200 - m.x572 - m.x575 == 0)
m.c828 = Constraint(expr= m.x201 - m.x573 - m.x576 == 0)
m.c829 = Constraint(expr= m.x202 - m.x574 - m.x577 == 0)
m.c830 = Constraint(expr= m.x548 - 0.705049913072943*m.b674 <= 0)
m.c831 = Constraint(expr= m.x549 - 0.705049913072943*m.b675 <= 0)
m.c832 = Constraint(expr= m.x550 - 0.705049913072943*m.b676 <= 0)
m.c833 = Constraint(expr= m.x551 + 0.705049913072943*m.b674 <= 0.705049913072943)
m.c834 = Constraint(expr= m.x552 + 0.705049913072943*m.b675 <= 0.705049913072943)
m.c835 = Constraint(expr= m.x553 + 0.705049913072943*m.b676 <= 0.705049913072943)
m.c836 = Constraint(expr= m.x572 - 0.480234946352917*m.b674 <= 0)
m.c837 = Constraint(expr= m.x573 - 0.480234946352917*m.b675 <= 0)
m.c838 = Constraint(expr= m.x574 - 0.480234946352917*m.b676 <= 0)
m.c839 = Constraint(expr= m.x575 + 0.480234946352917*m.b674 <= 0.480234946352917)
m.c840 = Constraint(expr= m.x576 + 0.480234946352917*m.b675 <= 0.480234946352917)
m.c841 = Constraint(expr= m.x577 + 0.480234946352917*m.b676 <= 0.480234946352917)
m.c842 = Constraint(expr=(m.x578/(0.001 + 0.999*m.b677) - log(1 + m.x527/(0.001 + 0.999*m.b677)))*(0.001 + 0.999*m.b677)
<= 0)
m.c843 = Constraint(expr=(m.x579/(0.001 + 0.999*m.b678) - log(1 + m.x528/(0.001 + 0.999*m.b678)))*(0.001 + 0.999*m.b678)
<= 0)
m.c844 = Constraint(expr=(m.x580/(0.001 + 0.999*m.b679) - log(1 + m.x529/(0.001 + 0.999*m.b679)))*(0.001 + 0.999*m.b679)
<= 0)
m.c845 = Constraint(expr= m.x533 == 0)
m.c846 = Constraint(expr= m.x534 == 0)
m.c847 = Constraint(expr= m.x535 == 0)
m.c848 = Constraint(expr= m.x581 == 0)
m.c849 = Constraint(expr= m.x582 == 0)
m.c850 = Constraint(expr= m.x583 == 0)
m.c851 = Constraint(expr= m.x176 - m.x527 - m.x533 == 0)
m.c852 = Constraint(expr= m.x177 - m.x528 - m.x534 == 0)
m.c853 = Constraint(expr= m.x178 - m.x529 - m.x535 == 0)
m.c854 = Constraint(expr= m.x203 - m.x578 - m.x581 == 0)
m.c855 = Constraint(expr= m.x204 - m.x579 - m.x582 == 0)
m.c856 = Constraint(expr= m.x205 - m.x580 - m.x583 == 0)
m.c857 = Constraint(expr= m.x527 - 0.994083415506506*m.b677 <= 0)
m.c858 = Constraint(expr= m.x528 - 0.994083415506506*m.b678 <= 0)
m.c859 = Constraint(expr= m.x529 - 0.994083415506506*m.b679 <= 0)
m.c860 = Constraint(expr= m.x533 + 0.994083415506506*m.b677 <= 0.994083415506506)
m.c861 = Constraint(expr= m.x534 + 0.994083415506506*m.b678 <= 0.994083415506506)
m.c862 = Constraint(expr= m.x535 + 0.994083415506506*m.b679 <= 0.994083415506506)
m.c863 = Constraint(expr= m.x578 - 0.690184503917672*m.b677 <= 0)
m.c864 = Constraint(expr= m.x579 - 0.690184503917672*m.b678 <= 0)
m.c865 = Constraint(expr= m.x580 - 0.690184503917672*m.b679 <= 0)
m.c866 = Constraint(expr= m.x581 + 0.690184503917672*m.b677 <= 0.690184503917672)
m.c867 = Constraint(expr= m.x582 + 0.690184503917672*m.b678 <= 0.690184503917672)
m.c868 = Constraint(expr= m.x583 + 0.690184503917672*m.b679 <= 0.690184503917672)
m.c869 = Constraint(expr= - 0.9*m.x554 + m.x584 == 0)
m.c870 = Constraint(expr= - 0.9*m.x555 + m.x585 == 0)
m.c871 = Constraint(expr= - 0.9*m.x556 + m.x586 == 0)
m.c872 = Constraint(expr= m.x557 == 0)
m.c873 = Constraint(expr= m.x558 == 0)
m.c874 = Constraint(expr= m.x559 == 0)
m.c875 = Constraint(expr= m.x587 == 0)
m.c876 = Constraint(expr= m.x588 == 0)
m.c877 = Constraint(expr= m.x589 == 0)
m.c878 = Constraint(expr= m.x188 - m.x554 - m.x557 == 0)
m.c879 = Constraint(expr= m.x189 - m.x555 - m.x558 == 0)
m.c880 = Constraint(expr= m.x190 - m.x556 - m.x559 == 0)
m.c881 = Constraint(expr= m.x206 - m.x584 - m.x587 == 0)
m.c882 = Constraint(expr= m.x207 - m.x585 - m.x588 == 0)
m.c883 = Constraint(expr= m.x208 - m.x586 - m.x589 == 0)
m.c884 = Constraint(expr= m.x554 - 15*m.b680 <= 0)
m.c885 = Constraint(expr= m.x555 - 15*m.b681 <= 0)
m.c886 = Constraint(expr= m.x556 - 15*m.b682 <= 0)
m.c887 = Constraint(expr= m.x557 + 15*m.b680 <= 15)
m.c888 = Constraint(expr= m.x558 + 15*m.b681 <= 15)
m.c889 = Constraint(expr= m.x559 + 15*m.b682 <= 15)
m.c890 = Constraint(expr= m.x584 - 13.5*m.b680 <= 0)
m.c891 = Constraint(expr= m.x585 - 13.5*m.b681 <= 0)
m.c892 = Constraint(expr= m.x586 - 13.5*m.b682 <= 0)
m.c893 = Constraint(expr= m.x587 + 13.5*m.b680 <= 13.5)
m.c894 = Constraint(expr= m.x588 + 13.5*m.b681 <= 13.5)
m.c895 = Constraint(expr= m.x589 + 13.5*m.b682 <= 13.5)
m.c896 = Constraint(expr= - 0.6*m.x560 + m.x590 == 0)
m.c897 = Constraint(expr= - 0.6*m.x561 + m.x591 == 0)
m.c898 = Constraint(expr= - 0.6*m.x562 + m.x592 == 0)
m.c899 = Constraint(expr= m.x563 == 0)
m.c900 = Constraint(expr= m.x564 == 0)
m.c901 = Constraint(expr= m.x565 == 0)
m.c902 = Constraint(expr= m.x593 == 0)
m.c903 = Constraint(expr= m.x594 == 0)
m.c904 = Constraint(expr= m.x595 == 0)
m.c905 = Constraint(expr= m.x191 - m.x560 - m.x563 == 0)
m.c906 = Constraint(expr= m.x192 - m.x561 - m.x564 == 0)
m.c907 = Constraint(expr= m.x193 - m.x562 - m.x565 == 0)
m.c908 = Constraint(expr= m.x209 - m.x590 - m.x593 == 0)
m.c909 = Constraint(expr= m.x210 - m.x591 - m.x594 == 0)
m.c910 = Constraint(expr= m.x211 - m.x592 - m.x595 == 0)
m.c911 = Constraint(expr= m.x560 - 15*m.b683 <= 0)
m.c912 = Constraint(expr= m.x561 - 15*m.b684 <= 0)
m.c913 = Constraint(expr= m.x562 - 15*m.b685 <= 0)
m.c914 = Constraint(expr= m.x563 + 15*m.b683 <= 15)
m.c915 = Constraint(expr= m.x564 + 15*m.b684 <= 15)
m.c916 = Constraint(expr= m.x565 + 15*m.b685 <= 15)
m.c917 = Constraint(expr= m.x590 - 9*m.b683 <= 0)
m.c918 = Constraint(expr= m.x591 - 9*m.b684 <= 0)
m.c919 = Constraint(expr= m.x592 - 9*m.b685 <= 0)
m.c920 = Constraint(expr= m.x593 + 9*m.b683 <= 9)
m.c921 = Constraint(expr= m.x594 + 9*m.b684 <= 9)
m.c922 = Constraint(expr= m.x595 + 9*m.b685 <= 9)
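# Fixed-charge linking rows: each auxiliary variable is set to minus a unit's
# fixed cost times its selection binary (e.g. x776 = -5*b686), presumably so
# the objective can collect the charges with a plain linear sum (inference;
# the objective is defined elsewhere in the file).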
m.c923 = Constraint(expr= 5*m.b686 + m.x776 == 0)
m.c924 = Constraint(expr= 4*m.b687 + m.x777 == 0)
m.c925 = Constraint(expr= 6*m.b688 + m.x778 == 0)
m.c926 = Constraint(expr= 8*m.b689 + m.x779 == 0)
m.c927 = Constraint(expr= 7*m.b690 + m.x780 == 0)
m.c928 = Constraint(expr= 6*m.b691 + m.x781 == 0)
m.c929 = Constraint(expr= 6*m.b692 + m.x782 == 0)
m.c930 = Constraint(expr= 9*m.b693 + m.x783 == 0)
m.c931 = Constraint(expr= 4*m.b694 + m.x784 == 0)
m.c932 = Constraint(expr= 10*m.b695 + m.x785 == 0)
m.c933 = Constraint(expr= 9*m.b696 + m.x786 == 0)
m.c934 = Constraint(expr= 5*m.b697 + m.x787 == 0)
m.c935 = Constraint(expr= 6*m.b698 + m.x788 == 0)
m.c936 = Constraint(expr= 10*m.b699 + m.x789 == 0)
m.c937 = Constraint(expr= 6*m.b700 + m.x790 == 0)
m.c938 = Constraint(expr= 7*m.b701 + m.x791 == 0)
m.c939 = Constraint(expr= 7*m.b702 + m.x792 == 0)
m.c940 = Constraint(expr= 4*m.b703 + m.x793 == 0)
m.c941 = Constraint(expr= 4*m.b704 + m.x794 == 0)
m.c942 = Constraint(expr= 3*m.b705 + m.x795 == 0)
m.c943 = Constraint(expr= 2*m.b706 + m.x796 == 0)
m.c944 = Constraint(expr= 5*m.b707 + m.x797 == 0)
m.c945 = Constraint(expr= 6*m.b708 + m.x798 == 0)
m.c946 = Constraint(expr= 7*m.b709 + m.x799 == 0)
m.c947 = Constraint(expr= 2*m.b710 + m.x800 == 0)
m.c948 = Constraint(expr= 5*m.b711 + m.x801 == 0)
m.c949 = Constraint(expr= 2*m.b712 + m.x802 == 0)
m.c950 = Constraint(expr= 4*m.b713 + m.x803 == 0)
m.c951 = Constraint(expr= 7*m.b714 + m.x804 == 0)
m.c952 = Constraint(expr= 4*m.b715 + m.x805 == 0)
m.c953 = Constraint(expr= 3*m.b716 + m.x806 == 0)
m.c954 = Constraint(expr= 9*m.b717 + m.x807 == 0)
m.c955 = Constraint(expr= 3*m.b718 + m.x808 == 0)
m.c956 = Constraint(expr= 7*m.b719 + m.x809 == 0)
m.c957 = Constraint(expr= 2*m.b720 + m.x810 == 0)
m.c958 = Constraint(expr= 9*m.b721 + m.x811 == 0)
m.c959 = Constraint(expr= 3*m.b722 + m.x812 == 0)
m.c960 = Constraint(expr= m.b723 + m.x813 == 0)
m.c961 = Constraint(expr= 9*m.b724 + m.x814 == 0)
m.c962 = Constraint(expr= 2*m.b725 + m.x815 == 0)
m.c963 = Constraint(expr= 6*m.b726 + m.x816 == 0)
m.c964 = Constraint(expr= 3*m.b727 + m.x817 == 0)
m.c965 = Constraint(expr= 4*m.b728 + m.x818 == 0)
m.c966 = Constraint(expr= 8*m.b729 + m.x819 == 0)
m.c967 = Constraint(expr= m.b730 + m.x820 == 0)
m.c968 = Constraint(expr= 2*m.b731 + m.x821 == 0)
m.c969 = Constraint(expr= 5*m.b732 + m.x822 == 0)
m.c970 = Constraint(expr= 2*m.b733 + m.x823 == 0)
m.c971 = Constraint(expr= 3*m.b734 + m.x824 == 0)
m.c972 = Constraint(expr= 4*m.b735 + m.x825 == 0)
m.c973 = Constraint(expr= 3*m.b736 + m.x826 == 0)
m.c974 = Constraint(expr= 5*m.b737 + m.x827 == 0)
m.c975 = Constraint(expr= 7*m.b738 + m.x828 == 0)
m.c976 = Constraint(expr= 6*m.b739 + m.x829 == 0)
m.c977 = Constraint(expr= 2*m.b740 + m.x830 == 0)
m.c978 = Constraint(expr= 8*m.b741 + m.x831 == 0)
m.c979 = Constraint(expr= 4*m.b742 + m.x832 == 0)
m.c980 = Constraint(expr= m.b743 + m.x833 == 0)
m.c981 = Constraint(expr= 4*m.b744 + m.x834 == 0)
m.c982 = Constraint(expr= m.b745 + m.x835 == 0)
m.c983 = Constraint(expr= 2*m.b746 + m.x836 == 0)
m.c984 = Constraint(expr= 5*m.b747 + m.x837 == 0)
m.c985 = Constraint(expr= 2*m.b748 + m.x838 == 0)
m.c986 = Constraint(expr= 9*m.b749 + m.x839 == 0)
m.c987 = Constraint(expr= 2*m.b750 + m.x840 == 0)
m.c988 = Constraint(expr= 9*m.b751 + m.x841 == 0)
m.c989 = Constraint(expr= 5*m.b752 + m.x842 == 0)
m.c990 = Constraint(expr= 8*m.b753 + m.x843 == 0)
m.c991 = Constraint(expr= 4*m.b754 + m.x844 == 0)
m.c992 = Constraint(expr= 2*m.b755 + m.x845 == 0)
m.c993 = Constraint(expr= 3*m.b756 + m.x846 == 0)
m.c994 = Constraint(expr= 8*m.b757 + m.x847 == 0)
m.c995 = Constraint(expr= 10*m.b758 + m.x848 == 0)
m.c996 = Constraint(expr= 6*m.b759 + m.x849 == 0)
m.c997 = Constraint(expr= 3*m.b760 + m.x850 == 0)
m.c998 = Constraint(expr= 4*m.b761 + m.x851 == 0)
m.c999 = Constraint(expr= 8*m.b762 + m.x852 == 0)
m.c1000 = Constraint(expr= 7*m.b763 + m.x853 == 0)
m.c1001 = Constraint(expr= 7*m.b764 + m.x854 == 0)
m.c1002 = Constraint(expr= 3*m.b765 + m.x855 == 0)
m.c1003 = Constraint(expr= 9*m.b766 + m.x856 == 0)
m.c1004 = Constraint(expr= 4*m.b767 + m.x857 == 0)
m.c1005 = Constraint(expr= 8*m.b768 + m.x858 == 0)
m.c1006 = Constraint(expr= 6*m.b769 + m.x859 == 0)
m.c1007 = Constraint(expr= 2*m.b770 + m.x860 == 0)
m.c1008 = Constraint(expr= m.b771 + m.x861 == 0)
m.c1009 = Constraint(expr= 3*m.b772 + m.x862 == 0)
m.c1010 = Constraint(expr= 8*m.b773 + m.x863 == 0)
m.c1011 = Constraint(expr= 3*m.b774 + m.x864 == 0)
m.c1012 = Constraint(expr= 4*m.b775 + m.x865 == 0)
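# Ordering rows over each indicator triple, e.g. b596 <= b597 <= b598: with
# three copies per unit this reads like monotone selection across three
# periods (a unit, once selected, stays selected) and also breaks symmetry
# among the copies.  Interpretation inferred from the pattern.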
m.c1013 = Constraint(expr= m.b596 - m.b597 <= 0)
m.c1014 = Constraint(expr= m.b596 - m.b598 <= 0)
m.c1015 = Constraint(expr= m.b597 - m.b598 <= 0)
m.c1016 = Constraint(expr= m.b599 - m.b600 <= 0)
m.c1017 = Constraint(expr= m.b599 - m.b601 <= 0)
m.c1018 = Constraint(expr= m.b600 - m.b601 <= 0)
m.c1019 = Constraint(expr= m.b602 - m.b603 <= 0)
m.c1020 = Constraint(expr= m.b602 - m.b604 <= 0)
m.c1021 = Constraint(expr= m.b603 - m.b604 <= 0)
m.c1022 = Constraint(expr= m.b605 - m.b606 <= 0)
m.c1023 = Constraint(expr= m.b605 - m.b607 <= 0)
m.c1024 = Constraint(expr= m.b606 - m.b607 <= 0)
m.c1025 = Constraint(expr= m.b608 - m.b609 <= 0)
m.c1026 = Constraint(expr= m.b608 - m.b610 <= 0)
m.c1027 = Constraint(expr= m.b609 - m.b610 <= 0)
m.c1028 = Constraint(expr= m.b611 - m.b612 <= 0)
m.c1029 = Constraint(expr= m.b611 - m.b613 <= 0)
m.c1030 = Constraint(expr= m.b612 - m.b613 <= 0)
m.c1031 = Constraint(expr= m.b614 - m.b615 <= 0)
m.c1032 = Constraint(expr= m.b614 - m.b616 <= 0)
m.c1033 = Constraint(expr= m.b615 - m.b616 <= 0)
m.c1034 = Constraint(expr= m.b617 - m.b618 <= 0)
m.c1035 = Constraint(expr= m.b617 - m.b619 <= 0)
m.c1036 = Constraint(expr= m.b618 - m.b619 <= 0)
m.c1037 = Constraint(expr= m.b620 - m.b621 <= 0)
m.c1038 = Constraint(expr= m.b620 - m.b622 <= 0)
m.c1039 = Constraint(expr= m.b621 - m.b622 <= 0)
m.c1040 = Constraint(expr= m.b623 - m.b624 <= 0)
m.c1041 = Constraint(expr= m.b623 - m.b625 <= 0)
m.c1042 = Constraint(expr= m.b624 - m.b625 <= 0)
m.c1043 = Constraint(expr= m.b626 - m.b627 <= 0)
m.c1044 = Constraint(expr= m.b626 - m.b628 <= 0)
m.c1045 = Constraint(expr= m.b627 - m.b628 <= 0)
m.c1046 = Constraint(expr= m.b629 - m.b630 <= 0)
m.c1047 = Constraint(expr= m.b629 - m.b631 <= 0)
m.c1048 = Constraint(expr= m.b630 - m.b631 <= 0)
m.c1049 = Constraint(expr= m.b632 - m.b633 <= 0)
m.c1050 = Constraint(expr= m.b632 - m.b634 <= 0)
m.c1051 = Constraint(expr= m.b633 - m.b634 <= 0)
m.c1052 = Constraint(expr= m.b635 - m.b636 <= 0)
m.c1053 = Constraint(expr= m.b635 - m.b637 <= 0)
m.c1054 = Constraint(expr= m.b636 - m.b637 <= 0)
m.c1055 = Constraint(expr= m.b638 - m.b639 <= 0)
m.c1056 = Constraint(expr= m.b638 - m.b640 <= 0)
m.c1057 = Constraint(expr= m.b639 - m.b640 <= 0)
m.c1058 = Constraint(expr= m.b641 - m.b642 <= 0)
m.c1059 = Constraint(expr= m.b641 - m.b643 <= 0)
m.c1060 = Constraint(expr= m.b642 - m.b643 <= 0)
m.c1061 = Constraint(expr= m.b644 - m.b645 <= 0)
m.c1062 = Constraint(expr= m.b644 - m.b646 <= 0)
m.c1063 = Constraint(expr= m.b645 - m.b646 <= 0)
m.c1064 = Constraint(expr= m.b647 - m.b648 <= 0)
m.c1065 = Constraint(expr= m.b647 - m.b649 <= 0)
m.c1066 = Constraint(expr= m.b648 - m.b649 <= 0)
m.c1067 = Constraint(expr= m.b650 - m.b651 <= 0)
m.c1068 = Constraint(expr= m.b650 - m.b652 <= 0)
m.c1069 = Constraint(expr= m.b651 - m.b652 <= 0)
m.c1070 = Constraint(expr= m.b653 - m.b654 <= 0)
m.c1071 = Constraint(expr= m.b653 - m.b655 <= 0)
m.c1072 = Constraint(expr= m.b654 - m.b655 <= 0)
m.c1073 = Constraint(expr= m.b656 - m.b657 <= 0)
m.c1074 = Constraint(expr= m.b656 - m.b658 <= 0)
m.c1075 = Constraint(expr= m.b657 - m.b658 <= 0)
m.c1076 = Constraint(expr= m.b659 - m.b660 <= 0)
m.c1077 = Constraint(expr= m.b659 - m.b661 <= 0)
m.c1078 = Constraint(expr= m.b660 - m.b661 <= 0)
m.c1079 = Constraint(expr= m.b662 - m.b663 <= 0)
m.c1080 = Constraint(expr= m.b662 - m.b664 <= 0)
m.c1081 = Constraint(expr= m.b663 - m.b664 <= 0)
m.c1082 = Constraint(expr= m.b665 - m.b666 <= 0)
m.c1083 = Constraint(expr= m.b665 - m.b667 <= 0)
m.c1084 = Constraint(expr= m.b666 - m.b667 <= 0)
m.c1085 = Constraint(expr= m.b668 - m.b669 <= 0)
m.c1086 = Constraint(expr= m.b668 - m.b670 <= 0)
m.c1087 = Constraint(expr= m.b669 - m.b670 <= 0)
m.c1088 = Constraint(expr= m.b671 - m.b672 <= 0)
m.c1089 = Constraint(expr= m.b671 - m.b673 <= 0)
m.c1090 = Constraint(expr= m.b672 - m.b673 <= 0)
m.c1091 = Constraint(expr= m.b674 - m.b675 <= 0)
m.c1092 = Constraint(expr= m.b674 - m.b676 <= 0)
m.c1093 = Constraint(expr= m.b675 - m.b676 <= 0)
m.c1094 = Constraint(expr= m.b677 - m.b678 <= 0)
m.c1095 = Constraint(expr= m.b677 - m.b679 <= 0)
m.c1096 = Constraint(expr= m.b678 - m.b679 <= 0)
m.c1097 = Constraint(expr= m.b680 - m.b681 <= 0)
m.c1098 = Constraint(expr= m.b680 - m.b682 <= 0)
m.c1099 = Constraint(expr= m.b681 - m.b682 <= 0)
m.c1100 = Constraint(expr= m.b683 - m.b684 <= 0)
m.c1101 = Constraint(expr= m.b683 - m.b685 <= 0)
m.c1102 = Constraint(expr= m.b684 - m.b685 <= 0)
m.c1103 = Constraint(expr= m.b686 + m.b687 <= 1)
m.c1104 = Constraint(expr= m.b686 + m.b688 <= 1)
m.c1105 = Constraint(expr= m.b686 + m.b687 <= 1)
m.c1106 = Constraint(expr= m.b687 + m.b688 <= 1)
m.c1107 = Constraint(expr= m.b686 + m.b688 <= 1)
m.c1108 = Constraint(expr= m.b687 + m.b688 <= 1)
m.c1109 = Constraint(expr= m.b689 + m.b690 <= 1)
m.c1110 = Constraint(expr= m.b689 + m.b691 <= 1)
m.c1111 = Constraint(expr= m.b689 + m.b690 <= 1)
m.c1112 = Constraint(expr= m.b690 + m.b691 <= 1)
m.c1113 = Constraint(expr= m.b689 + m.b691 <= 1)
m.c1114 = Constraint(expr= m.b690 + m.b691 <= 1)
m.c1115 = Constraint(expr= m.b692 + m.b693 <= 1)
m.c1116 = Constraint(expr= m.b692 + m.b694 <= 1)
m.c1117 = Constraint(expr= m.b692 + m.b693 <= 1)
m.c1118 = Constraint(expr= m.b693 + m.b694 <= 1)
m.c1119 = Constraint(expr= m.b692 + m.b694 <= 1)
m.c1120 = Constraint(expr= m.b693 + m.b694 <= 1)
m.c1121 = Constraint(expr= m.b695 + m.b696 <= 1)
m.c1122 = Constraint(expr= m.b695 + m.b697 <= 1)
m.c1123 = Constraint(expr= m.b695 + m.b696 <= 1)
m.c1124 = Constraint(expr= m.b696 + m.b697 <= 1)
m.c1125 = Constraint(expr= m.b695 + m.b697 <= 1)
m.c1126 = Constraint(expr= m.b696 + m.b697 <= 1)
m.c1127 = Constraint(expr= m.b698 + m.b699 <= 1)
m.c1128 = Constraint(expr= m.b698 + m.b700 <= 1)
m.c1129 = Constraint(expr= m.b698 + m.b699 <= 1)
m.c1130 = Constraint(expr= m.b699 + m.b700 <= 1)
m.c1131 = Constraint(expr= m.b698 + m.b700 <= 1)
m.c1132 = Constraint(expr= m.b699 + m.b700 <= 1)
m.c1133 = Constraint(expr= m.b701 + m.b702 <= 1)
m.c1134 = Constraint(expr= m.b701 + m.b703 <= 1)
m.c1135 = Constraint(expr= m.b701 + m.b702 <= 1)
m.c1136 = Constraint(expr= m.b702 + m.b703 <= 1)
m.c1137 = Constraint(expr= m.b701 + m.b703 <= 1)
m.c1138 = Constraint(expr= m.b702 + m.b703 <= 1)
m.c1139 = Constraint(expr= m.b704 + m.b705 <= 1)
m.c1140 = Constraint(expr= m.b704 + m.b706 <= 1)
m.c1141 = Constraint(expr= m.b704 + m.b705 <= 1)
m.c1142 = Constraint(expr= m.b705 + m.b706 <= 1)
m.c1143 = Constraint(expr= m.b704 + m.b706 <= 1)
m.c1144 = Constraint(expr= m.b705 + m.b706 <= 1)
m.c1145 = Constraint(expr= m.b707 + m.b708 <= 1)
m.c1146 = Constraint(expr= m.b707 + m.b709 <= 1)
m.c1147 = Constraint(expr= m.b707 + m.b708 <= 1)
m.c1148 = Constraint(expr= m.b708 + m.b709 <= 1)
m.c1149 = Constraint(expr= m.b707 + m.b709 <= 1)
m.c1150 = Constraint(expr= m.b708 + m.b709 <= 1)
m.c1151 = Constraint(expr= m.b710 + m.b711 <= 1)
m.c1152 = Constraint(expr= m.b710 + m.b712 <= 1)
m.c1153 = Constraint(expr= m.b710 + m.b711 <= 1)
m.c1154 = Constraint(expr= m.b711 + m.b712 <= 1)
m.c1155 = Constraint(expr= m.b710 + m.b712 <= 1)
m.c1156 = Constraint(expr= m.b711 + m.b712 <= 1)
m.c1157 = Constraint(expr= m.b713 + m.b714 <= 1)
m.c1158 = Constraint(expr= m.b713 + m.b715 <= 1)
m.c1159 = Constraint(expr= m.b713 + m.b714 <= 1)
m.c1160 = Constraint(expr= m.b714 + m.b715 <= 1)
m.c1161 = Constraint(expr= m.b713 + m.b715 <= 1)
m.c1162 = Constraint(expr= m.b714 + m.b715 <= 1)
m.c1163 = Constraint(expr= m.b716 + m.b717 <= 1)
m.c1164 = Constraint(expr= m.b716 + m.b718 <= 1)
m.c1165 = Constraint(expr= m.b716 + m.b717 <= 1)
m.c1166 = Constraint(expr= m.b717 + m.b718 <= 1)
m.c1167 = Constraint(expr= m.b716 + m.b718 <= 1)
m.c1168 = Constraint(expr= m.b717 + m.b718 <= 1)
m.c1169 = Constraint(expr= m.b719 + m.b720 <= 1)
m.c1170 = Constraint(expr= m.b719 + m.b721 <= 1)
m.c1171 = Constraint(expr= m.b719 + m.b720 <= 1)
m.c1172 = Constraint(expr= m.b720 + m.b721 <= 1)
m.c1173 = Constraint(expr= m.b719 + m.b721 <= 1)
m.c1174 = Constraint(expr= m.b720 + m.b721 <= 1)
m.c1175 = Constraint(expr= m.b722 + m.b723 <= 1)
m.c1176 = Constraint(expr= m.b722 + m.b724 <= 1)
m.c1177 = Constraint(expr= m.b722 + m.b723 <= 1)
m.c1178 = Constraint(expr= m.b723 + m.b724 <= 1)
m.c1179 = Constraint(expr= m.b722 + m.b724 <= 1)
m.c1180 = Constraint(expr= m.b723 + m.b724 <= 1)
m.c1181 = Constraint(expr= m.b725 + m.b726 <= 1)
m.c1182 = Constraint(expr= m.b725 + m.b727 <= 1)
m.c1183 = Constraint(expr= m.b725 + m.b726 <= 1)
m.c1184 = Constraint(expr= m.b726 + m.b727 <= 1)
m.c1185 = Constraint(expr= m.b725 + m.b727 <= 1)
m.c1186 = Constraint(expr= m.b726 + m.b727 <= 1)
m.c1187 = Constraint(expr= m.b728 + m.b729 <= 1)
m.c1188 = Constraint(expr= m.b728 + m.b730 <= 1)
m.c1189 = Constraint(expr= m.b728 + m.b729 <= 1)
m.c1190 = Constraint(expr= m.b729 + m.b730 <= 1)
m.c1191 = Constraint(expr= m.b728 + m.b730 <= 1)
m.c1192 = Constraint(expr= m.b729 + m.b730 <= 1)
m.c1193 = Constraint(expr= m.b731 + m.b732 <= 1)
m.c1194 = Constraint(expr= m.b731 + m.b733 <= 1)
m.c1195 = Constraint(expr= m.b731 + m.b732 <= 1)
m.c1196 = Constraint(expr= m.b732 + m.b733 <= 1)
m.c1197 = Constraint(expr= m.b731 + m.b733 <= 1)
m.c1198 = Constraint(expr= m.b732 + m.b733 <= 1)
m.c1199 = Constraint(expr= m.b734 + m.b735 <= 1)
m.c1200 = Constraint(expr= m.b734 + m.b736 <= 1)
m.c1201 = Constraint(expr= m.b734 + m.b735 <= 1)
m.c1202 = Constraint(expr= m.b735 + m.b736 <= 1)
m.c1203 = Constraint(expr= m.b734 + m.b736 <= 1)
m.c1204 = Constraint(expr= m.b735 + m.b736 <= 1)
m.c1205 = Constraint(expr= m.b737 + m.b738 <= 1)
m.c1206 = Constraint(expr= m.b737 + m.b739 <= 1)
m.c1207 = Constraint(expr= m.b737 + m.b738 <= 1)
m.c1208 = Constraint(expr= m.b738 + m.b739 <= 1)
m.c1209 = Constraint(expr= m.b737 + m.b739 <= 1)
m.c1210 = Constraint(expr= m.b738 + m.b739 <= 1)
m.c1211 = Constraint(expr= m.b740 + m.b741 <= 1)
m.c1212 = Constraint(expr= m.b740 + m.b742 <= 1)
m.c1213 = Constraint(expr= m.b740 + m.b741 <= 1)
m.c1214 = Constraint(expr= m.b741 + m.b742 <= 1)
m.c1215 = Constraint(expr= m.b740 + m.b742 <= 1)
m.c1216 = Constraint(expr= m.b741 + m.b742 <= 1)
m.c1217 = Constraint(expr= m.b743 + m.b744 <= 1)
m.c1218 = Constraint(expr= m.b743 + m.b745 <= 1)
m.c1219 = Constraint(expr= m.b743 + m.b744 <= 1)
m.c1220 = Constraint(expr= m.b744 + m.b745 <= 1)
m.c1221 = Constraint(expr= m.b743 + m.b745 <= 1)
m.c1222 = Constraint(expr= m.b744 + m.b745 <= 1)
m.c1223 = Constraint(expr= m.b746 + m.b747 <= 1)
m.c1224 = Constraint(expr= m.b746 + m.b748 <= 1)
m.c1225 = Constraint(expr= m.b746 + m.b747 <= 1)
m.c1226 = Constraint(expr= m.b747 + m.b748 <= 1)
m.c1227 = Constraint(expr= m.b746 + m.b748 <= 1)
m.c1228 = Constraint(expr= m.b747 + m.b748 <= 1)
m.c1229 = Constraint(expr= m.b749 + m.b750 <= 1)
m.c1230 = Constraint(expr= m.b749 + m.b751 <= 1)
m.c1231 = Constraint(expr= m.b749 + m.b750 <= 1)
m.c1232 = Constraint(expr= m.b750 + m.b751 <= 1)
m.c1233 = Constraint(expr= m.b749 + m.b751 <= 1)
m.c1234 = Constraint(expr= m.b750 + m.b751 <= 1)
m.c1235 = Constraint(expr= m.b752 + m.b753 <= 1)
m.c1236 = Constraint(expr= m.b752 + m.b754 <= 1)
m.c1237 = Constraint(expr= m.b752 + m.b753 <= 1)
m.c1238 = Constraint(expr= m.b753 + m.b754 <= 1)
m.c1239 = Constraint(expr= m.b752 + m.b754 <= 1)
m.c1240 = Constraint(expr= m.b753 + m.b754 <= 1)
m.c1241 = Constraint(expr= m.b755 + m.b756 <= 1)
m.c1242 = Constraint(expr= m.b755 + m.b757 <= 1)
m.c1243 = Constraint(expr= m.b755 + m.b756 <= 1)
m.c1244 = Constraint(expr= m.b756 + m.b757 <= 1)
m.c1245 = Constraint(expr= m.b755 + m.b757 <= 1)
m.c1246 = Constraint(expr= m.b756 + m.b757 <= 1)
m.c1247 = Constraint(expr= m.b758 + m.b759 <= 1)
m.c1248 = Constraint(expr= m.b758 + m.b760 <= 1)
m.c1249 = Constraint(expr= m.b758 + m.b759 <= 1)
m.c1250 = Constraint(expr= m.b759 + m.b760 <= 1)
m.c1251 = Constraint(expr= m.b758 + m.b760 <= 1)
m.c1252 = Constraint(expr= m.b759 + m.b760 <= 1)
m.c1253 = Constraint(expr= m.b761 + m.b762 <= 1)
m.c1254 = Constraint(expr= m.b761 + m.b763 <= 1)
m.c1255 = Constraint(expr= m.b761 + m.b762 <= 1)
m.c1256 = Constraint(expr= m.b762 + m.b763 <= 1)
m.c1257 = Constraint(expr= m.b761 + m.b763 <= 1)
m.c1258 = Constraint(expr= m.b762 + m.b763 <= 1)
m.c1259 = Constraint(expr= m.b764 + m.b765 <= 1)
m.c1260 = Constraint(expr= m.b764 + m.b766 <= 1)
m.c1261 = Constraint(expr= m.b764 + m.b765 <= 1)
m.c1262 = Constraint(expr= m.b765 + m.b766 <= 1)
m.c1263 = Constraint(expr= m.b764 + m.b766 <= 1)
m.c1264 = Constraint(expr= m.b765 + m.b766 <= 1)
m.c1265 = Constraint(expr= m.b767 + m.b768 <= 1)
m.c1266 = Constraint(expr= m.b767 + m.b769 <= 1)
m.c1267 = Constraint(expr= m.b767 + m.b768 <= 1)
m.c1268 = Constraint(expr= m.b768 + m.b769 <= 1)
m.c1269 = Constraint(expr= m.b767 + m.b769 <= 1)
m.c1270 = Constraint(expr= m.b768 + m.b769 <= 1)
m.c1271 = Constraint(expr= m.b770 + m.b771 <= 1)
m.c1272 = Constraint(expr= m.b770 + m.b772 <= 1)
m.c1273 = Constraint(expr= m.b770 + m.b771 <= 1)
m.c1274 = Constraint(expr= m.b771 + m.b772 <= 1)
m.c1275 = Constraint(expr= m.b770 + m.b772 <= 1)
m.c1276 = Constraint(expr= m.b771 + m.b772 <= 1)
m.c1277 = Constraint(expr= m.b773 + m.b774 <= 1)
m.c1278 = Constraint(expr= m.b773 + m.b775 <= 1)
m.c1279 = Constraint(expr= m.b773 + m.b774 <= 1)
m.c1280 = Constraint(expr= m.b774 + m.b775 <= 1)
m.c1281 = Constraint(expr= m.b773 + m.b775 <= 1)
m.c1282 = Constraint(expr= m.b774 + m.b775 <= 1)
m.c1283 = Constraint(expr= m.b596 - m.b686 <= 0)
m.c1284 = Constraint(expr= - m.b596 + m.b597 - m.b687 <= 0)
m.c1285 = Constraint(expr= - m.b596 - m.b597 + m.b598 - m.b688 <= 0)
m.c1286 = Constraint(expr= m.b599 - m.b689 <= 0)
m.c1287 = Constraint(expr= - m.b599 + m.b600 - m.b690 <= 0)
m.c1288 = Constraint(expr= - m.b599 - m.b600 + m.b601 - m.b691 <= 0)
m.c1289 = Constraint(expr= m.b602 - m.b692 <= 0)
m.c1290 = Constraint(expr= - m.b602 + m.b603 - m.b693 <= 0)
m.c1291 = Constraint(expr= - m.b602 - m.b603 + m.b604 - m.b694 <= 0)
m.c1292 = Constraint(expr= m.b605 - m.b695 <= 0)
m.c1293 = Constraint(expr= - m.b605 + m.b606 - m.b696 <= 0)
m.c1294 = Constraint(expr= - m.b605 - m.b606 + m.b607 - m.b697 <= 0)
m.c1295 = Constraint(expr= m.b608 - m.b698 <= 0)
m.c1296 = Constraint(expr= - m.b608 + m.b609 - m.b699 <= 0)
m.c1297 = Constraint(expr= - m.b608 - m.b609 + m.b610 - m.b700 <= 0)
m.c1298 = Constraint(expr= m.b611 - m.b701 <= 0)
m.c1299 = Constraint(expr= - m.b611 + m.b612 - m.b702 <= 0)
m.c1300 = Constraint(expr= - m.b611 - m.b612 + m.b613 - m.b703 <= 0)
m.c1301 = Constraint(expr= m.b614 - m.b704 <= 0)
m.c1302 = Constraint(expr= - m.b614 + m.b615 - m.b705 <= 0)
m.c1303 = Constraint(expr= - m.b614 - m.b615 + m.b616 - m.b706 <= 0)
m.c1304 = Constraint(expr= m.b617 - m.b707 <= 0)
m.c1305 = Constraint(expr= - m.b617 + m.b618 - m.b708 <= 0)
m.c1306 = Constraint(expr= - m.b617 - m.b618 + m.b619 - m.b709 <= 0)
m.c1307 = Constraint(expr= m.b620 - m.b710 <= 0)
m.c1308 = Constraint(expr= - m.b620 + m.b621 - m.b711 <= 0)
m.c1309 = Constraint(expr= - m.b620 - m.b621 + m.b622 - m.b712 <= 0)
m.c1310 = Constraint(expr= m.b623 - m.b713 <= 0)
m.c1311 = Constraint(expr= - m.b623 + m.b624 - m.b714 <= 0)
m.c1312 = Constraint(expr= - m.b623 - m.b624 + m.b625 - m.b715 <= 0)
m.c1313 = Constraint(expr= m.b626 - m.b716 <= 0)
m.c1314 = Constraint(expr= - m.b626 + m.b627 - m.b717 <= 0)
m.c1315 = Constraint(expr= - m.b626 - m.b627 + m.b628 - m.b718 <= 0)
m.c1316 = Constraint(expr= m.b629 - m.b719 <= 0)
m.c1317 = Constraint(expr= - m.b629 + m.b630 - m.b720 <= 0)
m.c1318 = Constraint(expr= - m.b629 - m.b630 + m.b631 - m.b721 <= 0)
m.c1319 = Constraint(expr= m.b632 - m.b722 <= 0)
m.c1320 = Constraint(expr= - m.b632 + m.b633 - m.b723 <= 0)
m.c1321 = Constraint(expr= - m.b632 - m.b633 + m.b634 - m.b724 <= 0)
m.c1322 = Constraint(expr= m.b635 - m.b725 <= 0)
m.c1323 = Constraint(expr= - m.b635 + m.b636 - m.b726 <= 0)
m.c1324 = Constraint(expr= - m.b635 - m.b636 + m.b637 - m.b727 <= 0)
m.c1325 = Constraint(expr= m.b638 - m.b728 <= 0)
m.c1326 = Constraint(expr= - m.b638 + m.b639 - m.b729 <= 0)
m.c1327 = Constraint(expr= - m.b638 - m.b639 + m.b640 - m.b730 <= 0)
m.c1328 = Constraint(expr= m.b641 - m.b731 <= 0)
m.c1329 = Constraint(expr= - m.b641 + m.b642 - m.b732 <= 0)
m.c1330 = Constraint(expr= - m.b641 - m.b642 + m.b643 - m.b733 <= 0)
m.c1331 = Constraint(expr= m.b644 - m.b734 <= 0)
m.c1332 = Constraint(expr= - m.b644 + m.b645 - m.b735 <= 0)
m.c1333 = Constraint(expr= - m.b644 - m.b645 + m.b646 - m.b736 <= 0)
m.c1334 = Constraint(expr= m.b647 - m.b737 <= 0)
m.c1335 = Constraint(expr= - m.b647 + m.b648 - m.b738 <= 0)
m.c1336 = Constraint(expr= - m.b647 - m.b648 + m.b649 - m.b739 <= 0)
m.c1337 = Constraint(expr= m.b650 - m.b740 <= 0)
m.c1338 = Constraint(expr= - m.b650 + m.b651 - m.b741 <= 0)
m.c1339 = Constraint(expr= - m.b650 - m.b651 + m.b652 - m.b742 <= 0)
m.c1340 = Constraint(expr= m.b653 - m.b743 <= 0)
m.c1341 = Constraint(expr= - m.b653 + m.b654 - m.b744 <= 0)
m.c1342 = Constraint(expr= - m.b653 - m.b654 + m.b655 - m.b745 <= 0)
m.c1343 = Constraint(expr= m.b656 - m.b746 <= 0)
m.c1344 = Constraint(expr= - m.b656 + m.b657 - m.b747 <= 0)
m.c1345 = Constraint(expr= - m.b656 - m.b657 + m.b658 - m.b748 <= 0)
m.c1346 = Constraint(expr= m.b659 - m.b749 <= 0)
m.c1347 = Constraint(expr= - m.b659 + m.b660 - m.b750 <= 0)
m.c1348 = Constraint(expr= - m.b659 - m.b660 + m.b661 - m.b751 <= 0)
m.c1349 = Constraint(expr= m.b662 - m.b752 <= 0)
m.c1350 = Constraint(expr= - m.b662 + m.b663 - m.b753 <= 0)
m.c1351 = Constraint(expr= - m.b662 - m.b663 + m.b664 - m.b754 <= 0)
m.c1352 = Constraint(expr= m.b665 - m.b755 <= 0)
m.c1353 = Constraint(expr= - m.b665 + m.b666 - m.b756 <= 0)
m.c1354 = Constraint(expr= - m.b665 - m.b666 + m.b667 - m.b757 <= 0)
m.c1355 = Constraint(expr= m.b668 - m.b758 <= 0)
m.c1356 = Constraint(expr= - m.b668 + m.b669 - m.b759 <= 0)
m.c1357 = Constraint(expr= - m.b668 - m.b669 + m.b670 - m.b760 <= 0)
m.c1358 = Constraint(expr= m.b671 - m.b761 <= 0)
m.c1359 = Constraint(expr= - m.b671 + m.b672 - m.b762 <= 0)
m.c1360 = Constraint(expr= - m.b671 - m.b672 + m.b673 - m.b763 <= 0)
m.c1361 = Constraint(expr= m.b674 - m.b764 <= 0)
m.c1362 = Constraint(expr= - m.b674 + m.b675 - m.b765 <= 0)
m.c1363 = Constraint(expr= - m.b674 - m.b675 + m.b676 - m.b766 <= 0)
m.c1364 = Constraint(expr= m.b677 - m.b767 <= 0)
m.c1365 = Constraint(expr= - m.b677 + m.b678 - m.b768 <= 0)
m.c1366 = Constraint(expr= - m.b677 - m.b678 + m.b679 - m.b769 <= 0)
m.c1367 = Constraint(expr= m.b680 - m.b770 <= 0)
m.c1368 = Constraint(expr= - m.b680 + m.b681 - m.b771 <= 0)
m.c1369 = Constraint(expr= - m.b680 - m.b681 + m.b682 - m.b772 <= 0)
m.c1370 = Constraint(expr= m.b683 - m.b773 <= 0)
m.c1371 = Constraint(expr= - m.b683 + m.b684 - m.b774 <= 0)
m.c1372 = Constraint(expr= - m.b683 - m.b684 + m.b685 - m.b775 <= 0)
m.c1373 = Constraint(expr= m.b596 + m.b599 == 1)
m.c1374 = Constraint(expr= m.b597 + m.b600 == 1)
m.c1375 = Constraint(expr= m.b598 + m.b601 == 1)
m.c1376 = Constraint(expr= - m.b602 + m.b611 + m.b614 >= 0)
m.c1377 = Constraint(expr= - m.b603 + m.b612 + m.b615 >= 0)
m.c1378 = Constraint(expr= - m.b604 + m.b613 + m.b616 >= 0)
m.c1379 = Constraint(expr= - m.b611 + m.b629 >= 0)
m.c1380 = Constraint(expr= - m.b612 + m.b630 >= 0)
m.c1381 = Constraint(expr= - m.b613 + m.b631 >= 0)
m.c1382 = Constraint(expr= - m.b614 + m.b632 >= 0)
m.c1383 = Constraint(expr= - m.b615 + m.b633 >= 0)
m.c1384 = Constraint(expr= - m.b616 + m.b634 >= 0)
m.c1385 = Constraint(expr= - m.b605 + m.b617 >= 0)
m.c1386 = Constraint(expr= - m.b606 + m.b618 >= 0)
m.c1387 = Constraint(expr= - m.b607 + m.b619 >= 0)
m.c1388 = Constraint(expr= - m.b617 + m.b635 + m.b638 >= 0)
m.c1389 = Constraint(expr= - m.b618 + m.b636 + m.b639 >= 0)
m.c1390 = Constraint(expr= - m.b619 + m.b637 + m.b640 >= 0)
m.c1391 = Constraint(expr= - m.b608 + m.b620 + m.b623 + m.b626 >= 0)
m.c1392 = Constraint(expr= - m.b609 + m.b621 + m.b624 + m.b627 >= 0)
m.c1393 = Constraint(expr= - m.b610 + m.b622 + m.b625 + m.b628 >= 0)
m.c1394 = Constraint(expr= - m.b620 + m.b638 >= 0)
m.c1395 = Constraint(expr= - m.b621 + m.b639 >= 0)
m.c1396 = Constraint(expr= - m.b622 + m.b640 >= 0)
m.c1397 = Constraint(expr= - m.b623 + m.b641 + m.b644 >= 0)
m.c1398 = Constraint(expr= - m.b624 + m.b642 + m.b645 >= 0)
m.c1399 = Constraint(expr= - m.b625 + m.b643 + m.b646 >= 0)
m.c1400 = Constraint(expr= - m.b626 + m.b647 + m.b650 + m.b653 >= 0)
m.c1401 = Constraint(expr= - m.b627 + m.b648 + m.b651 + m.b654 >= 0)
m.c1402 = Constraint(expr= - m.b628 + m.b649 + m.b652 + m.b655 >= 0)
m.c1403 = Constraint(expr= m.b596 + m.b599 - m.b602 >= 0)
m.c1404 = Constraint(expr= m.b597 + m.b600 - m.b603 >= 0)
m.c1405 = Constraint(expr= m.b598 + m.b601 - m.b604 >= 0)
m.c1406 = Constraint(expr= m.b596 + m.b599 - m.b605 >= 0)
m.c1407 = Constraint(expr= m.b597 + m.b600 - m.b606 >= 0)
m.c1408 = Constraint(expr= m.b598 + m.b601 - m.b607 >= 0)
m.c1409 = Constraint(expr= m.b596 + m.b599 - m.b608 >= 0)
m.c1410 = Constraint(expr= m.b597 + m.b600 - m.b609 >= 0)
m.c1411 = Constraint(expr= m.b598 + m.b601 - m.b610 >= 0)
m.c1412 = Constraint(expr= m.b602 - m.b611 >= 0)
m.c1413 = Constraint(expr= m.b603 - m.b612 >= 0)
m.c1414 = Constraint(expr= m.b604 - m.b613 >= 0)
m.c1415 = Constraint(expr= m.b602 - m.b614 >= 0)
m.c1416 = Constraint(expr= m.b603 - m.b615 >= 0)
m.c1417 = Constraint(expr= m.b604 - m.b616 >= 0)
m.c1418 = Constraint(expr= m.b605 - m.b617 >= 0)
m.c1419 = Constraint(expr= m.b606 - m.b618 >= 0)
m.c1420 = Constraint(expr= m.b607 - m.b619 >= 0)
m.c1421 = Constraint(expr= m.b608 - m.b620 >= 0)
m.c1422 = Constraint(expr= m.b609 - m.b621 >= 0)
m.c1423 = Constraint(expr= m.b610 - m.b622 >= 0)
m.c1424 = Constraint(expr= m.b608 - m.b623 >= 0)
m.c1425 = Constraint(expr= m.b609 - m.b624 >= 0)
m.c1426 = Constraint(expr= m.b610 - m.b625 >= 0)
m.c1427 = Constraint(expr= m.b608 - m.b626 >= 0)
m.c1428 = Constraint(expr= m.b609 - m.b627 >= 0)
m.c1429 = Constraint(expr= m.b610 - m.b628 >= 0)
m.c1430 = Constraint(expr= m.b611 - m.b629 >= 0)
m.c1431 = Constraint(expr= m.b612 - m.b630 >= 0)
m.c1432 = Constraint(expr= m.b613 - m.b631 >= 0)
m.c1433 = Constraint(expr= m.b614 - m.b632 >= 0)
m.c1434 = Constraint(expr= m.b615 - m.b633 >= 0)
m.c1435 = Constraint(expr= m.b616 - m.b634 >= 0)
m.c1436 = Constraint(expr= m.b617 - m.b635 >= 0)
m.c1437 = Constraint(expr= m.b618 - m.b636 >= 0)
m.c1438 = Constraint(expr= m.b619 - m.b637 >= 0)
m.c1439 = Constraint(expr= m.b617 - m.b638 >= 0)
m.c1440 = Constraint(expr= m.b618 - m.b639 >= 0)
m.c1441 = Constraint(expr= m.b619 - m.b640 >= 0)
m.c1442 = Constraint(expr= m.b623 - m.b641 >= 0)
m.c1443 = Constraint(expr= m.b624 - m.b642 >= 0)
m.c1444 = Constraint(expr= m.b625 - m.b643 >= 0)
m.c1445 = Constraint(expr= m.b623 - m.b644 >= 0)
m.c1446 = Constraint(expr= m.b624 - m.b645 >= 0)
m.c1447 = Constraint(expr= m.b625 - m.b646 >= 0)
m.c1448 = Constraint(expr= m.b626 - m.b647 >= 0)
m.c1449 = Constraint(expr= m.b627 - m.b648 >= 0)
m.c1450 = Constraint(expr= m.b628 - m.b649 >= 0)
m.c1451 = Constraint(expr= m.b626 - m.b650 >= 0)
m.c1452 = Constraint(expr= m.b627 - m.b651 >= 0)
m.c1453 = Constraint(expr= m.b628 - m.b652 >= 0)
m.c1454 = Constraint(expr= m.b626 - m.b653 >= 0)
m.c1455 = Constraint(expr= m.b627 - m.b654 >= 0)
m.c1456 = Constraint(expr= m.b628 - m.b655 >= 0)
m.c1457 = Constraint(expr= - m.b653 + m.b656 + m.b659 >= 0)
m.c1458 = Constraint(expr= - m.b654 + m.b657 + m.b660 >= 0)
m.c1459 = Constraint(expr= - m.b655 + m.b658 + m.b661 >= 0)
m.c1460 = Constraint(expr= - m.b662 + m.b671 + m.b674 >= 0)
m.c1461 = Constraint(expr= - m.b663 + m.b672 + m.b675 >= 0)
m.c1462 = Constraint(expr= - m.b664 + m.b673 + m.b676 >= 0)
m.c1463 = Constraint(expr= - m.b665 + m.b677 >= 0)
m.c1464 = Constraint(expr= - m.b666 + m.b678 >= 0)
m.c1465 = Constraint(expr= - m.b667 + m.b679 >= 0)
m.c1466 = Constraint(expr= m.b653 - m.b656 >= 0)
m.c1467 = Constraint(expr= m.b654 - m.b657 >= 0)
m.c1468 = Constraint(expr= m.b655 - m.b658 >= 0)
m.c1469 = Constraint(expr= m.b653 - m.b659 >= 0)
m.c1470 = Constraint(expr= m.b654 - m.b660 >= 0)
m.c1471 = Constraint(expr= m.b655 - m.b661 >= 0)
m.c1472 = Constraint(expr= m.b662 - m.b671 >= 0)
m.c1473 = Constraint(expr= m.b663 - m.b672 >= 0)
m.c1474 = Constraint(expr= m.b664 - m.b673 >= 0)
m.c1475 = Constraint(expr= m.b662 - m.b674 >= 0)
m.c1476 = Constraint(expr= m.b663 - m.b675 >= 0)
m.c1477 = Constraint(expr= m.b664 - m.b676 >= 0)
m.c1478 = Constraint(expr= m.b665 - m.b677 >= 0)
m.c1479 = Constraint(expr= m.b666 - m.b678 >= 0)
m.c1480 = Constraint(expr= m.b667 - m.b679 >= 0)
m.c1481 = Constraint(expr= m.b668 - m.b680 >= 0)
m.c1482 = Constraint(expr= m.b669 - m.b681 >= 0)
m.c1483 = Constraint(expr= m.b670 - m.b682 >= 0)
m.c1484 = Constraint(expr= m.b668 - m.b683 >= 0)
m.c1485 = Constraint(expr= m.b669 - m.b684 >= 0)
m.c1486 = Constraint(expr= m.b670 - m.b685 >= 0)
|
StarcoderdataPython
|
3523684
|
<filename>justree/bfs.py
from collections import deque
from typing import Iterable, List, Tuple, Deque, Optional, Union
from .tools import reversed_enumerate, T
from .tree_node import TreeNode
def non_recursive_tree_bfs_forward_original(self: T) -> Iterable[T]:
assert isinstance(self, TreeNode)
q: Deque[TreeNode] = deque([self])
while q:
t = q.popleft()
q.extend(t._children)
yield t
def non_recursive_tree_bfs_forward_mirror(self: T) -> Iterable[T]:
assert isinstance(self, TreeNode)
q: Deque[TreeNode] = deque([self])
while q:
t = q.popleft()
q.extend(reversed(t._children))
yield t
def non_recursive_tree_bfs_reverse_original(self: T) -> List[T]:
assert isinstance(self, TreeNode)
q: Deque[TreeNode] = deque([self])
r: List[TreeNode] = [self]
while q:
t = q.popleft()
q.extend(t._children)
r.extend(t._children)
r.reverse()
return r
def non_recursive_tree_bfs_reverse_mirror(self: T) -> List[T]:
assert isinstance(self, TreeNode)
q: Deque[TreeNode] = deque([self])
r: List[TreeNode] = [self]
while q:
t = q.popleft()
q.extend(reversed(t._children))
r.extend(reversed(t._children))
r.reverse()
return r
_Int = Union[int, float]
def bfs_ex_preparation(depth: Optional[_Int]) -> _Int:
return float('inf') if depth is None else depth
def non_recursive_tree_bfs_forward_original_ex(self: T, depth: Optional[_Int] = None) \
-> Iterable[Tuple[T, int, Tuple[int, ...]]]:
assert isinstance(self, TreeNode)
depth = bfs_ex_preparation(depth)
q: Deque[Tuple[TreeNode, int, Tuple[int, ...]]] = deque([(self, 1, ())])
while q:
t, d, i = q.popleft()
if d < depth:
q.extend((ct, d + 1, i + (ci,)) for ci, ct in enumerate(t._children))
yield t, d, i
def non_recursive_tree_bfs_forward_mirror_ex(self: T, depth: Optional[_Int] = None) \
-> Iterable[Tuple[T, int, Tuple[int, ...]]]:
assert isinstance(self, TreeNode)
depth = bfs_ex_preparation(depth)
q: Deque[Tuple[TreeNode, int, Tuple[int, ...]]] = deque([(self, 1, ())])
while q:
t, d, i = q.popleft()
if d < depth:
q.extend((ct, d + 1, i + (ci,)) for ci, ct in reversed_enumerate(t._children))
yield t, d, i
def non_recursive_tree_bfs_reverse_original_ex(self: T, depth: Optional[_Int] = None) \
-> List[Tuple[T, int, Tuple[int, ...]]]:
assert isinstance(self, TreeNode)
depth = bfs_ex_preparation(depth)
q: Deque[Tuple[TreeNode, int, Tuple[int, ...]]] = deque([(self, 1, ())])
r: List[Tuple[TreeNode, int, Tuple[int, ...]]] = [(self, 1, ())]
while q:
t, d, i = q.popleft()
if d < depth:
q.extend((ct, d + 1, i + (ci,)) for ci, ct in enumerate(t._children))
r.extend((ct, d + 1, i + (ci,)) for ci, ct in enumerate(t._children))
r.reverse()
return r
def non_recursive_tree_bfs_reverse_mirror_ex(self: T, depth: Optional[_Int] = None) \
-> List[Tuple[T, int, Tuple[int, ...]]]:
assert isinstance(self, TreeNode)
depth = bfs_ex_preparation(depth)
q: Deque[Tuple[TreeNode, int, Tuple[int, ...]]] = deque([(self, 1, ())])
r: List[Tuple[TreeNode, int, Tuple[int, ...]]] = [(self, 1, ())]
while q:
t, d, i = q.popleft()
if d < depth:
q.extend((ct, d + 1, i + (ci,)) for ci, ct in reversed_enumerate(t._children))
r.extend((ct, d + 1, i + (ci,)) for ci, ct in reversed_enumerate(t._children))
r.reverse()
return r
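# Semantics summary (derived from the functions above, added for clarity):
# - the *_forward_original variants yield nodes in level order with children
#   visited left-to-right; the *_mirror variants visit children right-to-left.
# - the *_reverse_* variants build the same level-order list and return it
#   reversed, so the deepest level comes first and the root comes last.
# - the *_ex variants yield (node, depth, index-path) tuples, where the root
#   has depth 1 and an empty index path, and stop expanding below the
#   optional `depth` bound.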
|
StarcoderdataPython
|
12839846
|
# -*- coding: utf-8 -*-
"""Test functions that get data."""
import os
import unittest
from bio2bel_uniprot import get_mappings_df
HERE = os.path.abspath(os.path.dirname(__file__))
URL = os.path.join(HERE, 'test.tsv')
class TestGet(unittest.TestCase):
"""Test getting data."""
def test_get_mappings(self):
"""Test getting the full mappings file."""
df = get_mappings_df(url=URL)
self.assertEqual(6, len(df.index))
|
StarcoderdataPython
|
3293728
|
from hattori.base import BaseAnonymizer, faker
from users.models import User
class UserAnonimizer(BaseAnonymizer):
model = User
attributes = [
('first_name', faker.first_name),
('last_name', faker.last_name),
('email', faker.email),
('username', faker.ssn),
]
def run(self, *args, **kwargs):
result = super().run(*args, **kwargs)
self.set_simple_password_for_all_remaining_users()
return result
def get_query_set(self):
return User.objects.filter(is_staff=False)
def set_simple_password_for_all_remaining_users(self):
print('Setting password «<PASSWORD>» to all staff users:', end=' ') # noqa T001
updated = list()
for user in User.objects.exclude(pk__in=self.get_query_set().values_list('pk')):
user.set_password('<PASSWORD>')
user.save()
updated.append(user.username)
print(', '.join(updated)) # noqa T001
|
StarcoderdataPython
|
11373166
|
<reponame>robashaw/basisopt
from basisopt import bse_wrapper as bsew
import basis_set_exchange as bse
from tests.data import shells as shell_data
import pytest
def test_make_bse_shell():
internal_vdz = shell_data.get_vdz_internal()
vdz_h = internal_vdz['h']
bse_s_shell = bsew.make_bse_shell(vdz_h[0])
assert bse_s_shell['angular_momentum'][0] == 0
assert len(bse_s_shell['exponents']) == shell_data._nsexp
assert len(bse_s_shell['coefficients']) == shell_data._nsfuncs
def test_make_internal_shell():
bse_vdz = bse.get_basis('cc-pvdz', ['H'])['elements']['1']
internal_s_shell = bsew.make_internal_shell(bse_vdz['electron_shells'][0])
assert internal_s_shell.l == 's'
assert len(internal_s_shell.exps) == shell_data._nsexp
assert len(internal_s_shell.coefs) == shell_data._nsfuncs
def test_fetch_basis():
internal_vdz_ref = shell_data.get_vdz_internal()
internal_vdz_fetch = bsew.fetch_basis('cc-pvdz', ['H'])
assert 'h' in internal_vdz_fetch.keys()
h_ref = internal_vdz_ref['h']
h_fetch = internal_vdz_fetch['h']
assert len(h_ref) == len(h_fetch)
for s1, s2 in zip(h_ref, h_fetch):
assert shell_data.shells_are_equal(s1, s2)
|
StarcoderdataPython
|
8100085
|
# ---------------------------------------------------------------------------------------------------------------------
# lfsr_template.py
# ---------------------------------------------------------------------------------------------------------------------
# Generate Verilog LFSR module and testbench
# ---------------------------------------------------------------------------------------------------------------------
# ---------------------------------------------------------------------------------------------------------------------
# More Imports
# ---------------------------------------------------------------------------------------------------------------------
from argparse import ArgumentParser, Namespace
from lfsr_helper import FeedbackModeEnum, ResetTypeEnum, LFSR_Formatter, LFSR_Params
# ---------------------------------------------------------------------------------------------------------------------
class Defaults:
FeedbackMode = FeedbackModeEnum.Fibonacci
InitValue = "0x1"
ResetType = ResetTypeEnum.SyncHigh
ModuleName = 'lfsr'
# ---------------------------------------------------------------------------------------------------------------------
# ---------------------------------------------------------------------------------------------------------------------
def parse_args() -> Namespace:
# create parser
parser = ArgumentParser(description="Generate Verilog LFSR module and testbench")
# configure feedback_mode
feedback_mode_parent = parser.add_argument_group('Feedback mode selection',
'default is %s' % Defaults.FeedbackMode.value)
feedback_mode_group = feedback_mode_parent.add_mutually_exclusive_group()
feedback_mode_group.add_argument('-f', '--fibonacci', action='store_const', const=FeedbackModeEnum.Fibonacci,
dest='feedback_mode', help="Generate Fibonacci LFSR")
feedback_mode_group.add_argument('-g', '--galois', action='store_const', const=FeedbackModeEnum.Galois,
dest='feedback_mode', help="Generate Galois LFSR")
parser.set_defaults(feedback_mode=Defaults.FeedbackMode)
# configure init_mode
init_mode_parent = parser.add_argument_group('Initial value selection',
'default is %s' % Defaults.InitValue)
init_mode_group = init_mode_parent.add_mutually_exclusive_group()
init_mode_group.add_argument("-i", "--init-value", default=Defaults.InitValue,
help="Initial value of LFSR shift register (specify as hex number,"
" can also be random, see below)")
init_mode_group.add_argument("-j", "--init-random", action='store_true', help="Generate random initial value")
# configure reset_type
parser.add_argument("-r", "--reset-type", default=Defaults.ResetType.value,
choices=[rst.value for rst in ResetTypeEnum],
help="Type of reset to use (default is '%s')" % Defaults.ResetType.value)
# configure other arguments
parser.add_argument("-p", "--poly", default=None,
help="Feedback polynomial to use (specify as hex number, default is to generate a random one)")
parser.add_argument("-m", "--module-name", default=None,
help="Name of module to generate (default is '%s<width>')" % Defaults.ModuleName)
parser.add_argument("-c", "--clock-enable", action='store_true',
help="Add clock enable port (default is no clock enable)")
parser.add_argument("-v", "--verbose", action='store_true',
help="Be verbose and print internal operation details")
# configure width
parser.add_argument('width', type=int, help="Width of LFSR in bits")
# TODO: Add argument to decide what to generate. Default is both targets, but someone might need just the module
# itself or only the testbench (why?)
# TODO: Add argument to print what was generated instead of always writing to files.
# TODO: Consider refusing to overwrite already existing output files. Maybe add an argument to force overwrite.
# parse command line
return parser.parse_args()
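# Example invocation (a sketch based on the flags defined in parse_args above):
#   python lfsr_template.py --galois --init-random --clock-enable 16
# asks for a 16-bit Galois LFSR with a clock-enable port and a random initial
# value; generation of the module and testbench is handled by main() below.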
# ---------------------------------------------------------------------------------------------------------------------
# ---------------------------------------------------------------------------------------------------------------------
def main() -> None:
# create lfsr generator instance
lfsr = LFSR_Formatter()
# parse arguments and turn them into parameters
args = parse_args()
params = LFSR_Params(args, Defaults.ModuleName)
# generate and save output products (module and testbench)
for output in lfsr.OutputEnum:
verilog = lfsr.generate_output(output, params)
lfsr.write_output(output, verilog, params)
# ---------------------------------------------------------------------------------------------------------------------
# ---------------------------------------------------------------------------------------------------------------------
if __name__ == "__main__":
main()
# ---------------------------------------------------------------------------------------------------------------------
# ---------------------------------------------------------------------------------------------------------------------
# End of File
# ---------------------------------------------------------------------------------------------------------------------
|
StarcoderdataPython
|
1670956
|
<filename>py_hawkesn_sir/py_hawkesn_sir/hawkesn_seir_sympy.py
# from scipy.optimize import fmin_l_bfgs_b
import numpy as np
import matplotlib.pyplot as plt
from sympy import derive_by_array, exp, lambdify, log, Piecewise, symbols
class HawkesN:
def __init__(self, history):
"""
Parameters
----------
history : sympy.Array
Array containing the observed event times in ascending order.
"""
self.his = history
def exp_intensity_sigma_neq_gamma(self, sum_less_equal=True):
"""
Calculate the (exponential) intensity of a (SEIR-)HawkesN process
symbolically.
Parameters
----------
sum_less_equal : bool, default: True
If True, we sum over all event times <= time t. Otherwise, we sum
over all event times < time t.
Returns
-------
exp_intensity_ : sympy.core.mul.Mul
A sympy expression containing the symbols beta, sigma, gamma, n,
and t.
"""
beta, sigma, gamma, n, t = symbols("beta sigma gamma n t")
events_until_t = sum(
[Piecewise((1, h <= t), (0, True)) for h in self.his]
)
return (1 - events_until_t / n) * (beta * sigma / (gamma-sigma)) * sum(
[Piecewise(
(
exp(-sigma * (t - h)) - exp(-gamma * (t - h)),
h <= t if sum_less_equal else h < t
),
(0, True)
) for h in self.his])
def exp_intensity_sigma_eq_gamma(self, sum_less_equal=True):
"""
Calculate the (exponential) intensity of a (SEIR-)HawkesN process
symbolically.
Parameters
----------
sum_less_equal : bool, default: True
If True, we sum over all event times <= time t. Otherwise, we sum
over all event times < time t.
Returns
-------
exp_intensity_ : sympy.core.mul.Mul
A sympy expression containing the symbols beta, gamma, n,
and t. The symbol sigma is not contained as sigma=gamma holds in
the case considered by this function.
"""
beta, gamma, n, t = symbols("beta gamma n t")
events_until_t = sum(
[Piecewise((1, h <= t), (0, True)) for h in self.his]
)
return (1 - events_until_t / n) * beta * gamma * sum(
[Piecewise(
(
(t - h) * exp(-gamma * (t - h)),
h <= t if sum_less_equal else h < t
),
(0, True)
) for h in self.his])
def plot_exp_intensity(self, t_max, beta, sigma, gamma, n, step=0.01,
width=5.51, height=4, n_xticks=6, fname=None,
sum_less_equal=True):
"""
Plot (or save the plot of) the exponential intensity function from t=0
until t=t_max.
Parameters
----------
t_max : float
Define the time horizon of the plot. The time axis will contain
values from 0 to t_max.
beta : float
Parameter beta of the SEIR model.
sigma : float or None
Parameter sigma of the SEIR model. If None, then sigma=gamma is
assumed.
gamma : float
Parameter gamma of the SEIR model.
n : int
Population size.
step : float, default: 0.01
Interval on the x-axis between two successive points.
width : float, default: 5.51
Width of the plot.
height : float, default: 4.0
Height of the plot.
n_xticks : int (must be non-negative)
Number of ticks on the time axis.
fname : str or None
Name (without extension) of the file the plot is saved to. If
`None`, the plot is not saved.
sum_less_equal : bool
This arg is used in :meth:`self.exp_intensity`.
"""
if sigma is None:
sigma = gamma
subs_list = [("beta", beta), ("sigma", sigma), ("gamma", gamma),
("n", n)]
if sigma == gamma:
exp_intensity = self.exp_intensity_sigma_eq_gamma(
sum_less_equal=sum_less_equal).subs(subs_list)
else:
exp_intensity = self.exp_intensity_sigma_neq_gamma(
sum_less_equal=sum_less_equal).subs(subs_list)
exp_intensity = lambdify("t", exp_intensity)
time = np.arange(0, t_max, step)
plt.figure(dpi=300, figsize=(width, height))
plt.plot(time, exp_intensity(time))
plt.xlabel("$t$")
plt.xlim(0, t_max)
plt.xticks(np.linspace(0, t_max, n_xticks))
plt.ylabel("Intensity")
plt.grid()
title = "Intensity of a HawkesN process"
if self.his is not None and beta is not None and sigma is not None \
and gamma is not None and n is not None:
title += " with event history \{" \
+ ",".join(str(i) for i in self.his[:4]) \
+ (", ..." if len(self.his) > 4 else "") \
+ "\} \nand parameters: beta=" + str(beta) \
+ ", sigma=" + str(sigma) + ", gamma=" + str(gamma) \
+ ", $N$=" + str(n)
title += "."
plt.title(title)
if fname is not None:
plt.savefig(fname + ".pdf")
def llf_sigma_neq_gamma(self, sum_less_equal=True):
"""
Parameters
----------
sum_less_equal : bool, default: True
This arg is used in :meth:`self.exp_intensity_sigma_neq_gamma`.
Returns
-------
llf : sympy.core.add.Add
The log-likelihood function as symbolic expression (containing the
symbols `beta`, `sigma`, `gamma`, and `n`).
"""
beta, sigma, gamma, n = symbols("beta sigma gamma n")
intensity = self.exp_intensity_sigma_neq_gamma(sum_less_equal)
# for h in self.his:
# print("intensity at", h, "is:", intensity.subs("t", h))
first_event = len(self.his) - sum(1 for t in self.his if t > 0)
his_pos = self.his[first_event:]
addend_sum = sum(log(intensity.subs("t", h)) for h in his_pos)
# print("SUM PART", addend_sum.subs([("scale", .5), ("decay", .5), ("n", 100)]))
addend_int = (beta * sigma / (gamma-sigma)) * sum(
(n - (i + 1)) / n * (
(
exp(-sigma * (self.his[i] - self.his[j]))
-
exp(-sigma * (self.his[i + 1] - self.his[j]))
) / sigma
-
(
exp(-gamma * (self.his[i] - self.his[j]))
-
exp(-gamma * (self.his[i + 1] - self.his[j]))
) / gamma
)
for i in range(len(self.his)-1)
for j in range(i+1))
# print("INT PART", addend_int.subs([("scale", .5), ("decay", .5), ("n", 100)]))
return addend_sum - addend_int
def llf_sigma_eq_gamma(self, sum_less_equal=True):
"""
Parameters
----------
sum_less_equal : bool, default: True
This arg is used in :meth:`self.exp_intensity_sigma_eq_gamma`.
Returns
-------
llf : sympy.core.add.Add
            The log-likelihood function as a symbolic expression containing
            the symbols `beta`, `gamma`, and `n`. The symbol `sigma` does not
            appear because sigma=gamma holds in the case handled here.
"""
beta, gamma, n = symbols("beta gamma n")
intensity = self.exp_intensity_sigma_eq_gamma(sum_less_equal)
# for h in self.his:
# print("intensity at", h, "is:", intensity.subs("t", h))
first_event = len(self.his) - sum(1 for t in self.his if t > 0)
his_pos = self.his[first_event:]
addend_sum = sum(log(intensity.subs("t", h)) for h in his_pos)
# print("SUM PART", addend_sum.subs([("scale", .5), ("decay", .5), ("n", 100)]))
addend_int = beta * sum(
(n - (i + 1)) / n * (
(
exp(-gamma * (self.his[i] - self.his[j]))
* (gamma * (self.his[i] - self.his[j]) + 1)
-
exp(-gamma * (self.his[i + 1] - self.his[j]))
* (gamma * (self.his[i + 1] - self.his[j]) + 1)
) / gamma
)
for i in range(len(self.his)-1)
for j in range(i+1))
# print("INT PART", addend_int.subs([("scale", .5), ("decay", .5), ("n", 100)]))
return addend_sum - addend_int
def llf_gradient_sigma_neq_gamma(self, sum_less_equal=True):
"""
Calculate the gradient of the log-likelihood function symbolically.
Parameters
----------
sum_less_equal : bool, default: True
This arg is passed to :meth:`self.llf_sigma_eq_gamma`.
Returns
-------
gradient : sympy.Array
An array containing four entries. The first (second) [third]
{fourth} entry is the derivative of the log-likelihood function
w.r.t. beta (sigma) [gamma] {N} parameter.
"""
beta, sigma, gamma, n = symbols("beta sigma gamma n")
return derive_by_array(
self.llf_sigma_neq_gamma(sum_less_equal),
[beta, sigma, gamma, n]
)
def llf_gradient_sigma_eq_gamma(self, sum_less_equal=True):
"""
Calculate the gradient of the log-likelihood function symbolically.
Parameters
----------
sum_less_equal : bool, default: True
This arg is passed to :meth:`self.llf_sigma_eq_gamma`.
Returns
-------
gradient : sympy.Array
An array containing four entries. The first [second] {third} entry
is the derivative of the log-likelihood function w.r.t. beta
[gamma] {N} parameter. There is no derivative w.r.t. sigma as it is
considered equal to gamma in the case considered by this function.
"""
beta, gamma, n = symbols("beta gamma n")
return derive_by_array(
self.llf_sigma_eq_gamma(sum_less_equal),
[beta, gamma, n]
)
# def fit(self, scale_start, decay_start, n_start):
# """
# Parameters
# ----------
# scale_start : float
# Starting value for the likelihood maximization.
# decay_start : float
# Starting value for the likelihood maximization.
# n_start : float
# Starting value for the likelihood maximization.
#
# Returns
# -------
# ...
# """
# llf_sym = self.llf()
# llf_grad_sym = self.llf_gradient()
# def negative_llf(scale_decay_n):
# """
# Parameters
# ----------
# scale_decay_n : np.array (shape (3))
# Values for the scale and decay parameter and the parameter N
# a single array.
#
# Returns
# -------
# neg_llf : float
# The negative log-likelihood.
# """
# result = llf_sym.subs([("scale", scale_decay_n[0]),
# ("decay", scale_decay_n[1]),
# ("n", scale_decay_n[2])])
# print("llf", result)
# return result
#
# def negative_llf_gradient(scale_decay_n):
# result = -llf_grad_sym.subs([("scale", scale_decay_n[0]),
# ("decay", scale_decay_n[1]),
# ("n", scale_decay_n[2])])
# print("-grad:", result)
# return np.array(result, dtype=np.float64)
#
# eps = np.finfo(float).eps
#
# return fmin_l_bfgs_b(
# func=negative_llf, # minimize this
# x0=np.array([scale_start, decay_start, n_start]), # initial guess
# fprime=negative_llf_gradient,
# bounds=[(eps, None), (eps, None), (len(self.his), None)],
# iprint=101
# )
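# Minimal usage sketch (not part of the original module; the event history and
# parameter values below are illustrative only): evaluate the sigma=gamma
# log-likelihood for a short, ascending event history.
if __name__ == "__main__":
    proc = HawkesN([0.0, 1.2, 2.5, 4.0])
    llf = proc.llf_sigma_eq_gamma()
    print(llf.subs([("beta", 0.8), ("gamma", 0.5), ("n", 50)]))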
|
StarcoderdataPython
|
8183585
|
"""
MIT License
Copyright (c) 2018 <NAME> Institute of Molecular Physiology
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
import pytest
import pandas as pd
import numpy as np
from .. import star
OUTPUT_TEST_FOLDER = 'OUTPUT_TESTS_DUMP'
class TestStarHeader:
def test_create_star_header_four_list(self):
"""
"""
header_names = [
'Test1',
'Test2',
'Test3',
'Test4',
]
expected_output = [
'',
'data_',
'',
'loop_',
'_rlnTest1 #1',
'_rlnTest2 #2',
'_rlnTest3 #3',
'_rlnTest4 #4',
]
assert star.create_star_header(names=header_names, prefix='rln') == expected_output
def test_create_star_header_four_array(self):
"""
"""
header_names = np.array([
'Test1',
'Test2',
'Test3',
'Test4',
], dtype=str)
expected_output = [
'',
'data_',
'',
'loop_',
'_rlnTest1 #1',
'_rlnTest2 #2',
'_rlnTest3 #3',
'_rlnTest4 #4',
]
assert star.create_star_header(names=header_names, prefix='rln') == expected_output
def test_create_star_header_single_list(self):
"""
"""
header_names = [
'Test1',
]
expected_output = [
'',
'data_',
'',
'loop_',
'_rlnTest1 #1',
]
assert star.create_star_header(names=header_names, prefix='rln') == expected_output
def test_create_star_header_single_array(self):
"""
"""
header_names = np.array([
'Test1',
], dtype=str)
expected_output = [
'',
'data_',
'',
'loop_',
'_rlnTest1 #1',
]
assert star.create_star_header(names=header_names, prefix='rln') == expected_output
class TestDumpStar:
def test_dump_star_four(self, tmpdir):
"""
"""
data_1 = np.arange(4)
data_2 = ['a', 'b', 'c', 'd']
data_3 = np.array(np.arange(4), dtype=float)
data_4 = [1]*4
data = pd.DataFrame({
'MicrographName': data_1,
'ImageName': data_2,
'CoordinateX': data_3,
'CoordinateY': data_4,
})
data_output = pd.DataFrame({
'MicrographName': data_1,
'ImageName': data_2,
'CoordinateX': data_3,
'CoordinateY': data_4,
})
output_file: str = tmpdir.mkdir(OUTPUT_TEST_FOLDER).join('test_dump_star_four.star')
star.dump_star(file_name=output_file, data=data, version='relion_2')
assert star.load_star(file_name=output_file).equals(data_output)
def test_dump_star_single(self, tmpdir):
"""
"""
data_1 = np.arange(4)
data = pd.DataFrame({
'CoordinateX': data_1,
})
data_output = pd.DataFrame({
'CoordinateX': data_1,
})
output_file: str = tmpdir.mkdir(OUTPUT_TEST_FOLDER).join('test_dump_star_single.star')
star.dump_star(file_name=output_file, data=data, version='relion_2')
assert star.load_star(file_name=output_file).equals(data_output)
def test_dump_star_single_empty(self, tmpdir):
"""
"""
data = pd.DataFrame({
})
output_file: str = tmpdir.mkdir(OUTPUT_TEST_FOLDER).join('test_dump_star_single_empty.star')
with pytest.raises(AssertionError):
star.dump_star(file_name=output_file, data=data, version='relion_2')
class TestLoadStarHeader:
def test_load_star_header_single(self, tmpdir):
data_1 = np.arange(4)
data = pd.DataFrame({
'MicrographName': data_1,
})
data_output = ['_rlnMicrographName']
output_file = tmpdir.mkdir(OUTPUT_TEST_FOLDER).join('test_load_star_header_single.star')
star.dump_star(file_name=output_file, data=data, version='relion_2')
assert star.load_star_header(file_name=output_file) == (data_output, 5)
def test_load_star_header_four(self, tmpdir):
data_1 = np.arange(4)
data_2 = ['a', 'b', 'c', 'd']
data_3 = np.array(np.arange(4), dtype=float)
data_4 = [1]*4
data = pd.DataFrame({
'MicrographName': data_1,
'ImageName': data_2,
'CoordinateX': data_3,
'CoordinateY': data_4,
})
output_header = [
'_rlnMicrographName',
'_rlnImageName',
'_rlnCoordinateX',
'_rlnCoordinateY',
]
output_file = tmpdir.mkdir(OUTPUT_TEST_FOLDER).join('test_load_star_header_four.star')
star.dump_star(file_name=output_file, data=data, version='relion_2')
assert star.load_star_header(file_name=output_file) == (output_header, 8)
def test_load_star_header_four_wrong_export(self, tmpdir):
data_1 = np.arange(4)
data_2 = ['a', 'b', 'c', 'd']
data_3 = np.array(np.arange(4), dtype=float)
data_4 = [1]*4
data = pd.DataFrame({
'MicrographName': data_1,
'ImageName': data_2,
'CoordinateX': data_3,
'SgdNextSubset': data_4,
})
output_header = [
'_rlnMicrographName',
'_rlnImageName',
'_rlnCoordinateX',
]
output_file = tmpdir.mkdir(OUTPUT_TEST_FOLDER).join('test_load_star_header_four_wrong_export.star')
star.dump_star(file_name=output_file, data=data, version='relion_3')
assert star.load_star_header(file_name=output_file) == (output_header, 7)
class TestLoadStar:
def test_load_star_single(self, tmpdir):
data_1 = np.arange(4)
data = pd.DataFrame({
'MicrographName': data_1,
})
output_file = tmpdir.mkdir(OUTPUT_TEST_FOLDER).join('test_load_star_single.star')
star.dump_star(file_name=output_file, data=data, version='relion_2')
assert star.load_star(file_name=output_file).equals(data)
def test_load_star_four(self, tmpdir):
data_1 = np.arange(4)
data_2 = ['a', 'b', 'c', 'd']
data_3 = np.array(np.arange(4), dtype=float)
data_4 = [1]*4
data = pd.DataFrame({
'MicrographName': data_1,
'ImageName': data_2,
'CoordinateX': data_3,
'CoordinateY': data_4,
})
output_file = tmpdir.mkdir(OUTPUT_TEST_FOLDER).join('test_load_star_four.star')
star.dump_star(file_name=output_file, data=data, version='relion_2')
assert star.load_star(file_name=output_file).equals(data)
def test_load_star_single_empty(self, tmpdir):
"""
"""
output_file = tmpdir.mkdir(OUTPUT_TEST_FOLDER).join('test_load_star_single_empty.star')
with open(output_file, 'w'):
pass
with pytest.raises(IOError):
star.load_star(file_name=output_file)
class TestImportStarHeader:
def test_MicrographName_outputs_MicrographName(self):
"""
"""
out_dict = star.import_star_header(['_rlnMicrographName'])
assert ['MicrographName'] == out_dict
def test_SgdSkipAnneal_outputs_SgdSkipAnneal(self):
"""
"""
out_dict = star.import_star_header(['_rlnSgdSkipAnneal'])
assert ['SgdSkipAnneal'] == out_dict
def test_SgdNextSubset_outputs_SgdNextSubset(self):
"""
"""
out_dict = star.import_star_header(['_rlnSgdNextSubset'])
assert ['SgdNextSubset'] == out_dict
def test_testii_raises_AssertionError(self):
"""
"""
with pytest.raises(AssertionError):
star.import_star_header(['testii'])
def test_empty_header_raises_AssertionError(self):
"""
"""
with pytest.raises(AssertionError):
star.import_star_header([])
class TestExportStarHeader:
def test_input_relion2_outputs_relion2_correct_out_header(self):
out_header, _, _ = star.export_star_header(
['MicrographName', 'SgdNextSubset'],
version='relion_2',
)
assert ['MicrographName', 'SgdNextSubset'] == out_header
def test_input_relion2_outputs_relion2_correct_old_header(self):
_, old_header, _ = star.export_star_header(
['MicrographName', 'SgdNextSubset'],
version='relion_2'
)
assert ['MicrographName', 'SgdNextSubset'] == old_header
def test_input_relion2_outputs_relion2_correct_prefix(self):
_, _, prefix = star.export_star_header(
['MicrographName', 'SgdNextSubset'],
version='relion_2'
)
assert 'rln' == prefix
def test_input_relion2_outputs_relion3_correct_out_header(self):
out_header, _, _ = star.export_star_header(
['MicrographName', 'SgdNextSubset'],
version='relion_3'
)
assert ['MicrographName'] == out_header
def test_input_relion2_outputs_relion3_correct_old_header(self):
_, old_header, _ = star.export_star_header(
['MicrographName', 'SgdNextSubset'],
version='relion_3'
)
assert ['MicrographName'] == old_header
def test_input_relion2_outputs_relion3_correct_prefix(self):
_, _, prefix = star.export_star_header(
['MicrographName', 'SgdNextSubset'],
version='relion_3'
)
assert 'rln' == prefix
|
StarcoderdataPython
|
1900081
|
# -*- coding: UTF-8 -*-
import sys
import os
dir_path = os.path.dirname(os.path.realpath(__file__))
parent_dir_path = os.path.abspath(os.path.join(dir_path, os.pardir))
sys.path.insert(0, parent_dir_path)
from db.SaUsersDB import dbUsersHelper
"""
Set Synology NAS host IP and port in the database
"""
def SetNasHostIPPort(ip,port):
duh = dbUsersHelper()
duh.setNasHostIPPort(ip,port)
if __name__ == "__main__":
if len(sys.argv[1:])>=2:
SetNasHostIPPort(sys.argv[1],sys.argv[2])
|
StarcoderdataPython
|
5079311
|
<gh_stars>0
from ctypes import *
import time
msvcrt = cdll.msvcrt
counter = 0
while 1:
msvcrt.printf("Loop iteration %d!\n" % counter)
time.sleep(2)
counter += 1
|
StarcoderdataPython
|
8177402
|
<gh_stars>1-10
#!/usr/bin/env python
from http.client import HTTPConnection
import json
import re
def parse_args(parts):
(typ, req) = ({}, set())
for i in range(2, len(parts), 2):
arg = re.sub(r'\W', '', parts[i+1])
typ[arg] = parts[i]
if arg == parts[i+1]:
req.add(arg)
return (typ, req)
def validate_typ(nam, val, typ):
if str(type(val)).find(typ) < 0:
raise TypeError('%s=%s (%s), but expected %s!' % (nam, str(val), type(val), typ))
def validate_args(args, typ, req):
for k, v in args.items():
validate_typ(k, v, typ.get(k))
for k in req:
if k not in args:
raise ValueError('%s is required!' % k)
class ClientStub:
def __init__(self, sign, addr=('127.0.0.1', 1992), serv=''):
self.sign = re.sub(r'\s+', ' ', sign).strip()
parts = re.sub(r'[^\w\[\]]+', ' ', self.sign).strip().split(' ')
(self.retn_typ, self.name) = parts[0:2]
(self.args_typ, self.args_req) = parse_args(parts)
(self.addr, self.serv) = (addr, serv)
def path(self):
if self.serv is None or len(self.serv) == 0:
return '/'+self.name
return '/'+self.serv+'/'+self.name
def call_post(self, args):
(host, port) = self.addr
conn = HTTPConnection(host, port)
conn.request('POST', self.path(), body=json.dumps(args))
resp = conn.getresponse()
data = json.loads(resp.read())
if 'error' in data:
raise Exception(data['error'])
return data['return']
def call(self, args):
validate_args(args, self.args_typ, self.args_req)
retn = self.call_post(args)
validate_typ('return', retn, self.retn_typ)
return retn
# a = ClientStub('int test(int a, int b)')
# r = a.call({'a': 1, 'b': 2})
# print(r)
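# Signature convention (derived from parse_args above): an argument whose name
# is wrapped in brackets is optional, a bare name is required. For example,
# ClientStub('int add(int a, int [b])') records typ == {'a': 'int', 'b': 'int'}
# and req == {'a'}, so a call with {'a': 1} alone passes validate_args.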
|
StarcoderdataPython
|
1832243
|
<reponame>jorgemauricio/proyectoCaborca
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Mon Jul 17 16:17:25 2017
@author: jorgemauricio
"""
# libraries
import os
import urllib.request
import time
from time import gmtime, strftime
import matplotlib.pyplot as plt
from mpl_toolkits.basemap import Basemap
from numpy import meshgrid
from scipy.interpolate import griddata as gd
from netCDF4 import Dataset
import numpy as np
import pandas as pd
# main program
def main():
    # download the data
    print("Starting file download")
    iniciarDescarga()
    # process the data
    print("Starting file processing")
    iniciarProcesamiento()
def iniciarDescarga():
    # ***** constants
    URL_DESCARGA = "http://satepsanone.nesdis.noaa.gov/pub/FIRE/GBBEPx"
    # elements
    arrayElementos = ['bc','co', 'co2','oc','pm25','so2']
    # Mac /Users/jorgemauricio/Documents/Research/proyectoCaborca
    # Linux /home/jorge/Documents/Research/proyectoCaborca
    URL_CARPETA = "/Users/jorgemauricio/Documents/Research/proyectoCaborca"
    # current date
    fechaActual = strftime("%Y-%m-%d")
    # date -1
    anio, mes, dia = generarDiaAnterior(fechaActual)
    # path of the download folder
    rutaDeCarpetaParaDescarga = '{}/data/{}-{:02d}-{:02d}'.format(URL_CARPETA,anio,mes,dia)
    # path of the folder for temporary files
    rutaDeCarpetaParaTemporales = '{}/temp/{}-{:02d}-{:02d}'.format(URL_CARPETA,anio,mes,dia)
    # path of the folder for maps
    rutaDeCarpetaParaMapas = '{}/maps/{}-{:02d}-{:02d}'.format(URL_CARPETA,anio,mes,dia)
    # path for the shape files
    rutaParaArchivosShapes = '{}/shapes/Estados.shp'.format(URL_CARPETA)
    # create the download folder
    if not os.path.exists(rutaDeCarpetaParaDescarga):
        os.mkdir(rutaDeCarpetaParaDescarga)
    else:
        print("***** Download folder already exists")
    # create the folder for maps
    if not os.path.exists(rutaDeCarpetaParaMapas):
        os.mkdir(rutaDeCarpetaParaMapas)
    else:
        print("***** Maps folder already exists")
    # create the folder for temporary files
    if not os.path.exists(rutaDeCarpetaParaTemporales):
        os.mkdir(rutaDeCarpetaParaTemporales)
    else:
        print("***** Temp folder already exists")
    # change to the download folder
    os.chdir(rutaDeCarpetaParaDescarga)
    # download loop
    for i in arrayElementos:
        # build the download URL and target file name
        urlDescarga = "{}/GBBEPx.emis_{}.001.{}{:02d}{:02d}.nc".format(URL_DESCARGA,i,anio,mes,dia)
        nombreDelArchivo = "GBBEPx.emis_{}.001.{}{:02d}{:02d}.nc".format(i,anio,mes,dia)
        print("***** Downloading file: {}".format(nombreDelArchivo))
        descargaArchivo(urlDescarga, nombreDelArchivo)
def descargaArchivo(ud, na):
    """
    Download the file at the given URL.
    param: ud: download URL
    param: na: file name to save to
    """
    urllib.request.urlretrieve(ud, na)
def generarDiaAnterior(f):
    """
    Returns the previous day's date, used to pick which file to download.
    param: f: current date as a 'YYYY-MM-DD' string
    """
    anio, mes, dia = f.split('-')
    anio = int(anio)
    mes = int(mes)
    dia = int(dia)
    dia -= 1
    if dia == 0:
        mes -= 1
        if mes == 0:
            anio -= 1
            mes = 12
        dia = numeroDeDiasEnElMes(mes, anio)
    return (anio, mes, dia)
def numeroDeDiasEnElMes(m, a):
    """
    Returns the number of days in a month.
    param: m: month
    param: a: year (needed for the February leap-year case)
    """
    if m == 2 and a % 4 == 0:
        return 29
    elif m == 2 and a % 4 != 0:
        return 28
    elif m == 1 or m == 3 or m == 5 or m == 7 or m == 8 or m == 10 or m == 12:
        return 31
    elif m == 4 or m == 6 or m == 9 or m == 11:
        return 30
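# Note: a simpler alternative (a sketch, not part of the original script) is the
# standard library's date arithmetic, which already handles month boundaries and
# leap years:
def generarDiaAnteriorConDatetime(f):
    """
    Same result as generarDiaAnterior, using datetime.
    param: f: current date as a 'YYYY-MM-DD' string
    """
    from datetime import datetime, timedelta
    ayer = datetime.strptime(f, "%Y-%m-%d") - timedelta(days=1)
    return (ayer.year, ayer.month, ayer.day)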
def iniciarProcesamiento():
    # Mac /Users/jorgemauricio/Documents/Research/proyectoCaborca
    # Linux /home/jorge/Documents/Research/proyectoCaborca
    URL_CARPETA = "/Users/jorgemauricio/Documents/Research/proyectoCaborca"
    # path to the shapefiles
    rutaParaArchivosShapes = '{}/shapes/Estados'.format(URL_CARPETA)
    # station coordinates
    dataEstaciones = pd.read_csv("/Users/jorgemauricio/Documents/Research/proyectoCaborca/data/coordenadas_estaciones.csv")
    # current date
    fechaActual = strftime("%Y-%m-%d")
    # date - 1 day
    anio, mes, dia = generarDiaAnterior(fechaActual)
    # folder path with the downloaded files to process
    rutaDeCarpetaParaElProcesamiento = '{}/data/{}-{:02d}-{:02d}'.format(URL_CARPETA,anio,mes,dia)
    # constants
    LONG_MIN = -115.65
    LONG_MAX = -107.94
    LAT_MIN = 25.41
    LAT_MAX = 33.06
    # files to process
    listaDeArchivos = [x for x in os.listdir(rutaDeCarpetaParaElProcesamiento) if x.endswith('.nc')]
    # processing loop
    for archivo in listaDeArchivos:
        # file name
        # nombreArchivo = "GBBEPx.emis_so2.001.20180118.nc"
        arrayNombreArchivo = archivo.split(".")
        arrayComponente = arrayNombreArchivo[1].split("_")
        nombreParaMapa = arrayComponente[1]
        rutaArchivo = "{}/{}".format(rutaDeCarpetaParaElProcesamiento, archivo)
        # read the netCDF file
        dataset = Dataset(rutaArchivo)
        # build the variable arrays
        biomass = dataset.variables['biomass'][:]
        Latitude = dataset.variables['Latitude'][:]
        Longitude = dataset.variables['Longitude'][:]
        # string used to build the CSV
        dataText = "Long,Lat,Biomass\n"
        # process the data
        for i in range(Longitude.shape[0]):
            for j in range(Latitude.shape[0]):
                tempText = "{},{},{}\n".format(Longitude[i], Latitude[j], biomass[0,j,i])
                dataText += tempText
        # write the temporary CSV file
        fileName = "{}/temp/{}-{:02d}-{:02d}/{}.csv".format(URL_CARPETA, anio, mes, dia, nombreParaMapa)
        textFile = open(fileName, "w")
        textFile.write(dataText)
        textFile.close()
        # read the temporary CSV file
        data = pd.read_csv(fileName)
        # longitude limits: > -115.65 and < -107.94
        data = data.loc[data['Long'] > LONG_MIN]
        data = data.loc[data['Long'] < LONG_MAX]
        # latitude limits: > 25.41 and < 33.06
        data = data.loc[data['Lat'] > LAT_MIN]
        data = data.loc[data['Lat'] < LAT_MAX]
        # ug/m3 to ppm
        data['Biomass'] = data['Biomass'] * 10000000000
        # get the x, y values
        lons = np.array(data['Long'])
        lats = np.array(data['Lat'])
        #%% start the plot
        plt.clf()
        # station locations
        xC = np.array(dataEstaciones['Long'])
        yC = np.array(dataEstaciones['Lat'])
        m = Basemap(projection='mill',llcrnrlat=LAT_MIN,urcrnrlat=LAT_MAX,llcrnrlon=LONG_MIN,urcrnrlon=LONG_MAX,resolution='h')
        # project lons, lats onto the map
        x, y = m(lons, lats)
        # number of columns and rows
        numCols = len(x)
        numRows = len(y)
        # generate xi, yi
        xi = np.linspace(x.min(), x.max(), numCols)
        yi = np.linspace(y.min(), y.max(), numRows)
        # generate the meshgrid
        xi, yi = np.meshgrid(xi, yi)
        # generate zi
        z = np.array(data['Biomass'])
        zi = gd((x,y), z, (xi,yi), method='cubic')
        # generate clevs
        stepVariable = 1
        step = (z.max() - z.min()) / 10
        # check the interval value
        if step <= 1:
            stepVariable = 1
        clevs = np.linspace(z.min(), z.max() + stepVariable , 10)
        #clevs = [1,2,3,4,5,6,7,8,9,10]
        # contour plot
        cs = m.contourf(xi,yi,zi, clevs, zorder=5, alpha=0.5, cmap='PuBu')
        # add the states shapefile
        m.readshapefile(rutaParaArchivosShapes, 'Estados')
        # add the station points
        m.scatter(xC, yC, latlon=True,s=1, marker='o', color='r', zorder=25)
        # colorbar
        cbar = m.colorbar(cs, location='right', pad="5%")
        cbar.set_label('pm')
        tituloTemporalParaElMapa = "{} {}-{:02d}-{:02d}".format(nombreParaMapa,anio,mes,dia)
        plt.title(tituloTemporalParaElMapa)
        # Mac /Users/jorgemauricio/Documents/Research/proyectoGranizo/Maps/{}_{}.png
        # Linux /home/jorge/Documents/Research/proyectoGranizo/Maps/{}_{}.png
        nombreTemporalParaElMapa = "/Users/jorgemauricio/Documents/Research/proyectoCaborca/maps/{}-{:02d}-{:02d}/{}.png".format(anio, mes, dia, nombreParaMapa)
        plt.annotate('@2018 INIFAP', xy=(-109,29), xycoords='figure fraction', xytext=(0.45,0.45), color='g', zorder=50)
        plt.savefig(nombreTemporalParaElMapa, dpi=300)
        print('****** Generated: {}'.format(nombreTemporalParaElMapa))
if __name__ == '__main__':
main()
|
StarcoderdataPython
|
3549821
|
<reponame>jacksonicson/paper.IS2015<filename>control/Control/src/balancer/placement_nextfit.py
from logs import sonarlog
import conf_domainsize as domainsize
import conf_nodes as nodes
import model
import placement
import conf_schedule
# Setup Sonar logging
logger = sonarlog.getLogger('placement')
class NextFit(placement.PlacementBase):
def __init__(self, model):
super(NextFit, self).__init__(model)
self.active_node = None
def _get_inactive_nodes(self):
# Get list of inactive nodes
inactive_nodes = []
print 'total nodes %i' % len(self.model.get_hosts(model.types.NODE))
for node in self.model.get_hosts(model.types.NODE):
if not node.domains:
inactive_nodes.append(node)
print 'inactive nodes %i' % len(inactive_nodes)
return inactive_nodes
    def placement(self, domain):
        # Get list of inactive nodes
        inactive_nodes = self._get_inactive_nodes()
# Set initial active node
if self.active_node is None:
self.active_node = inactive_nodes.pop()
# Domain specification of the new domain to place
domain_spec = domainsize.get_domain_spec(domain.size)
# Calculate total demand of all active domains running on selected node
cpu_demand = 0
mem_demand = 0
for active_domain in self.active_node.domains.values():
active_domain_configuration = active_domain.domain_configuration
active_domain_spec = domainsize.get_domain_spec(active_domain_configuration.size)
cpu_demand += active_domain_spec.total_cpu_cores()
mem_demand += active_domain_spec.total_memory()
# Calculate residual capacity
cpu_demand = nodes.NODE_CPU_CORES - domain_spec.total_cpu_cores() - cpu_demand
mem_demand = nodes.NODE_MEM - domain_spec.total_memory() - mem_demand
# Get new node if current one is overloaded
if cpu_demand < 0 or mem_demand < 0:
            try:
                self.active_node = inactive_nodes.pop()
            except IndexError:
print 'inactive nodes length: %i' % len(inactive_nodes)
self.model.dump()
print 'PROBLEM IN SCHEDULE ID: %i' %conf_schedule.SCHEDULE_ID
raise ValueError('FATAL error: No more free nodes available %i - sc %i' % (len(inactive_nodes), conf_schedule.SCHEDULE_ID))
# Return selected node
return self.active_node.name
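# Minimal standalone sketch of the next-fit idea used above (hypothetical
# one-dimensional capacities; not part of the original scheduler): keep filling
# the current node until a domain no longer fits, then open a new one.
def _nextfit_sketch(demands, capacity):
    nodes, current = [], 0
    for d in demands:
        if current + d > capacity:
            nodes.append(current)
            current = 0
        current += d
    nodes.append(current)
    return nodes  # e.g. _nextfit_sketch([4, 3, 5, 2], 8) -> [7, 7]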
|
StarcoderdataPython
|
5088177
|
<filename>bokeh/protocol/messages/pull_doc_reply.py
from __future__ import absolute_import, print_function
from ..exceptions import ProtocolError
from ..message import Message
from . import register
import logging
log = logging.getLogger(__name__)
@register
class pull_doc_reply_1(Message):
    ''' Define the ``PULL-DOC-REPLY`` message (revision 1) for replying to
    Document pull requests from clients
    The ``content`` fragment for this message has the form:
    .. code-block:: python
        {
            'doc' : <Document JSON>
        }
    '''
msgtype = 'PULL-DOC-REPLY'
revision = 1
def __init__(self, header, metadata, content):
super(pull_doc_reply_1, self).__init__(header, metadata, content)
@classmethod
def create(cls, request_id, document, **metadata):
        ''' Create a ``PULL-DOC-REPLY`` message
Args:
request_id (str) :
The message ID for the message that issues the pull request
document (Document) :
The Document to reply with
Any additional keyword arguments will be put into the message
``metadata`` fragment as-is.
'''
header = cls.create_header(request_id=request_id)
content = { 'doc' : document.to_json() }
msg = cls(header, metadata, content)
return msg
def push_to_document(self, doc):
if 'doc' not in self.content:
raise ProtocolError("No doc in PULL-DOC-REPLY")
doc.replace_with_json(self.content['doc'])
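# Hedged usage sketch (assumes bokeh's Document is importable in this
# environment; the request id below is a placeholder, not part of this module):
def _example_pull_doc_roundtrip():
    from bokeh.document import Document
    source, target = Document(), Document()
    msg = pull_doc_reply_1.create("req-1", source)
    msg.push_to_document(target)  # replaces target's contents with source's JSON
    return target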
|
StarcoderdataPython
|
5165271
|
<filename>gibson/core/physics/robot_bases.py
## Author: pybullet, <NAME>
import pybullet as p
import gym, gym.spaces, gym.utils
import numpy as np
import os, inspect
currentdir = os.path.dirname(os.path.abspath(inspect.getfile(inspect.currentframe())))
parentdir = os.path.dirname(currentdir)
os.sys.path.insert(0,parentdir)
import pybullet_data
from gibson import assets
from transforms3d.euler import euler2quat
from transforms3d import quaternions
def quatWXYZ2quatXYZW(wxyz):
return np.concatenate((wxyz[1:], wxyz[:1]))
def quatXYZW2quatWXYZ(wxyz):
return np.concatenate((wxyz[-1:], wxyz[:-1]))
class BaseRobot:
"""
Base class for mujoco .xml/ROS urdf based agents.
Handles object loading
"""
def __init__(self, model_file, robot_name, scale = 1, env = None):
self.parts = None
self.jdict = None
self.ordered_joints = None
self.robot_body = None
self.robot_ids = None
self.model_file = model_file
self.robot_name = robot_name
self.physics_model_dir = os.path.join(os.path.dirname(os.path.abspath(assets.__file__)), "models")
self.scale = scale
self._load_model()
self.eyes = self.parts["eyes"]
self.env = env
def addToScene(self, bodies):
if self.parts is not None:
parts = self.parts
else:
parts = {}
if self.jdict is not None:
joints = self.jdict
else:
joints = {}
if self.ordered_joints is not None:
ordered_joints = self.ordered_joints
else:
ordered_joints = []
dump = 0
for i in range(len(bodies)):
if p.getNumJoints(bodies[i]) == 0:
part_name, robot_name = p.getBodyInfo(bodies[i], 0)
robot_name = robot_name.decode("utf8")
part_name = part_name.decode("utf8")
parts[part_name] = BodyPart(part_name, bodies, i, -1, self.scale, model_type=self.model_type)
for j in range(p.getNumJoints(bodies[i])):
p.setJointMotorControl2(bodies[i],j,p.POSITION_CONTROL,positionGain=0.1,velocityGain=0.1,force=0)
## TODO (hzyjerry): the following is diabled due to pybullet update
#_,joint_name,joint_type, _,_,_, _,_,_,_, _,_, part_name = p.getJointInfo(bodies[i], j)
_,joint_name,joint_type, _,_,_, _,_,_,_, _,_, part_name, _,_,_,_ = p.getJointInfo(bodies[i], j)
joint_name = joint_name.decode("utf8")
part_name = part_name.decode("utf8")
if dump: print("ROBOT PART '%s'" % part_name)
if dump: print("ROBOT JOINT '%s'" % joint_name) # limits = %+0.2f..%+0.2f effort=%0.3f speed=%0.3f" % ((joint_name,) + j.limits()) )
parts[part_name] = BodyPart(part_name, bodies, i, j, self.scale, model_type=self.model_type)
if part_name == self.robot_name:
self.robot_body = parts[part_name]
if i == 0 and j == 0 and self.robot_body is None: # if nothing else works, we take this as robot_body
parts[self.robot_name] = BodyPart(self.robot_name, bodies, 0, -1, self.scale, model_type=self.model_type)
self.robot_body = parts[self.robot_name]
if joint_name[:6] == "ignore":
Joint(joint_name, bodies, i, j, self.scale).disable_motor()
continue
if joint_name[:8] != "jointfix" and joint_type != p.JOINT_FIXED:
joints[joint_name] = Joint(joint_name, bodies, i, j, self.scale, model_type=self.model_type)
ordered_joints.append(joints[joint_name])
joints[joint_name].power_coef = 100.0
debugmode = 0
if debugmode:
for j in ordered_joints:
print(j, j.power_coef)
return parts, joints, ordered_joints, self.robot_body
def _load_model(self):
if self.model_type == "MJCF":
self.robot_ids = p.loadMJCF(os.path.join(self.physics_model_dir, self.model_file), flags=p.URDF_USE_SELF_COLLISION+p.URDF_USE_SELF_COLLISION_EXCLUDE_ALL_PARENTS)
if self.model_type == "URDF":
self.robot_ids = (p.loadURDF(os.path.join(self.physics_model_dir, self.model_file), flags=p.URDF_USE_SELF_COLLISION+p.URDF_USE_SELF_COLLISION_EXCLUDE_ALL_PARENTS, globalScaling = self.scale), )
self.parts, self.jdict, self.ordered_joints, self.robot_body = self.addToScene(self.robot_ids)
def reset(self):
if self.robot_ids is None:
self._load_model()
self.robot_body.reset_orientation(quatWXYZ2quatXYZW(euler2quat(*self.config["initial_orn"])))
self.robot_body.reset_position(self.config["initial_pos"])
self.reset_random_pos()
self.robot_specific_reset()
state = self.calc_state()
return state
def reset_random_pos(self):
'''Add randomness to resetted initial position
'''
if not self.config["random"]["random_initial_pose"]:
return
pos = self.robot_body.current_position()
orn = self.robot_body.current_orientation()
x_range = self.config["random"]["random_init_x_range"]
y_range = self.config["random"]["random_init_y_range"]
z_range = self.config["random"]["random_init_z_range"]
r_range = self.config["random"]["random_init_rot_range"]
new_pos = [ pos[0] + self.np_random.uniform(low=x_range[0], high=x_range[1]),
pos[1] + self.np_random.uniform(low=y_range[0], high=y_range[1]),
pos[2] + self.np_random.uniform(low=z_range[0], high=z_range[1])]
new_orn = quaternions.qmult(quaternions.axangle2quat([1, 0, 0], self.np_random.uniform(low=r_range[0], high=r_range[1])), orn)
self.robot_body.reset_orientation(new_orn)
self.robot_body.reset_position(new_pos)
def reset_new_pos(self, pos, orn):
self.robot_body.reset_orientation(orn)
self.robot_body.reset_position(pos)
def calc_potential(self):
return 0
class Pose_Helper: # dummy class to comply to original interface
def __init__(self, body_part):
self.body_part = body_part
def xyz(self):
return self.body_part.current_position()
def rpy(self):
return p.getEulerFromQuaternion(self.body_part.current_orientation())
def orientation(self):
return self.body_part.current_orientation()
class BodyPart:
def __init__(self, body_name, bodies, bodyIndex, bodyPartIndex, scale, model_type):
self.bodies = bodies
self.body_name = body_name
self.bodyIndex = bodyIndex
self.bodyPartIndex = bodyPartIndex
if model_type=="MJCF":
self.scale = scale
else:
self.scale = 1
self.initialPosition = self.current_position() / self.scale
self.initialOrientation = self.current_orientation()
self.bp_pose = Pose_Helper(self)
def get_name(self):
return self.body_name
def state_fields_of_pose_of(self, body_id, link_id=-1): # a method you will most probably need a lot to get pose and orientation
if link_id == -1:
(x, y, z), (a, b, c, d) = p.getBasePositionAndOrientation(body_id)
else:
(x, y, z), (a, b, c, d), _, _, _, _ = p.getLinkState(body_id, link_id)
x, y, z = x * self.scale, y * self.scale, z * self.scale
return np.array([x, y, z, a, b, c, d])
def get_pose(self):
return self.state_fields_of_pose_of(self.bodies[self.bodyIndex], self.bodyPartIndex)
def speed(self):
if self.bodyPartIndex == -1:
(vx, vy, vz), _ = p.getBaseVelocity(self.bodies[self.bodyIndex])
else:
(x,y,z), (a,b,c,d), _,_,_,_, (vx, vy, vz), (vr,vp,vyaw) = p.getLinkState(self.bodies[self.bodyIndex], self.bodyPartIndex, computeLinkVelocity=1)
return np.array([vx, vy, vz])
def angular_speed(self):
if self.bodyPartIndex == -1:
_, (vr,vp,vyaw) = p.getBaseVelocity(self.bodies[self.bodyIndex])
else:
(x,y,z), (a,b,c,d), _,_,_,_, (vx, vy, vz), (vr,vp,vyaw) = p.getLinkState(self.bodies[self.bodyIndex], self.bodyPartIndex, computeLinkVelocity=1)
return np.array([vr, vp, vyaw])
def current_position(self):
"""Get position in physics simulation (unscaled)
"""
return self.get_pose()[:3]
def current_orientation(self):
"""Get orientation in physics simulation
"""
return self.get_pose()[3:]
def reset_position(self, position):
p.resetBasePositionAndOrientation(self.bodies[self.bodyIndex], np.array(position) / self.scale, self.current_orientation())
def reset_orientation(self, orientation):
p.resetBasePositionAndOrientation(self.bodies[self.bodyIndex], self.current_position() / self.scale, orientation)
def reset_pose(self, position, orientation):
p.resetBasePositionAndOrientation(self.bodies[self.bodyIndex], np.array(position) / self.scale, orientation)
def pose(self):
return self.bp_pose
def contact_list(self):
return p.getContactPoints(self.bodies[self.bodyIndex], -1, self.bodyPartIndex, -1)
class Joint:
def __init__(self, joint_name, bodies, bodyIndex, jointIndex, scale, model_type):
self.bodies = bodies
self.bodyIndex = bodyIndex
self.jointIndex = jointIndex
self.joint_name = joint_name
_,_,_,_,_,_,_,_,self.lowerLimit, self.upperLimit,_,_,_, _,_,_,_ = p.getJointInfo(self.bodies[self.bodyIndex], self.jointIndex)
self.power_coeff = 0
        if model_type=="MJCF":
self.scale = scale
else:
self.scale = 1
def set_state(self, x, vx):
p.resetJointState(self.bodies[self.bodyIndex], self.jointIndex, x, vx)
    def current_position(self): # synonym method; get_state() already applies the scale
        return self.get_state()
def current_relative_position(self):
pos, vel = self.get_state()
        pos_mid = 0.5 * (self.lowerLimit + self.upperLimit)
return (
2 * (pos - pos_mid) / (self.upperLimit - self.lowerLimit),
0.1 * vel
)
def get_state(self):
x, vx,_,_ = p.getJointState(self.bodies[self.bodyIndex],self.jointIndex)
return x * self.scale, vx
def set_position(self, position):
p.setJointMotorControl2(self.bodies[self.bodyIndex],self.jointIndex,p.POSITION_CONTROL, targetPosition=np.array(position) / self.scale)
def set_velocity(self, velocity):
p.setJointMotorControl2(self.bodies[self.bodyIndex],self.jointIndex,p.VELOCITY_CONTROL, targetVelocity=velocity)
    def set_motor_torque(self, torque): # just some synonym method
self.set_torque(torque)
def set_torque(self, torque):
p.setJointMotorControl2(bodyIndex=self.bodies[self.bodyIndex], jointIndex=self.jointIndex, controlMode=p.TORQUE_CONTROL, force=torque) #, positionGain=0.1, velocityGain=0.1)
def set_motor_velocity(self, vel):
p.setJointMotorControl2(bodyIndex=self.bodies[self.bodyIndex], jointIndex=self.jointIndex,
controlMode=p.VELOCITY_CONTROL, targetVelocity=vel) # , positionGain=0.1, velocityGain=0.1)
    def reset_current_position(self, position, velocity): # just some synonym method
self.reset_position(position / self.scale, velocity)
def reset_position(self, position, velocity):
p.resetJointState(self.bodies[self.bodyIndex],self.jointIndex,targetValue= np.array(position) / self.scale, targetVelocity=velocity)
self.disable_motor()
def disable_motor(self):
p.setJointMotorControl2(self.bodies[self.bodyIndex],self.jointIndex,controlMode=p.POSITION_CONTROL, targetPosition=0, targetVelocity=0, positionGain=0.1, velocityGain=0.1, force=0)
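def _quat_order_sanity():
    # Illustrative check, not part of the original module: converting the
    # identity quaternion between wxyz and xyzw orders round-trips.
    wxyz = np.array([1., 0., 0., 0.])
    xyzw = quatWXYZ2quatXYZW(wxyz)
    assert np.allclose(quatXYZW2quatWXYZ(xyzw), wxyz)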
|
StarcoderdataPython
|
5191103
|
#!/usr/bin/python
"""
Copyright (c) 2018 <NAME>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
import os
import sys
import time
import logging
from lockfile import locked
from logging.handlers import RotatingFileHandler
from common import *
@locked(LOCKFILE)
def main():
if os.fork() != 0:
sys.exit()
time.sleep(3)
output = os.popen('mount').read()
for line in filter(lambda x: x.find('ifuse') == 0, output.split('\n')):
path = line.split()[2]
logger.info('Unmounting %s ...' % path)
umount(path)
logger.info('Deleting DSM share named %s...' % os.path.basename(path))
del_share(path)
if __name__ == '__main__':
logger.info('--- umounting.py started ---')
try:
main()
except Exception as e:
logger.error(str(e))
logger.info('--- umounting.py end ---')
|
StarcoderdataPython
|
4899242
|
from __future__ import division, print_function
import numpy as np
import sys,os
sys.path.append("..")
import pyrads
from scipy.integrate import trapz,simps,cumtrapz
### -----------------------------------
### Helpers
class Dummy:
pass
### -----------------------------------
# ---
## setup thermodynamic parameters
params = Dummy()
params.Rv = pyrads.phys.H2O.R # moist component
params.cpv = pyrads.phys.H2O.cp
params.Lvap = pyrads.phys.H2O.L_vaporization_TriplePoint
params.satvap_T0 = pyrads.phys.H2O.TriplePointT
params.satvap_e0 = pyrads.phys.H2O.TriplePointP
params.esat = lambda T: pyrads.Thermodynamics.get_satvps(T,params.satvap_T0,params.satvap_e0,params.Rv,params.Lvap)
params.R = pyrads.phys.air.R # dry component
params.cp = pyrads.phys.air.cp
params.ps_dry = 1e5 # surface pressure of dry component
params.g = 9.8 # surface gravity
params.cosThetaBar = 3./5. # average zenith angle used in 2stream eqns
params.RH = 1. # relative humidity
params.R_CO2 = pyrads.phys.CO2.R
params.ppv_CO2 = 400e-6
# ---
## setup resolution (vertical,spectral)
#N_press = 60 #
N_press = 15 #
wavenr_min = 0.1 # [cm^-1]
wavenr_max = 3500. #
#dwavenr = 0.01 #
dwavenr = 0.1 #
Tstrat = 150. # stratospheric temperature
# ---
## setup range of temperatures, and if/where output is saved to:
#Ts_grid = np.arange(170.,370.1,10.)
Ts_grid = np.arange(170.,370.1,20.)
filename = 'output.compute_olr_h2o.01.100RH.numba.txt'
saveOutput = True # Save the output/plots? [Yes/No]
if saveOutput:
OUTDIR = "./"
print( "Saving output to ",OUTDIR)
if not os.path.isdir( OUTDIR ):
os.makedirs( OUTDIR )
### -----------------------------------
## MAIN LOOP
# save resolution etc for a given loop to file:
if saveOutput:
f = open(OUTDIR+filename,'w')
f.write("wavenr_min,wavenr_max,dwave [cm^-1] = %.4f,%.4f,%.4f" % (wavenr_min,wavenr_max,dwavenr) )
f.write("\n")
f.write("N_press = %.1f" % N_press )
f.write("\n")
f.write("\n")
f.write("Ts [K],\tps [bar],\tolr [W/m2],\tsurface contribution to olr [W/m2],\tTransmission int[T dBdT(Ts) dn]/int[ dBdT(Ts) dn],\tSimple feedback model int[dB/dT*T]")
f.write("\n")
f.close()
## main loop here
for Ts in Ts_grid:
f = open(OUTDIR+filename,'a')
# setup grid:
g = pyrads.SetupGrids.make_grid( Ts,Tstrat,N_press,wavenr_min,wavenr_max,dwavenr,params, RH=params.RH )
# compute optical thickness:
# -> this is the computationally most intensive step
g.tau,tmp,tmp2 = pyrads.OpticalThickness.compute_tau_H2ON2_CO2dilute(g.p,g.T,g.q,params.ppv_CO2,g,params, RH=params.RH, use_numba=True )
# compute Planck functions etc:
# -> here: fully spectrally resolved!
T_2D = np.tile( g.T, (g.Nn,1) ).T # [press x wave]
g.B_surf = np.pi* pyrads.Planck.Planck_n( g.n,Ts ) # [wave]
g.B = np.pi* pyrads.Planck.Planck_n( g.wave, T_2D ) # [press x wave]
# compute OLR etc:
olr_spec = pyrads.Get_Fluxes.Fplus_alternative(0,g) # (spectrally resolved=irradiance)
olr = simps(olr_spec,g.n)
# compute fraction of surface flux that makes it to space
surf_spec = g.B_surf * np.exp(-g.tau[-1,:])
surf = simps(surf_spec,x=g.n)
# compute spectrally averaged transmission function...
weight = np.pi* pyrads.Planck.dPlanckdT_n( g.n,Ts )
trans = trapz( np.exp(-g.tau[-1,:]) * weight,x=g.n ) / trapz( weight,x=g.n )
# Simple feedback model (like above, without normalization)
weight = np.pi* pyrads.Planck.dPlanckdT_n( g.n,Ts )
lam = trapz( np.exp(-g.tau[-1,:]) * weight,x=g.n )
print( "\n",Ts,g.ps/1e5,olr,surf, "\n")
f.write("%.2f,\t%.4f,\t%.8f,\t%.8f,\t%.8f,\t%.8f" % (Ts,g.ps/1e5,olr,surf,trans,lam) )
f.write("\n")
f.close()
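# For reference, the last two quantities written out (a sketch of the math, with
# tau_surf the optical depth at the surface level and B the Planck function):
#   trans = int[ exp(-tau_surf(n)) dB/dT(n,Ts) dn ] / int[ dB/dT(n,Ts) dn ]
#   lam   = int[ exp(-tau_surf(n)) dB/dT(n,Ts) dn ]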
|
StarcoderdataPython
|
3430040
|
import datetime
from enum import Enum
from pydantic import BaseModel, Field
class SchoolDistricts(Enum):
davis_district = "Davis District"
alpine_district = "Alpine District"
canyons_district = "Canyons District"
granite_district = "Granite District"
jordan_district = "Jordan District"
nebo_district = "Nebo District"
cache_district = "Cache District"
weber_district = "Weber District"
tooele_district = "Tooele District"
wasatch_district = "Wasatch District"
murray_district = "Murray District"
sevier_district = "Sevier District"
salt_lake_district = "Salt Lake District"
provo_district = "Provo District"
iron_district = "Iron District"
park_city_district = "Park City District"
washington_district = "Washington District"
box_elder_district = "Box Elder District"
logan_city_district = "Logan City District"
carbon_district = "Carbon District"
south_summit_district = "South Summit District"
beaver_district = "Beaver District"
duchesne_district = "Duchesne District"
juab_district = "Juab District"
ogden_city_district = "Ogden City District"
south_sanpete_district = "South Sanpete District"
uintah_district = "Uintah District"
emery_district = "Emery District"
kane_district = "Kane District"
morgan_district = "Morgan District"
north_summit_district = "North Summit District"
daggett_district = "Daggett District"
garfield_district = "Garfield District"
grand_district = "Grand District"
millard_district = "Millard District"
north_sanpete_district = "North Sanpete District"
piute_district = "Piute District"
rich_district = "Rich District"
san_juan_district = "San Juan District"
tintic_district = "Tintic District"
wayne_district = "Wayne District"
salt_lake_county_private = "Salt Lake County - Private"
utah_county_private = "Utah County - Private"
bear_river_private = "Bear River - Private"
central_utah_private = "Central Utah - Private"
davis_county_private = "Davis County - Private"
southwest_utah_private = "Southwest Utah - Private"
weber_morgan_private = "Weber-Morgan - Private"
salt_lake_county_charter = "Salt Lake County - Charter"
utah_county_charter = "Utah County - Charter"
davis_county_charter = "Davis County - Charter"
bear_river_charter = "Bear River - Charter"
tooele_county_charter_private = "Tooele County - Charter/Private"
weber_morgan_charter = "Weber-Morgan - Charter"
southeast_utah_charter = "Southeast Utah - Charter"
southwest_utah_charter = "Southwest Utah - Charter"
summit_county_charter_private = "Summit County - Charter/Private"
tri_county_charter_private = "TriCounty - Charter/Private"
wasatch_county_charter_private = "Wasatch County - Charter/Private"
class Jurisdiction(Enum):
weber_morgan = "Weber-Morgan"
wasatch_county = "Wasatch County"
utah_county = "Utah County"
tri_county = "TriCounty"
tooele_county = "Tooele County"
summit_county = "Summit County"
southwest_utah = "Southwest Utah"
southeast_utah = "Southeast Utah"
san_juan = "San Juan"
salt_lake_county = "Salt Lake County"
davis_county = "Davis County"
central_utah = "Central Utah"
bear_river = "Bear River"
class SchoolCasesByDistrict(BaseModel):
school_district: SchoolDistricts = Field(..., alias="School District")
jurisdiction: Jurisdiction = Field(..., alias="Jurisdiction")
active_cases: str = Field(..., alias="Active Cases")
total_cases: int = Field(..., alias="Total Cases")
class DBSchoolCasesByDistrict(SchoolCasesByDistrict):
date: datetime.datetime
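# Hedged usage sketch: pydantic populates these models through the field
# aliases, so a raw row keyed by the human-readable column names parses
# directly (the values below are illustrative only):
def _example_row():
    return SchoolCasesByDistrict(**{
        "School District": "Davis District",
        "Jurisdiction": "Davis County",
        "Active Cases": "12",
        "Total Cases": 345,
    })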
|
StarcoderdataPython
|
9763199
|
import torch
import numpy as np
import cv2
import os
class PawpularDataset(torch.utils.data.Dataset):
def __init__(self, csv, data_path, mode='train', augmentations=None, meta_features=None):
self.csv = csv
self.data_path = data_path
self.mode = mode
self.augmentations = augmentations
self.meta_features = meta_features
def __len__(self):
return len(self.csv)
def __getitem__(self, index):
row = self.csv.iloc[index]
image_path = os.path.join(self.data_path, self.mode, f'{row["Id"]}.jpg')
image = cv2.imread(image_path)
image = cv2.cvtColor(image, cv2.COLOR_BGR2RGB)
if self.augmentations:
augmented = self.augmentations(image=image)
image = augmented["image"]
image = np.transpose(image, (2, 0, 1)).astype(np.float32)
if self.meta_features:
data = (torch.tensor(image, dtype=torch.float),
torch.tensor(row[self.meta_features], dtype=torch.float))
else:
data = torch.tensor(image, dtype=torch.float)
# if self.mode == 'test':
# return data
return data, torch.tensor([row['Pawpularity'] / 100.], dtype=torch.float)
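# Hedged usage sketch (assumes a pandas DataFrame `csv` with Id and Pawpularity
# columns and a ./train folder of jpg files; not part of the original module):
def _example_loader(csv):
    dataset = PawpularDataset(csv, data_path=".", mode="train")
    return torch.utils.data.DataLoader(dataset, batch_size=32, shuffle=True)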
|
StarcoderdataPython
|
6677050
|
<filename>utils/builder/register_builder/riscv/BootPriority.py
#
# Copyright (C) [2020] Futurewei Technologies, Inc.
#
# FORCE-RISCV is licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND, EITHER
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT, MERCHANTABILITY OR
# FIT FOR A PARTICULAR PURPOSE.
# See the License for the specific language governing permissions and
# limitations under the License.
#
#**************************************************************************************************
# BootPriority.py
#
# This file defines the BootPriority helper class.
#**************************************************************************************************
#**************************************************************************************************
# The boot priority class defines helper methods associated with boot priority.
#**************************************************************************************************
class BootPriority:
    ## Returns the appropriate boot priority based on the name and type of register provided,
    #  along with whether the register is write-only
    @staticmethod
    def getBootPriority(aName = None, aType = None, aWriteOnly = 0):
        #if aType is this_particular_type:
            #return a_particular_boot_priority
        #if aName is this_particular_name:
            #return a_particular_boot_priority
        return 1
|
StarcoderdataPython
|
3450821
|
<filename>core/client.py
"""
Hbtn Module
"""
from typing import List, Dict, Union, Any
import requests
from bs4 import BeautifulSoup
JsonType = Dict[str, Union[List[Dict[str, Union[list, Any]]], Any]]
class Hbtn:
"""
Class that authenticates to the intranet website,
and fetches json data of a project referenced by URL.
"""
__loginURL = "https://intranet.hbtn.io/auth/sign_in"
    __headers = {'user-agent': "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) "
                               "Chrome/80.0.3987.87 Safari/537.36"}
def __init__(self, username: str, password: str):
"""
:param username: intranet username.
:param password: <PASSWORD>.
"""
# Initiate session
with requests.Session() as session:
session.headers.update(self.__headers)
self.__session = session
assert self.authenticate(
username,
password), "Double check your credentials [Authentication Failed]"
@staticmethod
def get_token(html_content: str) -> str:
"""
Returns extracted auth token from html
"""
soup = BeautifulSoup(html_content, features='lxml')
return soup.find('input', attrs={'name': 'authenticity_token'})['value']
@staticmethod
def preprocess_data(json_data: JsonType) -> JsonType:
"""
Cleans the retrieved data.
"""
return {
"name": json_data["name"],
'github_repo': json_data['tasks'][0]['github_repo'],
'github_dir': json_data['tasks'][0]['github_dir'],
"tasks": [
{
'title': task['title'],
'github_file': [
file.strip() for file in task['github_file'].split(',')
if file.split('.')[-1] not in ['png', 'jpeg', 'icon', 'jpg']
]
}
for task in json_data['tasks']]
}
def get_login_page(self) -> str:
"""Get login page.
:return: response.text
"""
with self.__session.get(self.__loginURL) as res:
return res.text
def authenticate(self, username: str, password: str) -> bool:
"""
Handles authentication with website using username && password.
"""
login_page = self.get_login_page()
payload = {
'authenticity_token': self.get_token(login_page),
'user[login]': username,
'user[password]': password,
'user[remember_me]': '0',
'commit': 'Log ' + 'in'}
# login to website
with self.__session.post(self.__loginURL, data=payload) as res:
return 'Invalid Email or password.' not in res.text
def fetch_project_details(self, url: str) -> Union[JsonType, Dict]:
"""Fetch project details referenced by project URL.
:param url: project URL.
:return: json data or empty dict.
"""
with self.__session.get(url + ".json") as res:
if res.status_code in range(200, 299):
data = self.preprocess_data(res.json())
data['tasks'].append(
{ # Add README.md file :)
'title': "README.md file",
'github_file': ["README.md"]
}
)
return data
return {}
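# Hedged usage sketch (the credentials and project URL below are placeholders):
def _example_fetch(username, password):
    client = Hbtn(username, password)
    return client.fetch_project_details("https://intranet.hbtn.io/projects/231")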
|
StarcoderdataPython
|
3406737
|
import fridge.Constituent.Smear as Smear
import fridge.utilities.mcnpCreatorFunctions as mcnpCF
class FuelCoolant(Smear.Smear):
"""Creates the coolant surrounding the fuel pin.
This coolant is a homogenized material consisting of the coolant material and the wirewrap."""
def __init__(self, unitInfo, voidMaterial='', voidPercent=1.0):
super().__init__(unitInfo, voidMaterial=voidMaterial, voidPercent=voidPercent)
self.cladSurfaceNum = 0
def makeComponent(self, coolantInfo):
self.flat2flat = coolantInfo[0]
self.height = coolantInfo[1]
self.cladSurfaceNum = coolantInfo[2]
surfaceComment = "$Pin: Coolant - 1% higher than fuel"
cellComment = "$Pin: Wirewrap + Coolant"
self.surfaceCard = mcnpCF.getRHP(self.flat2flat, self.height, self.position, self.surfaceNum,
surfaceComment)
self.cellCard = mcnpCF.getOutsideCell(self.cellNum, self.materialNum, self.material.atomDensity,
self.cladSurfaceNum, self.universe, cellComment)
|
StarcoderdataPython
|
322039
|
import os
import base64
import requests
from io import BytesIO
from PIL import Image, ImageDraw, ImageOps, ImageColor
from abc import abstractmethod
from flask import request, abort
from flask_restful import Resource
class ImageFunctions(object):
@staticmethod
def get_color(color):
try:
return ImageColor.getrgb(color)
except (AttributeError, ValueError):
return
def add_avatar_border(self, avatar, width=10, outline=None):
outline = self.get_color(outline or "#ffffff")
drawer = ImageDraw.Draw(avatar)
drawer.ellipse((0, 0) + avatar.size, width=width, outline=outline)
return avatar
@staticmethod
def get_round_avatar(avatar):
avatar_mask = Image.new("L", avatar.size)
avatar_drawer = ImageDraw.Draw(avatar_mask)
        avatar_drawer.ellipse((0, 0) + avatar.size, fill=255)
avatar = ImageOps.fit(avatar, avatar_mask.size)
avatar.putalpha(avatar_mask)
return avatar
def draw_circular_progress(self, base, value, max_value, box, width=30, fill=None):
fill = self.get_color(fill)
angle = ((value / max_value) * 360) - 90
drawer = ImageDraw.Draw(base)
drawer.arc(box, -90, angle, width=width, fill=fill)
class ApiResourceBase(ImageFunctions, Resource):
ROUTE = str()
REQUIRED_DATA = list()
TEMPLATES_PATH = "app/api_resources/templates/"
def __new__(cls, *args, **kwargs):
if not cls.ROUTE:
raise NotImplementedError
return super().__new__(cls, *args, **kwargs)
IMAGE_CACHE_PATH = "cache/images/"
FONT_PATH = "app/api_resources/templates/fonts/"
MEDIA_MAX_SIZE = 4200000
@staticmethod
def encode_url(url):
return base64.urlsafe_b64encode(url.encode("ascii")).decode("ascii")
@staticmethod
def get_image_from_url(url: str, max_size=MEDIA_MAX_SIZE):
response = requests.get(url, stream=True)
for chunk in response.iter_content(chunk_size=max_size):
if len(chunk) >= max_size:
raise OverflowError
image_bytes = BytesIO(chunk)
image_bytes.seek(0)
return image_bytes
def get_cached_image_from_url(self, url: str, max_size=MEDIA_MAX_SIZE):
file_path = self.IMAGE_CACHE_PATH + self.encode_url(url)
try:
with open(file_path, "rb") as file:
return BytesIO(file.read())
except FileNotFoundError:
image_bytes = self.get_image_from_url(url, max_size)
try:
with open(file_path, "wb") as file:
file.write(image_bytes.read())
except FileNotFoundError:
os.makedirs(self.IMAGE_CACHE_PATH)
return image_bytes
@abstractmethod
def _process(self, **kwargs):
raise NotImplementedError
@staticmethod
def to_bytes(image: Image.Image, image_format: str = "png"):
image_bytes = BytesIO()
if isinstance(image, list):
image_format = "gif"
image[0].save(
image_bytes, save_all=True, append_images=image[1:], format=image_format.upper(), optimize=True)
else:
image.save(image_bytes, format=image_format.upper())
image_bytes.seek(0)
return image_bytes, image_format
def get_json(self):
payload = request.get_json()
if not all(key in payload for key in self.REQUIRED_DATA):
abort(400)
return payload
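# Hedged sketch of a concrete resource built on the base class above; the route
# name and drawing steps are illustrative, not part of this module.
class RoundAvatar(ApiResourceBase):
    ROUTE = "/round-avatar"
    REQUIRED_DATA = ["avatar_url"]
    def _process(self, **kwargs):
        avatar = Image.open(self.get_image_from_url(kwargs["avatar_url"])).convert("RGBA")
        return self.to_bytes(self.get_round_avatar(avatar))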
|
StarcoderdataPython
|
306157
|
<reponame>brystmar/greeting-cards<filename>backend/config.py
"""Defines the object to configure parameters for our Flask app."""
from logging import getLogger
from os import environ, path
logger = getLogger()
class Config(object):
logger.debug("Start of the Config() class.")
# If the app is running locally, our environment variables can be applied directly
# from the local .env file
if "pycharm" in path.abspath(path.dirname(__file__)).lower():
logger.debug("Applying variables from local .env file")
from env_tools import apply_env
apply_env()
logger.info("Local .env variables applied")
# App-related variables
APP_NAME = "greeting-cards"
BOUND_PORT = 5000
SECRET_KEY = environ.get("SECRET_KEY") or "<KEY>"
WHITELISTED_ORIGIN = environ.get('WHITELISTED_ORIGIN')
WHITELISTED_ORIGINS = environ.get('WHITELISTED_ORIGINS')
# TODO: Determine which variable is actually needed
# Database
SQLALCHEMY_DATABASE_URI = environ.get("SQLALCHEMY_DATABASE_URI")
logger.debug(f"SQLAlchemy db URI: {SQLALCHEMY_DATABASE_URI}")
# Should SQLAlchemy send a notification to the app every time an object changes?
SQLALCHEMY_TRACK_MODIFICATIONS = False
# Log a warning if the fallback secret key was used
if SECRET_KEY != environ.get("SECRET_KEY"):
logger.warning("Error loading SECRET_KEY! Temporarily using a hard-coded key.")
logger.debug("End of the Config() class.")
|
StarcoderdataPython
|
393207
|
<reponame>vadim-ivlev/STUDY
def twoStacks(x, a, b):
    score = 0
    total = 0
    while total < x:
        # pick from the stack with the smaller top element; fall back to
        # whichever stack still has elements, and stop when both are empty
        if a and b:
            head = a if a[0] < b[0] else b
        elif a:
            head = a
        elif b:
            head = b
        else:
            break
        if total + head[0] < x:
            total += head.pop(0)
            score += 1
        else:
            break
    return score
print(twoStacks(10, [4, 2, 4, 6, 1], [2, 1, 8, 5]))
|
StarcoderdataPython
|
1605795
|
#----------------------------
# Author: <NAME>
#----------------------------
from collections import namedtuple
import numpy as np
import math
FILE_TYPE = "P2" # to verify the file type
PGMFile = namedtuple('PGMFile', ['max_shade', 'data']) # named tuple
# This function receives the name of a file, reads it in, verifies that
# the type is P2, and returns the corresponding PGMFile
def read_pgm(filename):
rows = 0
cols = 0
max_shade = 0
pixel_array = []
line_no = 1
try:
f = open(filename)
except:
print(f"\nError: The file named \'{filename}\' does not exist!")
else:
try:
for line in f: # reading one line at a time from the file
if line != "\n": # checking for blank lines
end_index = line.find("\n")
line = line[0:end_index]
                    if line.find("#") != -1: # checking for comments
end_index = line.find("#")
line = line[0:end_index]
line = line.strip()
if len(line) != 0:
if line_no == 1: # checking for file type in line 1
if line == FILE_TYPE:
line_no += 1
else:
print("Error: The input file is not a P2 image!")
elif line_no == 2: # getting the width and height of the image from line 2
dimensions = line.split()
rows = int(dimensions[1]) # rows = height
cols = int(dimensions[0]) # columns = width
line_no += 1
elif line_no == 3: # getting the maximum shade value from line 3
max_shade = int(line)
line_no += 1
else:
line_array = line.split() # storing all the numbers into a list after removing all the white spaces
for i in range(len(line_array)):
pixel_array.append(int(line_array[i]))
except:
print("\nError: The input file could not be read properly!")
else:
data = np.array(pixel_array).reshape(rows, cols) # creating a 2D numpy array
return PGMFile(max_shade, data) # returning the corresponding PGMFile
# This function receives a file name and a PGMFile, and creates the corresponding image file
def create_image(filename, pgm):
with open(filename, "w") as f:
print(f"{FILE_TYPE}\n{pgm.data.shape[1]} {pgm.data.shape[0]}\n{pgm.max_shade}\n", file=f)
for row in pgm.data:
for i in range(0, len(row)):
print(str(row[i]), end=" ", file=f)
print("", file=f)
# This function reflects a pgm image from left to right
def reflect_left_to_right(pgm_file):
matrix = np.flip(pgm_file.data, axis=1)
return PGMFile(pgm_file.max_shade, matrix)
# This function reflects a pgm image from top to bottom
def reflect_top_to_bottom(pgm_file):
matrix = np.flip(pgm_file.data, axis=0)
return PGMFile(pgm_file.max_shade, matrix)
# This function inverts the black and white pixels in a pgm image
def invert_black_white(pgm_file):
matrix = np.subtract(pgm_file.max_shade, pgm_file.data)
return PGMFile(pgm_file.max_shade, matrix)
# This function brightens a pgm image by the given percentage
def brighten(pgm_file, increase_by):
brightness = int((increase_by/100) * (np.sum(pgm_file.data, dtype=np.uint64) / pgm_file.data.size))
matrix = np.add(brightness, pgm_file.data) # adding the brightness value to each pixel of the image
matrix = np.clip(matrix, 0, pgm_file.max_shade) # some pixels will be > 255, so bringing those values down to 255
return PGMFile(pgm_file.max_shade, matrix)
# A function that receives a standard deviation σ and number of neighbors r, and returns the corresponding
# 1D dimensional Gaussian kernel of length 2r+ 1, normalized so that its entries sum to 1
def one_d_gaussian_kernel(sigma, r):
size = (2*r)+1
gaussian_kernel = []
for i in range(size):
x = i-r
p_x = 1/(sigma*math.sqrt(2*math.pi)) * (math.pow(math.e, (-1/2)*(math.pow(x, 2)/math.pow(sigma, 2))))
gaussian_kernel.append(p_x)
gaussian_kernel = np.array(gaussian_kernel)
gaussian_kernel = np.divide(gaussian_kernel, np.sum(gaussian_kernel))
return gaussian_kernel
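# Sanity sketch (illustrative only): a kernel from the function above has
# length 2r + 1 and its entries sum to 1.
def _kernel_sanity(sigma=1.0, r=2):
    k = one_d_gaussian_kernel(sigma, r)
    assert k.size == 2 * r + 1 and abs(k.sum() - 1.0) < 1e-9
    return k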
# A helper function to truncate and normalize the 1D Gaussian kernel
def truncate_normalize_1d_gaussian(kernel, left, right):
highest_col_index = kernel.size-1
new_kernel = np.copy(kernel)
if left != 0:
col_nums = [y for y in range(left)] # storing the column numbers to be deleted from the left, in a list
new_kernel = np.delete(new_kernel, col_nums)
highest_col_index = new_kernel.size - 1
if right != 0:
col_nums = [highest_col_index-y for y in range(right)] # storing the column numbers to be deleted from the right, in a list
new_kernel = np.delete(new_kernel, col_nums)
new_kernel = np.divide(new_kernel, np.sum(new_kernel)) # normalizing the kernel
return new_kernel
def convolve_1dkernel_hrzntl(kernel, image_matrix):
r = kernel.size // 2
num_rows, num_cols = image_matrix.shape
# traversing through the image matrix
for row in range(num_rows):
for col in range(num_cols):
left = col # num of cols to the left of current pixel
right = (num_cols-1) - col # num of cols to the right of current pixel
trunc_left = 0
trunc_right = 0
if left < r:
trunc_left = r - left # num of cols to truncate from left of 1D Gaussian
if right < r:
                trunc_right = r - right # num of cols to truncate from the right of the 1D Gaussian
new_kernel = truncate_normalize_1d_gaussian(kernel, trunc_left, trunc_right)
curr_pixel_value = 0
if left > r:
for x in range(new_kernel.size):
curr_pixel_value += new_kernel[x] * image_matrix[row][x+(left-r)]
else:
for x in range(new_kernel.size):
curr_pixel_value += new_kernel[x] * image_matrix[row][x]
image_matrix[row][col] = curr_pixel_value # updating the current pixel value
return image_matrix
# A function that convolves a 2D image with a 1D kernel twice in succession: first horizontally, then vertically
def convolve_1dkernel(kernel, pgm_file):
img_matrix = np.copy(pgm_file.data)
img_matrix = convolve_1dkernel_hrzntl(kernel, img_matrix) # convolving horizontally
img_matrix = np.transpose(img_matrix) # changing the orientation of the image
img_matrix = convolve_1dkernel_hrzntl(kernel, img_matrix) # convolving vertically
img_matrix = np.transpose(img_matrix) # changing the orientation of the image
max_pixel = np.amax(img_matrix)
return PGMFile(max_pixel, img_matrix)
# A helper function to build the gradient vector matrix
def get_gradient_vector(smooth_image):
img_matrix = smooth_image.data
num_rows, num_cols = img_matrix.shape
gradient_vector = []
cd_j = 0 # in j direction - horizontal
cd_k = 0 # in k direction - vertical
for row in range(num_rows):
top = row
bottom = (num_rows-1)-row
cols = []
for col in range(num_cols):
left = col
right = (num_cols-1)-col
if left >= 1 and right >= 1:
cd_j = (img_matrix[row][col+1] - img_matrix[row][col-1])/2
elif left < 1 and right >= 1:
cd_j = img_matrix[row][col+1] - img_matrix[row][col]
elif left >= 1 and right < 1:
cd_j = img_matrix[row][col] - img_matrix[row][col-1]
if top >= 1 and bottom >= 1:
cd_k = (img_matrix[row+1][col] - img_matrix[row-1][col])/2
elif top < 1 and bottom >= 1:
cd_k = img_matrix[row+1][col] - img_matrix[row][col]
elif top >= 1 and bottom < 1:
cd_k = img_matrix[row][col] - img_matrix[row-1][col]
cols.append((cd_j, cd_k))
gradient_vector.append(cols)
return gradient_vector # returns gradient vector matrix as a matrix of tuples
# a helper function to get the theta of the gradient vector
def get_angle(gradient_vector):
result = 0
angle = np.arctan2(gradient_vector[1], gradient_vector[0]) * 180 / np.pi
if angle > 337.5 or angle <= 22.5 or (angle > 157.5 and angle <= 202.5):
result = 0
elif (angle > 22.5 and angle <= 67.5) or (angle > 202.5 and angle <= 247.5):
result = 45
elif (angle > 67.5 and angle <= 112.5) or (angle > 247.5 and angle <= 292.5):
result = 90
elif (angle > 112.5 and angle <= 157.5) or (angle > 292.5 and angle <= 337.5):
result = 135
return result
# A function to detect edges in the image
def edge_detection(pgm_file, gradient_vector_matrix):
img_matrix = pgm_file.data
num_rows, num_cols = img_matrix.shape
temp_matrix = np.copy(pgm_file.data)
for row in range(num_rows):
for col in range(num_cols):
gradient_vector = gradient_vector_matrix[row][col]
mag_grad_vector = math.sqrt(math.pow(gradient_vector[0], 2) + math.pow(gradient_vector[1], 2))
temp_matrix[row][col] = mag_grad_vector
max_pixel = np.amax(temp_matrix)
return PGMFile(max_pixel, temp_matrix)
# A function to thin the edges in the image
def edge_thinning(pgm_file, gradient_vector_matrix):
img_matrix = pgm_file.data
num_rows, num_cols = img_matrix.shape
temp_matrix = np.copy(pgm_file.data)
for row in range(num_rows):
top = row
bottom = (num_rows-1)-row
for col in range(num_cols):
left = col
right = (num_cols-1)-col
gradient_vector = gradient_vector_matrix[row][col]
angle = get_angle(gradient_vector)
curr_pixel = img_matrix[row][col]
if angle == 0:
if left >= 1 and right >= 1:
if curr_pixel < img_matrix[row][col-1] or curr_pixel < img_matrix[row][col+1]:
temp_matrix[row][col] = 0
elif left < 1 and right >= 1:
if curr_pixel < img_matrix[row][col+1]:
temp_matrix[row][col] = 0
elif left >= 1 and right < 1:
if curr_pixel < img_matrix[row][col-1]:
temp_matrix[row][col] = 0
elif angle == 45:
if left >= 1 and right >= 1 and top >= 1 and bottom >= 1:
if curr_pixel < img_matrix[row-1][col+1] or curr_pixel < img_matrix[row+1][col-1]:
temp_matrix[row][col] = 0
elif left >= 1 and bottom >= 1:
if curr_pixel < img_matrix[row+1][col-1]:
temp_matrix[row][col] = 0
elif right >= 1 and top >= 1:
if curr_pixel < img_matrix[row-1][col+1]:
temp_matrix[row][col] = 0
elif angle == 90:
if top >= 1 and bottom >= 1:
if curr_pixel < img_matrix[row-1][col] or curr_pixel < img_matrix[row+1][col]:
temp_matrix[row][col] = 0
elif top < 1 and bottom >= 1:
if curr_pixel < img_matrix[row+1][col]:
temp_matrix[row][col] = 0
elif top >= 1 and bottom < 1:
if curr_pixel < img_matrix[row-1][col]:
temp_matrix[row][col] = 0
elif angle == 135:
if left >= 1 and right >= 1 and top >= 1 and bottom >= 1:
if curr_pixel < img_matrix[row-1][col-1] or curr_pixel < img_matrix[row+1][col+1]:
temp_matrix[row][col] = 0
elif left >= 1 and top >= 1:
if curr_pixel < img_matrix[row-1][col-1]:
temp_matrix[row][col] = 0
elif right >= 1 and bottom >= 1:
if curr_pixel < img_matrix[row+1][col+1]:
temp_matrix[row][col] = 0
max_pixel = np.amax(temp_matrix)
return PGMFile(max_pixel, temp_matrix)
# A function to suppress the noise in the image
def noise_suppress(pgm_file, low_thresh, high_thresh):
img_matrix = pgm_file.data
num_rows, num_cols = img_matrix.shape
temp_matrix = np.copy(pgm_file.data)
max_pixel = np.amax(img_matrix)
low_thresh = low_thresh * max_pixel
high_thresh = high_thresh * max_pixel
for row in range(num_rows):
top = row
bottom = (num_rows-1)-row
for col in range(num_cols):
left = col
right = (num_cols-1)-col
curr_pixel = img_matrix[row][col]
if left >= 1 and right >= 1 and top >= 1 and bottom >= 1:
if curr_pixel < low_thresh:
temp_matrix[row][col] = 0
                elif low_thresh <= curr_pixel < high_thresh:
                    neighbors = [img_matrix[row][col-1], img_matrix[row-1][col-1],
                                 img_matrix[row-1][col], img_matrix[row-1][col+1],
                                 img_matrix[row][col+1], img_matrix[row+1][col+1],
                                 img_matrix[row+1][col], img_matrix[row+1][col-1]]
                    # suppress weak pixels when no neighbour exceeds the high threshold
                    if all(n <= high_thresh for n in neighbors):
                        temp_matrix[row][col] = 0
max_pixel = np.amax(temp_matrix)
return PGMFile(max_pixel, temp_matrix)
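# Hedged end-to-end sketch of the edge-detection pipeline assembled from the
# functions above (file names and thresholds are placeholders):
def _example_pipeline(infile="input.pgm", outfile="edges.pgm"):
    pgm = read_pgm(infile)
    smooth = convolve_1dkernel(one_d_gaussian_kernel(1.0, 2), pgm)
    grad = get_gradient_vector(smooth)
    thinned = edge_thinning(edge_detection(smooth, grad), grad)
    create_image(outfile, noise_suppress(thinned, 0.05, 0.15))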
|
StarcoderdataPython
|
203483
|
import os
import importlib
from pathlib import Path
import getpass
import inspect
import pickle
from unittest.mock import Mock
import pytest
import yaml
import numpy as np
from ploomber.env.env import Env
from ploomber.env.decorators import with_env, load_env
from ploomber.env import validate
from ploomber.env.envdict import EnvDict
from ploomber.env import expand
from ploomber.env.expand import (EnvironmentExpander, expand_raw_dictionary,
cast_if_possible, iterate_nested_dict,
expand_raw_dictionaries_and_extract_tags)
from ploomber.util import default
from ploomber import repo
from ploomber.exceptions import BaseException
def test_env_repr_and_str(cleanup_env, monkeypatch):
mock = Mock()
mock.datetime.now().isoformat.return_value = 'current-timestamp'
monkeypatch.setattr(expand, "datetime", mock)
env = Env({'user': 'user', 'cwd': 'cwd', 'root': 'root'})
d = {
'user': 'user',
'cwd': 'cwd',
'now': 'current-timestamp',
'root': 'root'
}
assert repr(env) == f"Env({d})"
assert str(env) == str(d)
def test_env_repr_and_str_when_loaded_from_file(tmp_directory, cleanup_env,
monkeypatch):
mock = Mock()
mock.datetime.now().isoformat.return_value = 'current-timestamp'
monkeypatch.setattr(expand, "datetime", mock)
path_env = Path('env.yaml')
d = {
'user': 'user',
'cwd': 'cwd',
'now': 'current-timestamp',
'root': 'root',
}
path_env.write_text(yaml.dump(d))
env = Env()
path = str(path_env.resolve())
expected = f"Env({d!r}) (from file: {path})"
assert repr(env) == expected
assert str(env) == str(d)
def test_includes_path_in_repr_if_init_from_file(cleanup_env, tmp_directory):
Path('env.yaml').write_text('a: 1')
env = Env('env.yaml')
assert 'env.yaml' in repr(env)
def test_init_with_arbitrary_name(cleanup_env, tmp_directory):
Path('some_environment.yaml').write_text('a: 1')
assert Env('some_environment.yaml')
def test_init_with_null_value(cleanup_env, tmp_directory):
Path('env.yaml').write_text('a: null')
assert Env('env.yaml')
def test_init_with_absolute_path(cleanup_env, tmp_directory):
Path('env.yaml').write_text('a: 1')
assert Env(Path(tmp_directory, 'env.yaml'))
def test_includes_function_module_and_name_if_decorated(cleanup_env):
@with_env({'a': 1})
def my_fn(env):
return env
# NOTE: pytest sets the module name to the current filename
assert 'test_env.my_fn' in repr(my_fn())
def test_cannot_start_env_if_one_exists_already(cleanup_env):
Env({'a': 1})
with pytest.raises(RuntimeError):
Env({'a': 2})
def test_can_initialize_env_after_failed_attempt(cleanup_env):
try:
# underscores are not allowed, this will fail, but before raising
# the exception, the instance (created in __new__) must be discarded
Env({'_a': 1})
except ValueError:
pass
# if we can initialize another object, it means the previous call was
    # correctly discarded
assert Env({'a': 1}).a == 1
def test_context_manager(cleanup_env):
with Env({'a': 1}) as env:
value = env.a
# should be able to initialize another env now
Env({'a': 2})
assert value == 1
def test_load_env_with_name(tmp_directory, cleanup_env):
Path('env.some_name.yaml').write_text(yaml.dump({'a': 1}))
Env('env.some_name.yaml')
def test_load_env_default_name(tmp_directory, cleanup_env):
Path('env.yaml').write_text(yaml.dump({'a': 1}))
Env()
def test_path_returns_Path_objects(cleanup_env):
env = Env(
{'path': {
'a': '/tmp/path/file.txt',
'b': '/another/path/file.csv'
}})
assert isinstance(env.path.a, Path)
assert isinstance(env.path.b, Path)
def test_automatically_creates_path(cleanup_env, tmp_directory):
Env({'path': {'home': 'some_path/'}})
assert Path('some_path').exists() and Path('some_path').is_dir()
def test_path_expandsuser(cleanup_env):
env = Env({'path': {'home': '~'}})
assert env.path.home == Path('~').expanduser()
def test_init_with_module_key(cleanup_env):
env = Env({'_module': 'test_pkg'})
expected = Path(importlib.util.find_spec('test_pkg').origin).parent
assert env._module == expected
def test_init_with_nonexistent_package(cleanup_env):
with pytest.raises(ValueError) as exc_info:
Env({'_module': 'i_do_not_exist'})
expected = ('Could not resolve _module "i_do_not_exist", '
'it is not a valid module nor a directory')
assert exc_info.value.args[0] == expected
def test_init_with_file(tmp_directory, cleanup_env):
Path('not_a_package').touch()
with pytest.raises(ValueError) as exc_info:
Env({'_module': 'not_a_package'})
expected = ('Could not resolve _module "not_a_package", '
'expected a module or a directory but got a file')
assert exc_info.value.args[0] == expected
def test_module_is_here_placeholder_raises_error_if_init_w_dict(cleanup_env):
with pytest.raises(ValueError) as exc_info:
Env({'_module': '{{here}}'})
expected = '_module cannot be {{here}} if not loaded from a file'
assert exc_info.value.args[0] == expected
def test_module_with_here_placeholder(tmp_directory, cleanup_env):
Path('env.yaml').write_text('_module: "{{here}}"')
env = Env()
assert env._module == Path(tmp_directory).resolve()
def test_expand_version(cleanup_env):
env = Env({'_module': 'test_pkg', 'version': '{{version}}'})
assert env.version == 'VERSION'
def test_expand_git_with_underscode_module(monkeypatch, cleanup_env):
monkeypatch.setattr(repo, 'git_location', lambda _: 'git-location')
monkeypatch.setattr(repo, 'git_hash', lambda _: 'git-hash')
env = Env({
'_module': 'test_pkg',
'git': '{{git}}',
'git_hash': '{{git_hash}}',
})
assert env.git == 'git-location'
assert env.git_hash == 'git-hash'
def test_expand_git(monkeypatch, cleanup_env, tmp_git):
monkeypatch.setattr(repo, 'git_location', lambda _: 'git-location')
monkeypatch.setattr(repo, 'git_hash', lambda _: 'git-hash')
Path('env.yaml').write_text(
yaml.dump({
'git': '{{git}}',
'git_hash': '{{git_hash}}',
}))
# need to initialize from a file for this to work, since Env will use
# the env.yaml location to run the git command
env = Env('env.yaml')
assert env.git == 'git-location'
assert env.git_hash == 'git-hash'
def test_can_create_env_from_dict(cleanup_env):
e = Env({'a': 1})
assert e.a == 1
def test_can_instantiate_env_if_located_in_sample_dir(tmp_sample_dir,
cleanup_env):
Env()
def test_raise_file_not_found_if(cleanup_env):
with pytest.raises(FileNotFoundError):
Env('env.non_existing.yaml')
def test_with_env_initialized_from_path(cleanup_env, tmp_directory):
Path('env.yaml').write_text('{"a": 42}')
@with_env('env.yaml')
def my_fn(env):
return env.a
assert my_fn() == 42
def test_with_env_initialized_from_path_looks_recursively(
cleanup_env, tmp_directory):
Path('env.yaml').write_text('{"a": 42}')
Path('dir').mkdir()
os.chdir('dir')
@with_env('env.yaml')
def my_fn(env):
return env.a
assert my_fn() == 42
def test_with_env_decorator(cleanup_env):
@with_env({'a': 1})
def my_fn(env, b):
return env.a, b
assert (1, 2) == my_fn(2)
def test_with_env_modifies_signature(cleanup_env):
@with_env({'a': 1})
def my_fn(env, b):
return env.a, b
assert tuple(inspect.signature(my_fn).parameters) == ('b', )
# TODO: try even more nested
def test_with_env_casts_paths(cleanup_env):
@with_env({'path': {'data': '/some/path'}})
def my_fn(env):
return env.path.data
returned = my_fn(env__path__data='/another/path')
assert returned == Path('/another/path')
def test_with_env_fails_if_no_env_arg(cleanup_env):
with pytest.raises(RuntimeError):
@with_env({'a': 1})
def my_fn(not_env):
pass
def test_with_env_fails_if_fn_takes_no_args(cleanup_env):
with pytest.raises(RuntimeError):
@with_env({'a': 1})
def my_fn():
pass
def test_replace_defaults(cleanup_env):
@with_env({'a': {'b': 1}})
def my_fn(env, c):
return env.a.b + c
assert my_fn(1, env__a__b=100) == 101
def test_with_env_without_args(tmp_directory, cleanup_env):
Path('env.yaml').write_text('key: value')
@with_env
def my_fn(env):
return 1
assert my_fn() == 1
def test_env_dict_is_available_upon_decoration():
@with_env({'a': 1})
def make(env, param, optional=1):
pass
assert make._env_dict['a'] == 1
def test_replacing_defaults_also_expand(monkeypatch, cleanup_env):
@with_env({'user': 'some_user'})
def my_fn(env):
return env.user
def mockreturn():
return 'expanded_username'
monkeypatch.setattr(getpass, 'getuser', mockreturn)
assert my_fn(env__user='{{user}}') == 'expanded_username'
def test_replacing_raises_error_if_key_does_not_exist():
@with_env({'a': {'b': 1}})
def my_fn(env, c):
return env.a.b + c
with pytest.raises(KeyError):
my_fn(1, env__c=100)
def test_with_env_shows_name_and_module_if_invalid_env(cleanup_env):
with pytest.raises(RuntimeError) as excinfo:
@with_env({'_a': 1})
def some_function(env):
pass
# NOTE: pytest sets the module name to the current filename
assert 'test_env.some_function' in str(excinfo.getrepr())
def test_with_env_shows_function_names_if_env_exists(cleanup_env):
@with_env({'a': 1})
def first(env):
pass
@with_env({'a': 1})
def second(env):
first()
with pytest.raises(RuntimeError) as excinfo:
second()
# NOTE: pytest sets the module name to the current filename
assert 'test_env.first' in str(excinfo.getrepr())
assert 'test_env.second' in str(excinfo.getrepr())
def test_get_all_dict_keys():
got = validate.get_keys_for_dict({'a': 1, 'b': {'c': {'d': 10}}})
assert set(got) == {'a', 'b', 'c', 'd'}
def test_double_underscore_raises_error():
msg = r"Keys cannot have double underscores, got: \['b\_\_c'\]"
with pytest.raises(ValueError, match=msg):
Env({'a': {'b__c': 1}})
def test_leading_underscore_in_top_key_raises_error(cleanup_env):
msg = ("Error validating env.\nTop-level keys cannot start with "
"an underscore, except for {'_module'}. Got: ['_a']")
with pytest.raises(ValueError) as exc_info:
Env({'_a': 1})
assert exc_info.value.args[0] == msg
def test_can_decorate_w_load_env_without_initialized_env():
@load_env
def fn(env):
pass
def test_load_env_modifies_signature(cleanup_env):
@load_env
def fn(env):
pass
assert tuple(inspect.signature(fn).parameters) == ()
def test_load_env_decorator(cleanup_env):
Env({'a': 10})
@load_env
def fn(env):
return env.a
assert fn() == 10
def test_expand_tags(monkeypatch, tmp_directory):
def mockreturn():
return 'username'
monkeypatch.setattr(getpass, "getuser", mockreturn)
# this is required to enable {{root}}
Path('setup.py').touch()
Path('src', 'package').mkdir(parents=True)
Path('src', 'package', 'pipeline.yaml').touch()
raw = {
'a': '{{user}}',
'b': {
'c': '{{user}} {{user}}'
},
'cwd': '{{cwd}}',
'root': '{{root}}',
}
expander = EnvironmentExpander(preprocessed={})
env_expanded = expander.expand_raw_dictionary(raw)
assert env_expanded == {
'a': 'username',
'b': {
'c': 'username username'
},
'cwd': str(Path(tmp_directory).resolve()),
'root': str(Path(tmp_directory).resolve()),
}
def test_error_if_no_project_root(tmp_directory):
raw = {'root': '{{root}}'}
expander = EnvironmentExpander(preprocessed={})
with pytest.raises(BaseException) as excinfo:
expander.expand_raw_dictionary(raw)
assert ('An error happened while expanding placeholder {{root}}'
in str(excinfo.getrepr()))
assert ('could not find a setup.py in a parent folder'
in str(excinfo.getrepr()))
def test_root_expands_relative_to_path_to_here(tmp_directory):
path = Path('some', 'nested', 'dir').resolve()
path.mkdir(parents=True)
(path / 'pipeline.yaml').touch()
raw = {'root': '{{root}}'}
expander = EnvironmentExpander(preprocessed={}, path_to_here=path)
out = expander.expand_raw_dictionary(raw)
assert out['root'] == str(path.resolve())
def test_here_placeholder(tmp_directory, cleanup_env):
Path('env.yaml').write_text(yaml.dump({'here': '{{here}}'}))
env = Env()
assert env.here == str(Path(tmp_directory).resolve())
def test_serialize_env_dict():
# this tests an edge case due to EnvDict's implementation: to enable
# accessing values in the underlying dictionary as attributes, we are
# customizing __getattr__, however, when an object is unserialized,
# Python tries to look for __getstate__ (which triggers calling
# __getattr__), since it cannot find it, it will go to __getitem__
# (given the current implementation of __getattr__). But __getitem__
# uses self.preprocessed. At unserialization time, this attribute does
# not exist yet!, which will cause another call to __getattr__. To avoid
# this recursive loop, we have to prevent special methods to call
# __getitem__ if they do not exist - EnvDict and Env objects are not
# expected to be serialized but we have fix it anyway
env = EnvDict({'a': 1})
assert pickle.loads(pickle.dumps(env))
def test_replace_flatten_key_env_dict():
env = EnvDict({'a': 1})
new_env = env._replace_flatten_key(2, 'env__a')
assert new_env.a == 2 and env is not new_env # must return a copy
def test_replace_nested_flatten_key_env_dict():
env = EnvDict({'a': {'b': 1}})
new_env = env._replace_flatten_key(2, 'env__a__b')
assert new_env.a.b == 2 and env is not new_env # must return a copy
def test_replace_nested_flatten_keys_env_dict():
env = EnvDict({'a': {'b': 1, 'c': 1}})
new_env = env._replace_flatten_keys({'env__a__b': 2, 'env__a__c': 2})
assert (new_env.a.b == 2 and new_env.a.c == 2
and env is not new_env) # must return a copy
def test_error_when_flatten_key_doesnt_exist():
env = EnvDict({'a': 1})
with pytest.raises(KeyError):
env._replace_flatten_key(2, 'env__b')
@pytest.mark.parametrize(
'data, keys',
[
[{
'a': 1
}, ('a', )],
# added this to fix an edge case
[{
'a': {
'b': 1
}
}, ('a', 'b')],
])
def test_env_dict_initialized_with_env_dict(data, keys):
original = EnvDict(data)
env = EnvDict(original)
# ensure we initialized the object correctly
assert repr(env)
assert str(env)
# check default keys are correctly copied
assert original._default_keys == env._default_keys
# check we can access nested keys
for key in keys:
env = env[key]
assert env == 1
def test_env_dict_initialized_with_replaced_env_dict():
a = EnvDict({'a': {'b': 1}})
a_mod = a._replace_flatten_keys({'env__a__b': 2})
b = EnvDict(a_mod)
# make sure the new object has the updated values
assert b['a']['b'] == 2
def test_expand_raw_dictionary():
mapping = EnvDict({'key': 'value'})
d = {'some_setting': '{{key}}'}
assert expand_raw_dictionary(d, mapping) == {'some_setting': 'value'}
def test_expand_raw_dictionaries_and_extract_tags():
mapping = EnvDict({'key': 'value'})
d = [{'some_setting': '{{key}}'}, {'another_setting': '{{key}}'}]
expanded, tags = expand_raw_dictionaries_and_extract_tags(d, mapping)
assert expanded == (
{
'some_setting': 'value',
},
{
'another_setting': 'value'
},
)
assert tags == {'key'}
def test_expand_raw_dict_nested():
mapping = EnvDict({'key': 'value'})
d = {
'section': {
            'some_setting': '{{key}}'
},
'list': ['{{key}}', '{{key}}']
}
assert (expand_raw_dictionary(d, mapping) == {
'section': {
            'some_setting': 'value'
},
'list': ['value', 'value']
})
def test_envdict_git_ignored_if_git_command_fails_and_no_git_placeholder(
tmp_directory):
env = EnvDict({'tag': 'value'}, path_to_here='.')
assert set(env) == {'cwd', 'here', 'now', 'tag', 'user'}
def test_expand_raw_dict_error_if_missing_key():
mapping = EnvDict({'another_key': 'value'})
d = {'some_stuff': '{{key}}'}
with pytest.raises(BaseException) as excinfo:
expand_raw_dictionary(d, mapping)
assert "Error replacing placeholders:" in str(excinfo.value)
assert "* {{key}}: Ensure the placeholder is defined" in str(excinfo.value)
def test_expand_raw_dictionary_parses_literals():
raw = {'a': '{{a}}', 'b': '{{b}}'}
mapping = EnvDict({'a': [1, 2, 3], 'b': {'z': 1}})
out = expand_raw_dictionary(raw, mapping)
assert out['a'] == [1, 2, 3]
assert out['b'] == {'z': 1}
@pytest.mark.parametrize('constructor', [list, tuple, np.array])
def test_iterate_nested_dict(constructor):
numbers = constructor([1, 2, 3])
c = {'c': numbers}
b = {'b': c}
g = iterate_nested_dict({'a': b})
    parent, key, value, preffix = next(g)
    assert parent is numbers and key == 0 and value == 1
    assert preffix == ['a', 'b', 'c', 0]
    parent, key, value, preffix = next(g)
    assert parent is numbers and key == 1 and value == 2
    assert preffix == ['a', 'b', 'c', 1]
    parent, key, value, preffix = next(g)
    assert parent is numbers and key == 2 and value == 3
    assert preffix == ['a', 'b', 'c', 2]
def test_iterate_nested_dict_with_str():
assert list(iterate_nested_dict({'a': 'string'})) == [({
'a': 'string'
}, 'a', 'string', ['a'])]
@pytest.mark.parametrize('value, expected', [
('True', True),
('false', False),
('100', 100),
('0.11', 0.11),
('string', 'string'),
(True, True),
(False, False),
(10, 10),
(10.1, 10.1),
(None, None),
('None', None),
('none', None),
('NULL', None),
('null', None),
])
def test_cast_if_possible(value, expected):
assert cast_if_possible(value) == expected
def test_replace_value_casts_if_possible():
env = EnvDict({'a': False, 'b': 1, 'c': 1.1})
env._replace_value('True', ['a'])
env._replace_value('2', ['b'])
env._replace_value('2.2', ['c'])
assert env.a is True
assert env.b == 2
assert env.c == 2.2
def test_attribute_error_message():
env = EnvDict({'user': 'user', 'cwd': 'cwd', 'root': 'root'})
with pytest.raises(AttributeError) as excinfo_attr:
env.aa
with pytest.raises(KeyError) as excinfo_key:
env['aa']
assert str(excinfo_attr.value) == f"{env!r} object has no atttribute 'aa'"
assert str(excinfo_key.value) == f'"{env!r} object has no key \'aa\'"'
@pytest.mark.parametrize('content, type_',
[['a', 'str'], ['- a', 'list'], ['', 'NoneType']])
def test_error_when_loaded_obj_is_not_dict(content, type_, tmp_directory):
path = Path(tmp_directory, 'file.yaml')
path.write_text(content)
with pytest.raises(ValueError) as excinfo:
EnvDict('file.yaml')
expected = ("Expected object loaded from 'file.yaml' to be "
"a dict but got '{}' instead, "
"verify the content").format(type_)
assert str(excinfo.value) == expected
def test_default(monkeypatch):
monkeypatch.setattr(getpass, 'getuser', Mock(return_value='User'))
monkeypatch.setattr(os, 'getcwd', Mock(return_value='/some_path'))
env = EnvDict(dict())
assert env.cwd == str(Path('/some_path').resolve())
assert env.user == 'User'
def test_default_with_here_relative(tmp_directory):
Path('dir').mkdir()
env = EnvDict(dict(), path_to_here='dir')
assert env.here == str(Path(tmp_directory, 'dir').resolve())
def test_default_with_here_absolute(tmp_directory):
here = str(Path(tmp_directory, 'dir').resolve())
env = EnvDict(dict(), path_to_here=here)
assert env.here == here
def test_default_with_root(monkeypatch):
mock = Mock(return_value='some_value')
monkeypatch.setattr(default, 'find_root_recursively', mock)
env = EnvDict(dict())
assert env.root == 'some_value'
@pytest.mark.parametrize('kwargs, expected', [
[
dict(source={'cwd': 'some_value'}, path_to_here='value'),
{'here', 'user', 'now'}
],
[dict(source={'cwd': 'some_value'}), {'user', 'now'}],
[dict(source={'user': 'some_value'}), {'cwd', 'now'}],
])
def test_default_keys(kwargs, expected):
assert EnvDict(**kwargs).default_keys == expected
def test_adds_default_keys_if_they_dont_exist(monkeypatch):
monkeypatch.setattr(getpass, 'getuser', Mock(return_value='User'))
monkeypatch.setattr(os, 'getcwd', Mock(return_value='/some_path'))
mock = Mock(return_value='some_value')
monkeypatch.setattr(default, 'find_root_recursively', mock)
monkeypatch.setattr(expand.default, 'find_root_recursively', mock)
env = EnvDict({'a': 1}, path_to_here='/dir')
assert env.cwd == str(Path('/some_path').resolve())
assert env.here == str(Path('/dir').resolve())
assert env.user == 'User'
assert env.root == 'some_value'
assert env.default_keys == {'cwd', 'here', 'user', 'root', 'now'}
def test_find(tmp_directory):
path = Path('some', 'dir')
path.mkdir(parents=True)
Path('some', 'env.yaml').write_text('key: value')
expected_here = str(Path('some').resolve())
os.chdir(path)
env = EnvDict.find('env.yaml')
assert env.cwd == str(Path('.').resolve())
assert env.here == expected_here
@pytest.mark.parametrize('value, error', [
['{{git}}', 'Ensure git is installed and git repository exists'],
['{{git_hash}}', 'Ensure git is installed and git repository exists'],
['{{here}}', 'Ensure the spec was initialized from a file'],
['{{root}}', 'Ensure a pipeline.yaml or setup.py exist'],
['{{another}}', 'Ensure the placeholder is defined'],
])
def test_error_message_if_missing_default_placeholder(tmp_directory, value,
error):
Path('env.yaml').write_text(yaml.dump({'key': 'value'}))
env = EnvDict('env.yaml')
with pytest.raises(BaseException) as excinfo:
expand_raw_dictionary({
'a': value,
}, env)
assert error in str(excinfo.value)
|
StarcoderdataPython
|
302561
|
from Crypto.PublicKey import RSA
from Crypto.Cipher import AES
from Crypto import Random
import ast, os, random, struct, string
class AES_cipher():
def __init__(self, passcode):
self.passcode = passcode
self.iv = bytes(16*'\x00'.encode())
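        # NOTE: when the passcode is shorter than a valid key length,
        # _add_padding appends *random* letters, so the derived key is not
        # reproducible across instances built from the same short passcode.
        # Reuse the same instance (or supply a 16/24/32-character passcode)
        # if you need to decrypt the data later.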
self._add_padding()
    def _add_padding(self):
        """
        Pad the passcode to the nearest valid AES key length (16, 24 or 32).
        """
        if len(self.passcode) in [16, 24, 32]:
            return
        for target in (16, 24, 32):
            if len(self.passcode) < target:
                while len(self.passcode) < target:
                    self.passcode = self.passcode + random.choice(string.ascii_letters)
                return
        raise ValueError(
            "passcode longer than 32 characters is not a valid AES key length")
def encrypt_file(self, in_filename, out_filename=None, chunksize=64*1024):
passcode = self.passcode
if out_filename is None:
out_filename = in_filename + '.enc'
#iv = ''.join(chr(random.randint(0, 0xFF)) for i in range(16))
iv = self.iv
        encryptor = AES.new(passcode.encode('utf-8'), AES.MODE_CBC, iv)
filesize = os.path.getsize(in_filename)
with open(in_filename, 'rb') as infile:
with open(out_filename, 'wb') as outfile:
outfile.write(struct.pack('<Q', filesize))
outfile.write(iv)
while True:
chunk = infile.read(chunksize)
if len(chunk) == 0:
break
elif len(chunk) % 16 != 0:
chunk += bytes((16 - len(chunk) % 16)*' '.encode())
outfile.write(encryptor.encrypt(chunk))
def decrypt_file(self,in_filename, out_filename=None, chunksize=24*1024):
"""
Decrypts a file using AES (CBC mode) with the
given key. Parameters are similar to encrypt_file,
with one difference: out_filename, if not supplied
        will be in_filename with its last extension replaced
        by ".dec" (i.e. if in_filename is 'aaa.zip.enc' then
        out_filename will be 'aaa.zip.dec')
"""
passcode = self.passcode
if out_filename is None:
out_filename = os.path.splitext(in_filename)[0] + ".dec"
with open(in_filename, 'rb') as infile:
origsize = struct.unpack('<Q', infile.read(struct.calcsize('Q')))[0]
iv = infile.read(16)
            decryptor = AES.new(passcode.encode('utf-8'), AES.MODE_CBC, iv)
with open(out_filename, 'wb') as outfile:
while True:
chunk = infile.read(chunksize)
if len(chunk) == 0:
break
outfile.write(decryptor.decrypt(chunk))
outfile.truncate(origsize)
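# A minimal round-trip sketch (file names are illustrative, not from the
# source). Because of the random key padding noted above, the same instance
# must be reused for decryption unless the passcode already has a valid length:
#   cipher = AES_cipher('my-secret')
#   cipher.encrypt_file('report.pdf')      # writes report.pdf.enc
#   cipher.decrypt_file('report.pdf.enc')  # writes report.pdf.dec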
class RSA_cipher(): # public key algorithm
def __init__(self, username=None):
        self.username = None
if username is None:
self.username = "noname"
else:
self.username = username
self.key = None
self.pubkey = None
def init(self, username=None):
if username is not None:
self.username = username
self.random_generator = Random.new().read
self.key = RSA.generate(1024, self.random_generator)
self.pubkey = self.key.publickey()
with open("key/{}.priv".format(self.username), "wb") as privkey:
privkey.write(self.key.exportKey(format='PEM'))
with open("key/{}.pub".format(self.username), "wb") as pubkey:
pubkey.write(self.pubkey.exportKey(format='PEM'))
#self.prikey = self.key.privatekey()
def importKey(self, keypath=None): # pubkey_path or priv_path
if keypath.endswith(".pub"):
pubkey_text = open(keypath, "rb")
self.pubkey = RSA.importKey(pubkey_text.read())
pubkey_text.close()
if keypath.endswith(".priv"):
prikey_text = open(keypath, "rb")
self.key = RSA.importKey(prikey_text.read())
self.pubkey = self.key.publickey()
prikey_text.close()
def importKeyAsString(self, pub_text=None):
self.pubkey = RSA.importKey(pub_text)
def encrypt_with_public(self, msg):
encrypted = self.pubkey.encrypt(msg.encode('utf-8'), 32)
self.encrypted = encrypted[0]
return encrypted[0]
def encrypt_with_private(self, msg):
raise NotImplementedError
def decrypt_with_public(self, msg):
raise NotImplementedError
def decrypt_with_private(self, msg=None):
if msg is None:
msg = str(self.encrypted)
#print(ast.literal_eval(str(msg)))
decrypted = self.key.decrypt(ast.literal_eval(str(msg)))
#decrypted = self.key.decrypt(bytes(msg))
return decrypted
def restore_key(self, priv_path):
pass
def printParams(self):
attr = [attr for attr in vars(self).items() if not attr[0].startswith('__')]
print(attr)
return attr
if __name__ == '__main__':
rsa = RSA_cipher()
rsa.init()
print(type(rsa.pubkey))
|
StarcoderdataPython
|
9742508
|
# Copyright 2016 Autodesk Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import moldesign as mdt
from ..molecules import MolecularProperties
from ..utils import exports
from .base import QMMMBase
class QMMMEmbeddingBase(QMMMBase):
""" Abstract class for standard QM/MM embedding models.
To use any of this classes' subclasses, the MM models must support the ability to
calculate the internal energies and interaction energies between subsystems,
using the ``calculation_groups`` parameter.
"""
def __init__(self, *args, **kwargs):
super(QMMMEmbeddingBase, self).__init__(*args, **kwargs)
self.qmmol = None
self.mmmol = None
self.qm_atoms = None
self.qm_link_atoms = None
self._qm_index_set = None
# TODO: add `qm_atom_indices` to QMMMBase parameters
def calculate(self, requests):
self.prep()
self.mmmol.positions = self.mol.positions
self._set_qm_positions()
qmprops = self.qmmol.calculate(requests)
mmprops = self.mmmol.calculate(requests)
potential_energy = mmprops.potential_energy+qmprops.potential_energy
forces = mmprops.forces.copy()
for iatom, realatom in enumerate(self.qm_atoms):
forces[realatom.index] = qmprops.forces[iatom]
for atom in self.qm_link_atoms:
self._distribute_linkatom_forces(forces, atom)
properties = MolecularProperties(self.mol,
mmprops=mmprops,
qmprops=qmprops,
potential_energy=potential_energy,
forces=forces)
if 'wfn' in qmprops:
properties.wfn = qmprops.wfn
return properties
def prep(self):
if self._prepped:
return None
self.params.qm_atom_indices.sort()
self.qm_atoms = [self.mol.atoms[idx] for idx in self.params.qm_atom_indices]
self._qm_index_set = set(self.params.qm_atom_indices)
self.qmmol = self._setup_qm_subsystem()
self.mmmol = mdt.Molecule(self.mol,
name='%s MM subsystem' % self.mol.name)
self.mol.ff.copy_to(self.mmmol)
self._turn_off_qm_forcefield(self.mmmol.ff)
self.mmmol.set_energy_model(self.params.mm_model)
self._prepped = True
return True
def _setup_qm_subsystem(self):
        raise NotImplementedError("%s is an abstract class, use one of its subclasses"
                                  % self.__class__.__name__)
def _turn_off_qm_forcefield(self, ff):
self._remove_internal_qm_bonds(ff.parmed_obj)
self._exclude_internal_qm_ljterms(ff.parmed_obj)
    def _exclude_internal_qm_ljterms(self, pmdobj):
        # Turn off QM/QM LJ interactions (must be done AFTER _remove_internal_qm_bonds)
        qm_indices = self.params.qm_atom_indices
        numqm = len(qm_indices)
        for i in range(numqm):
            for j in range(i+1, numqm):
                pmdobj.atoms[qm_indices[i]].exclude(pmdobj.atoms[qm_indices[j]])
def _remove_internal_qm_bonds(self, pmdobj):
        for iatom in self.params.qm_atom_indices:
pmdatom = pmdobj.atoms[iatom]
allterms = ((pmdatom.bonds, 2), (pmdatom.angles, 3),
(pmdatom.dihedrals, 4), (pmdatom.impropers, 4))
for termlist, numatoms in allterms:
for term in termlist[:]: # make a copy so it doesn't change during iteration
if self._term_in_qm_system(term, numatoms):
term.delete()
@staticmethod
def _distribute_linkatom_forces(fullforces, linkatom):
""" Distribute forces according to the apparently indescribable and unciteable "lever rule"
"""
# TODO: CHECK THIS!!!!
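        # What the code below implements: with p = (dfull - d_mm) / dfull,
        # the QM partner receives the fraction p of the link atom's force and
        # the MM atom the remaining 1 - p, i.e. the force is distributed
        # linearly according to the link atom's position along the QM-MM bond.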
mmatom = linkatom.metadata.mmatom
qmatom = linkatom.metadata.mmpartner
dfull = mmatom.distance(qmatom)
d_mm = linkatom.distance(mmatom)
p = (dfull - d_mm)/dfull
fullforces[qmatom.index] += p*linkatom.force
fullforces[mmatom.index] += (1.0-p) * linkatom.force
def _set_qm_positions(self):
for qmatom, realatom in zip(self.qmmol.atoms, self.qm_atoms):
qmatom.position = realatom.position
mdt.helpers.qmmm.set_link_atom_positions(self.qm_link_atoms)
    def _term_in_qm_system(self, t, numatoms):
        """ Check if an FF term is entirely within the QM subsystem """
        for iatom in range(numatoms):
            attrname = 'atom%i' % (iatom + 1)
            if getattr(t, attrname).idx not in self._qm_index_set:
                return False
        return True
@exports
class MechanicalEmbeddingQMMM(QMMMEmbeddingBase):
"""
Handles _non-covalent_ QM/MM with mechanical embedding.
No electrostatic interactions will be calculated between the QM and MM subsystems.
    No covalent bonds are allowed between the two subsystems.
"""
def prep(self):
if not super(MechanicalEmbeddingQMMM, self).prep():
return # was already prepped
# Set QM partial charges to 0
self.mmmol.energy_model._prepped = False
pmdobj = self.mmmol.ff.parmed_obj
        for iatom in self.params.qm_atom_indices:
            pmdatom = pmdobj.atoms[iatom]
            pmdatom.charge = 0.0
def _setup_qm_subsystem(self):
""" QM subsystem for mechanical embedding is the QM atoms + any link atoms
"""
qm_atoms = [self.mol.atoms[iatom] for iatom in self.params.qm_atom_indices]
self.qm_link_atoms = mdt.helpers.qmmm.create_link_atoms(self.mol, qm_atoms)
qmmol = mdt.Molecule(qm_atoms + self.qm_link_atoms,
name='%s QM subsystem' % self.mol.name)
for real_atom, qm_atom in zip(self.qm_atoms, qmmol.atoms):
qm_atom.metadata.real_atom = real_atom
qmmol.set_energy_model(self.params.qm_model)
return qmmol
@exports
class ElectrostaticEmbeddingQMMM(QMMMEmbeddingBase):
""" Handles _non-covalent_ QM/MM with electrostaic embedding.
No bonds allowed across the QM/MM boundaries.
To support this calculation type, the QM model must support the ability to denote
a subset of atoms as the "QM" atoms, using the ``qm_atom_indices`` parameter.
    The MM models must support the ability to turn off _internal_ interactions for
a certain subset of the system, using the ``no_internal_calculations`` parameter.
"""
def prep(self):
if not super(ElectrostaticEmbeddingQMMM, self).prep():
return # was already prepped
if not self.params.qm_model.supports_parameter('qm_atom_indices'):
raise TypeError('Supplied QM model ("%s") does not support QM/MM'
% self.params.qm_model.__name__)
def _setup_qm_subsystem(self):
qmmol = mdt.Molecule(self.mol)
self.mol.ff.copy_to(qmmol)
self.qm_link_atoms = mdt.helpers.qmmm.create_link_atoms(self.mol, self.qm_atoms)
if self.qm_link_atoms:
raise ValueError('The %s model does not support link atoms' % self.__class__.__name__)
qmmol.set_energy_model(self.params.qm_model)
qmmol.energy_model.params.qm_atom_indices = self.params.qm_atom_indices
return qmmol
|
StarcoderdataPython
|
11299440
|
<reponame>animator/orange3-scoring<gh_stars>1-10
import numpy as np
from AnyQt.QtWidgets import QGridLayout, QSizePolicy as Policy
from AnyQt.QtCore import QSize
from Orange.widgets.widget import OWWidget, Msg, Output
from Orange.data import Table, DiscreteVariable, Domain, ContinuousVariable
from Orange.widgets import gui
from Orange.evaluation import Results
from orangecontrib.scoring.lib.model import ScoringModel
from orangecontrib.scoring.lib.utils import prettifyText
class OWEvaluate(OWWidget):
# Each widget has a name description and a set of input/outputs (referred to as the widget’s meta description).
# Widget's name as displayed in the canvas
name = "Evaluate PMML/PFA Model"
# Orange Canvas looks for widgets using an orange.widgets entry point.
id = "orange.widgets.scoring.evaluate"
# Short widget description
description = "Evaluate PFA (*.json, *.yaml), PMML (*.xml) or ONNX (*.onnx) model"
# An icon resource file path for this widget
# (a path relative to the module where this widget is defined)
icon = "icons/evaluate.svg"
# Each Orange widget belongs to a category and has an associated priority within that category.
priority = 2
category = "Scoring"
keywords = ["scoring", "inference", "load", "pfa", "pmml", "onnx"]
    # Widget's inputs: the data table to score and the scoring model
inputs = [("Data", Table, "set_data"),
("Scoring Model", ScoringModel, "set_model")]
    # Widget's outputs: the scored predictions table and the evaluation results
class Outputs:
predictions = Output("Predictions", Table, doc="Scored results")
evaluations_results = Output("Evaluation Results", Results)
# Basic (convenience) GUI definition:
# a simple 'single column' GUI layout
# want_main_area = False
# with a fixed or resizable geometry.
resizing_enabled = True
class Error(OWWidget.Error):
connection = Msg("{}")
def __init__(self):
super().__init__()
self.data = None
self.model = None
self.output_data = None
self.eval_results = None
self.inputDataAsArray = None
self.inputWithoutFieldName = None
# ensure the widget has some decent minimum width.
self.controlArea.hide()
box = gui.vBox(self.mainArea, "Info")
self.infolabel = gui.widgetLabel(box, 'No model or data loaded.')
self.warnings = gui.widgetLabel(box, '')
box = gui.hBox(self.mainArea)
gui.rubber(box)
self.apply_button = gui.button(
box, self, "Score", callback=self.score)
self.apply_button.setEnabled(False)
self.progressBarInit()
@staticmethod
def sizeHint():
return QSize(320, 100)
def connect(self):
return True
def handleNewSignals(self):
self.progressBarSet(0)
self.output_data = None
self.eval_results = None
self.send_data()
self.Error.clear()
if self.data is not None and self.model is not None:
conforms, fieldNamesChecked, inputFieldsChecked = self.describeFields()
if conforms:
self.inputDataAsArray = not inputFieldsChecked
self.inputWithoutFieldName = not fieldNamesChecked
self.apply_button.setEnabled(True)
def describeFields(self):
TAB = ' '
BR = '<br/>'
SP = ' '
doFieldNameCheck = True
doInputFieldsCheck = True
X = self.data.X
inputColumnNames = [field.name for field in self.data.domain.attributes]
self.infolabel.setText('')
text = "Input Data:"
text += BR + "Rows - " + str(len(X))
text += BR + "<br/>".join(prettifyText(inputColumnNames, pre="Column Names - "))
text += BR
text += BR + "{0} Model: ".format(self.model.type)
inputFields = [name for name, _ in self.model.inputFields]
inputDataTypes = [type for _, type in self.model.inputFields]
text += BR + "<br/>".join(prettifyText(inputFields, pre="Model Field Names - "))
text += BR
text += BR + 'Processing INFO:'
if self.model.type == "PFA":
if len(inputFields) == 1:
if inputFields[0] == "input_value":
doFieldNameCheck = False
text += BR + '- PFA input is of primitive Avrotype with no column name. Skipping field names check.'
if "array" in inputDataTypes[0]:
doInputFieldsCheck = False
text += BR + '- PFA input is of array Avrotype so value of all fields of the input data will' +\
BR + SP + SP + 'be converted into an array. Skipping field name and number of input fields check.'
if doInputFieldsCheck:
if len(inputFields) != len(inputColumnNames):
text += BR + 'Error: No. of columns in Data is not equal to the no. of input fields of the model.'
self.infolabel.setText(text)
return False, doFieldNameCheck, doInputFieldsCheck
text += BR + '- No. of columns in Data is equal to the no. of input fields of the model.'
if doFieldNameCheck:
if sorted(inputFields) != sorted(inputColumnNames):
text += BR + 'Error: Column names in Data do not match the input field names of the model.'
self.infolabel.setText(text)
return False, doFieldNameCheck, doInputFieldsCheck
text += BR + '- Column names in Data match with the input field names of the model.'
self.infolabel.setText(text)
return True, doFieldNameCheck, doInputFieldsCheck
def send_data(self):
self.Outputs.predictions.send(self.output_data)
self.Outputs.evaluations_results.send(self.eval_results)
self.apply_button.setEnabled(False)
def set_data(self, data):
self.data = data
self.handleNewSignals()
def set_model(self, model):
self.model = model
self.handleNewSignals()
def score(self):
self.output_data = None
self.progressBarSet(0)
#cv = ["null", "boolean", "integer", "int", "long", "float", "double"]
dv = ["string", "bytes"]
res = []
inputColumnNames = [field.name for field in self.data.domain.attributes]
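        # Output fields typed "string"/"bytes" will back DiscreteVariables, so
        # their predicted values are stored as indices into a per-field list of
        # the distinct values seen so far (accumulated in dvFieldSet below).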
dvFieldSet = {name: [] for name, type in self.model.outputFields if type in dv}
nRows = len(self.data.X)
for cnt, row in enumerate(self.data.X):
            self.progressBarSet(int(90 * cnt / nRows))
datum = None
if self.inputWithoutFieldName:
datum = row[0]
elif self.inputDataAsArray:
datum = {self.model.inputFields[0][0]: list(row)}
else:
datum = dict(zip(inputColumnNames, row))
if datum is not None:
result = self.model.predict(datum)
if "output_value" == self.model.outputFields[0][0]:
if "output_value" in dvFieldSet.keys():
if result in dvFieldSet["output_value"]:
result = dvFieldSet["output_value"].index(result)
else:
dvFieldSet["output_value"].append(result)
result = len(dvFieldSet["output_value"]) - 1
res.append([result, ])
else:
resRow = []
for name, _ in self.model.outputFields:
if name in dvFieldSet.keys():
if result[name] in dvFieldSet[name]:
resRow.append(dvFieldSet[name].index(result[name]))
else:
dvFieldSet[name].append(result[name])
resRow.append(len(dvFieldSet[name])-1)
else:
resRow.append(result[name])
res.append(resRow)
else:
raise RuntimeError("Error detecting input data row - {0}".format(row))
DomainX = self.data.domain.attributes
DomainY = [DiscreteVariable(name, values=dvFieldSet[name]) if name in dvFieldSet.keys() else ContinuousVariable(name) \
for name, _ in self.model.outputFields]
DomainM = self.data.domain.class_vars
output_data_domain = Domain(DomainX, class_vars=DomainY, metas=DomainM)
self.output_data = Table.from_numpy(output_data_domain, self.data.X, Y=np.array(res), metas=self.data._Y)
self.output_data.name = "Result Table"
if len(DomainM) > 0 and len(res[0])==1:
self.eval_result_matrix(np.array(res), DomainY)
self.send_data()
self.progressBarSet(100)
def eval_result_matrix(self, predicted_results, domain_results):
self.eval_results = Results(self.data,
nrows=len(self.data),
row_indices = np.arange(len(self.data)),
actual=self.data.Y,
predicted=np.array([predicted_results.ravel()]))
if __name__ == "__main__":
from Orange.widgets.utils.widgetpreview import WidgetPreview # since Orange 3.20.0
from orangecontrib.scoring.lib.readers import PFAFormat
import os
pfaFile = os.path.join(os.path.dirname(os.path.realpath(__file__)), "../tests/sample_iris.json")
WidgetPreview(OWEvaluate).run(set_data=Table("iris"),
set_model=PFAFormat.get_reader(pfaFile).read())
|
StarcoderdataPython
|
11323393
|
'''Implements the infrastructure to spread indexing tasks over a single
:class:`~.RandomAccessScanSource` across multiple processes for speed.
The primary function in this module is :func:`index`, which will perform
this dispatch.
'''
import multiprocessing
import logging
import dill
from .scan_index import ExtendedScanIndex
from .scan_interval_tree import (
ScanIntervalTree, extract_intervals, make_rt_tree)
logger = logging.getLogger(__name__)
n_cores = multiprocessing.cpu_count()
def indexing_iterator(reader, start, end, index):
"""A helper function which will iterate over an interval of a :class:`~.RandomAccessScanSource`
while feeding each yielded :class:`~.ScanBunch` into a provided :class:`~.ExtendedScanIndex`.
Parameters
----------
reader : :class:`~.RandomAccessScanSource` or :class:`~.ScanIterator`
The scan data source to loop over.
start : int
The starting index
end : int
The stopping index
index : :class:`~.ExtendedScanIndex`
The scan index to fill.
Yields
------
:class:`~.ScanBunch`
"""
assert end >= start, "End cannot precede Start"
try:
iterator = reader.start_from_scan(index=start, grouped=True)
except AttributeError:
if start != 0:
raise
iterator = reader
for scan_bunch in iterator:
try:
ix = scan_bunch.precursor.index
except AttributeError:
ix = scan_bunch.products[0].index
if ix > end:
break
index.add_scan_bunch(scan_bunch)
yield scan_bunch
def index_chunk(reader, start, end):
"""The task function for :func:`quick_index`, which will build an
:class:`~.ExtendedIndex` and :class:`ScanIntervalTree` from an index
range over a :class:`~.ScanIterator`
Parameters
----------
reader : :class:`~.ScanIterator` or :class:`~.RandomAccessScanSource`
The scan source to iterate over
start : int
The starting index
end : int
The stopping index
Returns
-------
start: int
The starting index for this chunk
end: int
The stopping index for this chunk
index: :class:`~.ExtendedIndex`
The constructed scan metadata index for this chunk
    intervals: :class:`~.ScanIntervalTree`
        The constructed scan interval tree for this chunk
"""
index = ExtendedScanIndex()
iterator = indexing_iterator(reader, start, end, index)
intervals = extract_intervals(iterator)
return (start, end, index, intervals)
def partition_work(n_items, n_workers, start_index=0):
"""Given an index range and a number of workers to work on them,
break the index range into approximately evenly sized sub-intervals.
This is a helper function for :func:`run_task_in_chunks` used to
compute the chunks.
Parameters
----------
n_items : int
The maximum value of the index range
n_workers : int
The number of workers to split the work between
start_index : int, optional
The starting value of the index range (the default is 0)
Returns
-------
list:
A list of (start, end) pairs defining the index ranges each worker will
handle.
"""
if n_workers == 1:
return [[start_index, start_index + n_items]]
chunk_size = int(n_items / n_workers)
n_items += start_index
intervals = []
start = start_index
intervals.append([start, start + chunk_size])
start += chunk_size
while start + chunk_size < (n_items):
end = start + chunk_size
intervals.append([start, end])
start = end
    # Make sure that the last chunk actually covers the end of
    # the interval.
    intervals[-1][1] = n_items
return intervals
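# Illustrative behaviour (values follow from the code above, not from the
# source's own tests):
#   partition_work(10, 3) -> [[0, 3], [3, 6], [6, 10]]
#   partition_work(10, 1) -> [[0, 10]]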
class _Indexer(object):
'''A pickle-able callable object which wraps :func:`index_chunk` for the call signature
used by :func:`run_task_in_chunks`
'''
def __call__(self, payload):
reader, start, end = payload
try:
result = index_chunk(reader, start, end)
except Exception as e:
print(reader, start, end, e)
import traceback
traceback.print_exc()
raise e
return result
class _TaskWrapper(object):
'''A simple wrapper for a callable to capture the index range for a chunk created by
:func:`run_task_in_chunks`, so that the start index of the chunk is known.
'''
def __init__(self, task):
self.task = task
def __getstate__(self):
state = {
"task": dill.dumps(self.task)
}
return state
def __setstate__(self, state):
self.task = dill.loads(state['task'])
def __call__(self, payload):
_reader, start, _end = payload
out = self.task(payload)
return start, out
class _TaskPayload(object):
"""A wrapper for the input to the distributed task which transmits the :class:`~.RandomAccessScanSource`
via :mod:`dill` to make pickling of any wrapped file objects possible.
Mocks a subset of the :class:`~.Sequence` API to allow it to be treated like a :class:`tuple`
Attributes
----------
reader: :class:`~.RandomAccessScanSource`
The scan data source to be shared with the worker.
start: int
The scan index to start processing from
end: int
The scan index to stop processing at.
options: dict
A dictionary of extra arguments that the task might use.
"""
def __init__(self, reader, start, end, **kwargs):
self.reader = reader
self.start = start
self.end = end
self.options = kwargs
def __iter__(self):
yield self.reader
yield self.start
yield self.end
def __getitem__(self, i):
if i == 0:
return self.reader
elif i == 1:
return self.start
elif i == 2:
return self.end
else:
raise IndexError(i)
def __len__(self):
return 3
def __getstate__(self):
state = {
"reader": dill.dumps(self.reader, -1),
"start": self.start,
"end": self.end,
"options": self.options
}
return state
def __setstate__(self, state):
self.reader = dill.loads(state['reader'])
self.start = state['start']
self.end = state['end']
self.options = state['options']
def __repr__(self):
template = "{self.__class__.__name__}({self.reader}, {self.start}, {self.end}, {self.options})"
return template.format(self=self)
def run_task_in_chunks(reader, n_processes=None, n_chunks=None, scan_interval=None, task=None, progress_indicator=None):
"""Run a :class:`~.Callable` `task` over a :class:`~.ScanIterator` in chunks across multiple processes.
This function breaks apart a :class:`~.ScanIterator`'s scans over `scan_interval`,
or the whole sequence if not provided.
Parameters
----------
reader : :class:`~.ScanIterator`
The set of :class:`~.Scan` objects to operate on
n_processes : int, optional
The number of worker processes to use (the default is 4 or the number of cores available,
whichever is lower)
n_chunks : int, optional
The number of chunks to break the scan range into (the default is equal to `n_processes`)
scan_interval : :class:`tuple` of (:class:`int`, :class:`int`), optional
The start and stop scan index to apply the task over. If omitted, the entire scan range will
be used. If either entry is :const:`None`, then the index will be assumed to be the first or
last scan respectively.
task : :class:`~.Callable`
The callable object which will be executed on each chunk in a sub-process. It must take one
argument, a :class:`tuple` of (`reader`, `start index`, `stop index`), and it must return a pickle-able
object.
progress_indicator : :class:`~.Callable`, optional
A callable object which will be used to report progress as chunks finish processing. It must take
one argument, a :class:`float` which represents the fraction of all work completed.
Returns
-------
:class:`list`:
The result of `task` on each chunk of `reader` in index sorted order.
"""
if n_processes is None:
n_processes = min(n_cores, 4)
if task is None or not callable(task):
raise ValueError("The task must be callable!")
if scan_interval is None:
start_scan = 0
end_scan = len(reader.index)
else:
start_scan, end_scan = scan_interval
if start_scan is None:
start_scan = 0
if end_scan is None:
end_scan = len(reader.index)
if n_chunks is None:
n_chunks = n_processes
n_items = end_scan - start_scan
pool = multiprocessing.Pool(n_processes)
scan_ranges = partition_work(n_items, n_chunks, start_scan)
feeder = (
_TaskPayload(reader, scan_range[0], scan_range[1])
for scan_range in scan_ranges)
result = []
for i, block in enumerate(pool.imap_unordered(_TaskWrapper(task), feeder), 1):
result.append(block)
if progress_indicator is not None:
progress_indicator(i / float(n_chunks))
pool.close()
pool.join()
result.sort(key=lambda x: x[0])
result = [x[1] for x in result]
return result
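# A hedged usage sketch (`reader` stands in for any RandomAccessScanSource).
# Each returned element is one chunk's task output, sorted by chunk start:
#   chunks = run_task_in_chunks(reader, n_processes=4, task=_Indexer())
#   indices = [chunk[2] for chunk in chunks]    # one ExtendedScanIndex each
#   intervals = [chunk[3] for chunk in chunks]  # one interval list each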
def _merge_indices(indices):
index = indices[0]
for ind in indices:
index = index.merge(ind)
return index
def _make_interval_tree(intervals):
concat = []
for i in intervals:
concat.extend(i)
return ScanIntervalTree(make_rt_tree(concat), None)
def index(reader, n_processes=4, scan_interval=None, progress_indicator=None):
"""Generate a :class:`~.ExtendedScanIndex` and :class:`~.ScanIntervalTree` for
`reader` between `scan_interval` start and end points across `n_processes` worker
processes.
If a :class:`~.ScanIterator` is passed instead of a :class:`~.RandomAccessScanSource`,
only a single process will be used.
Parameters
----------
reader : :class:`~.RandomAccessScanSource` or :class:`~.ScanIterator`
The scan data source to index.
n_processes : int, optional
The number of worker processes to use (the default is 4 or however many CPUs are available, whichever is lower)
scan_interval : tuple, optional
The start and stop scan indices to operate on (the default is None, which will index the entire file)
progress_indicator : :class:`Callable`, optional
A callable object which will be used to report progress as chunks finish processing. It must take
one argument, a :class:`float` which represents the fraction of all work completed.
Returns
-------
:class:`~.ExtendedScanIndex`:
The extended metadata index for this data file.
:class:`~.ScanIntervalTree`:
The scan interval tree for this data file.
See Also
--------
run_task_in_chunks
"""
task = _Indexer()
# indexing a :class:`ScanIterator` without random access, have to go in sequence
if not hasattr(reader, 'start_from_scan'):
logger.info("A non-random access ScanIterator was passed, defaulting to a single worker.")
reader.make_iterator(grouped=True)
chunks = [task(_TaskPayload(reader, 0, len(reader)))]
n_processes = 1
else:
chunks = run_task_in_chunks(
reader, n_processes, scan_interval=scan_interval, task=task,
progress_indicator=progress_indicator)
indices = [chunk[2] for chunk in chunks]
intervals = [chunk[3] for chunk in chunks]
index = _merge_indices(indices)
interval_tree = _make_interval_tree(intervals)
return index, interval_tree
def multi_index(readers, n_processes=4, scan_interval=None):
index_collection = []
interval_collection = []
for reader in readers:
chunks = run_task_in_chunks(
reader, n_processes, scan_interval=scan_interval, task=_Indexer())
indices = [chunk[2] for chunk in chunks]
intervals = [chunk[3] for chunk in chunks]
index_collection.append(indices)
        interval_collection.append(intervals)
    return index_collection, interval_collection
|
StarcoderdataPython
|
11261382
|
__all__ = ['pcap', 'logger']
|
StarcoderdataPython
|
1819330
|
<filename>app/model.py
import json
class Post():
def __init__(self, title, image, summary, post, author, author_id):
self.summary = summary
self.image = image
self.post = post
self.title = title
self.author = author
self.author_id = author_id
def toJSON(self):
return json.dumps(self, default=lambda o: o.__dict__,
sort_keys=True, indent=4)
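# Quick serialization sketch (argument values are illustrative):
#   post = Post('Hello', 'img.png', 'Short summary', 'Body text', 'alice', 1)
#   post.toJSON()  # -> pretty-printed JSON of the instance attributes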
# class PitchSchema(ma.Schema):
# class Meta:
# fields = ('id', 'title', 'description', 'summary', 'posted')
|
StarcoderdataPython
|
1710933
|
<filename>src/medius/mediuspackets/getallclanmessagesresponse.py
from enums.enums import MediusEnum, CallbackStatus
from utils import utils
from enums.enums import MediusIdEnum
class GetAllClanMessagesResponseSerializer:
data_dict = [
{'name': 'mediusid', 'n_bytes': 2, 'cast': None}
]
@classmethod
def build(self,
message_id,
callback_status,
clan_message_id,
message,
clan_message_status,
end_of_list
):
packet = [
{'name': __name__},
{'mediusid': MediusIdEnum.GetAllClanMessagesResponse},
{'message_id': message_id},
{'buf': utils.hex_to_bytes("000000")},
{'callback_status': utils.int_to_bytes_little(4, callback_status, signed=True)},
{'clan_id': utils.int_to_bytes_little(4, clan_message_id)},
{'message': utils.str_to_bytes(message, MediusEnum.CLANMSG_MAXLEN)},
            {'clan_message_status': utils.int_to_bytes_little(4, clan_message_status)},
{'end_of_list': utils.int_to_bytes_little(4, end_of_list)},
]
return packet
class GetAllClanMessagesResponseHandler:
def process(self, serialized, monolith, con):
raise Exception('Unimplemented Handler: GetAllClanMessagesResponseHandler')
|
StarcoderdataPython
|
373607
|
<gh_stars>1-10
from bs4 import BeautifulSoup
import pytest
import shutil
@pytest.mark.sphinx("html", testroot="hiddendirectives")
def test_warning(app, warnings):
"""Test warning thrown during the build"""
build_path = app.srcdir.joinpath("_build")
shutil.rmtree(build_path)
app.build()
assert (
"_enum_hidden.rst: WARNING: duplicate label: ex-hidden-number;"
) in warnings(app)
@pytest.mark.sphinx("html", testroot="hiddendirectives")
@pytest.mark.parametrize(
"idir",
[
"_enum_hidden.html",
"_unenum_hidden.html",
],
)
def test_hidden_exercise(app, idir, file_regression):
"""Test exercise directive markup."""
app.build()
path_to_directive = app.outdir / idir
assert path_to_directive.exists()
# get content markup
soup = BeautifulSoup(path_to_directive.read_text(encoding="utf8"), "html.parser")
exercise = soup.select("div.exercise")
assert len(exercise) == 0
@pytest.mark.sphinx("html", testroot="hiddendirectives")
@pytest.mark.parametrize(
"docname",
[
"_enum_hidden",
"_unenum_hidden",
],
)
def test_hidden_exercise_doctree(app, docname, file_regression, get_sphinx_app_doctree):
app.build()
get_sphinx_app_doctree(
app,
docname,
resolve=False,
regress=True,
)
@pytest.mark.sphinx("html", testroot="hiddendirectives")
@pytest.mark.parametrize(
"idir",
[
"_linked_enum_hidden.html",
"_linked_unenum_hidden.html",
],
)
def test_hidden_solution(app, idir, file_regression):
"""Test exercise directive markup."""
app.build()
path_to_directive = app.outdir / idir
assert path_to_directive.exists()
# get content markup
soup = BeautifulSoup(path_to_directive.read_text(encoding="utf8"), "html.parser")
solution = soup.select("div.solution")
assert len(solution) == 0
@pytest.mark.sphinx("html", testroot="hiddendirectives")
@pytest.mark.parametrize(
"docname",
[
"_linked_enum_hidden",
"_linked_unenum_hidden",
],
)
def test_hidden_solution_doctree(app, docname, file_regression, get_sphinx_app_doctree):
app.build()
get_sphinx_app_doctree(
app,
docname,
resolve=False,
regress=True,
)
|
StarcoderdataPython
|
1686683
|
<filename>spectrum/django/spectrum.py
FIRE_HOSE = {
'version': 1,
'disable_existing_loggers': False,
'root': {
'level': 'DEBUG',
'handlers': ['console', 'root']
},
'filters': {
'request_id': {
'()': 'spectrum.filters.RequestIdFilter'
}
},
'formatters': {
'verbose': {
'format': '[%(name)s][%(levelname)s] %(message)s'
}
},
'loggers': {
'django': {
'handlers': ['django'],
'level': 'DEBUG',
'propagate': False,
},
'django.request': {
'handlers': ['django.request'],
'level': 'DEBUG',
'propagate': False,
},
'django.db.backends': {
'handlers': ['django.db.backends'],
'level': 'DEBUG',
'propagate': False,
},
'celery': {
'handlers': ['celery'],
'level': 'DEBUG',
'propagate': False,
},
},
'handlers': {
'console': {
'level': 'DEBUG',
'class': 'logging.StreamHandler',
'formatter': 'verbose',
'filters': ['request_id']
},
'root': {
'level': 'DEBUG',
'class': 'spectrum.handlers.RestSpectrum',
'sublevel': '',
'filters': ['request_id']
},
'django': {
'level': 'DEBUG',
'class': 'spectrum.handlers.RestSpectrum',
'sublevel': 'django',
'filters': ['request_id']
},
'django.request': {
'level': 'DEBUG',
'class': 'spectrum.handlers.RestSpectrum',
'sublevel': 'django.request',
'filters': ['request_id']
},
'celery': {
'level': 'DEBUG',
'class': 'spectrum.handlers.RestSpectrum',
'sublevel': 'celery',
'filters': ['request_id']
},
'django.db.backends': {
'level': 'DEBUG',
'class': 'spectrum.handlers.RestSpectrum',
'sublevel': 'django.db.backends',
},
},
}
FIRE_HOSE_UDP = {
'version': 1,
'disable_existing_loggers': False,
'root': {
'level': 'DEBUG',
'handlers': ['console', 'root']
},
'filters': {
'request_id': {
'()': 'spectrum.filters.RequestIdFilter'
}
},
'formatters': {
'verbose': {
'format': '[%(name)s][%(levelname)s] %(message)s'
}
},
'loggers': {
'django': {
'handlers': ['django'],
'level': 'DEBUG',
'propagate': False,
},
'django.request': {
'handlers': ['django.request'],
'level': 'DEBUG',
'propagate': False,
},
'django.db.backends': {
'handlers': ['django.db.backends'],
'level': 'DEBUG',
'propagate': False,
},
'celery': {
'handlers': ['celery'],
'level': 'DEBUG',
'propagate': False,
},
},
'handlers': {
'console': {
'level': 'DEBUG',
'class': 'logging.StreamHandler',
'formatter': 'verbose',
'filters': ['request_id']
},
'root': {
'level': 'DEBUG',
'class': 'spectrum.handlers.UDPSpectrum',
'sublevel': '',
},
'django': {
'level': 'DEBUG',
'class': 'spectrum.handlers.UDPSpectrum',
'sublevel': 'django',
},
'django.request': {
'level': 'DEBUG',
'class': 'spectrum.handlers.UDPSpectrum',
'sublevel': 'django.request',
},
'celery': {
'level': 'DEBUG',
'class': 'spectrum.handlers.UDPSpectrum',
'sublevel': 'celery',
},
'django.db.backends': {
'level': 'DEBUG',
'class': 'spectrum.handlers.UDPSpectrum',
'sublevel': 'django.db.backends',
},
},
}
FIRE_HOSE_WS = {
'version': 1,
'disable_existing_loggers': False,
'root': {
'level': 'DEBUG',
'handlers': ['console', 'root']
},
'filters': {
'request_id': {
'()': 'spectrum.filters.RequestIdFilter'
}
},
'formatters': {
'verbose': {
'format': '[%(name)s][%(levelname)s] %(message)s'
}
},
'loggers': {
'django': {
'handlers': ['django'],
'level': 'DEBUG',
'propagate': False,
},
'django.request': {
'handlers': ['django.request'],
'level': 'DEBUG',
'propagate': False,
},
'django.db.backends': {
'handlers': ['django.db.backends'],
'level': 'DEBUG',
'propagate': False,
},
'celery': {
'handlers': ['celery'],
'level': 'DEBUG',
'propagate': False,
},
},
'handlers': {
'console': {
'level': 'DEBUG',
'class': 'logging.StreamHandler',
'formatter': 'verbose',
'filters': ['request_id']
},
'root': {
'level': 'DEBUG',
'class': 'spectrum.handlers.WebsocketSpectrum',
'sublevel': '',
},
'django': {
'level': 'DEBUG',
'class': 'spectrum.handlers.WebsocketSpectrum',
'sublevel': 'django',
},
'django.request': {
'level': 'DEBUG',
'class': 'spectrum.handlers.WebsocketSpectrum',
'sublevel': 'django.request',
},
'celery': {
'level': 'DEBUG',
'class': 'spectrum.handlers.WebsocketSpectrum',
'sublevel': 'celery',
},
'django.db.backends': {
'level': 'DEBUG',
'class': 'spectrum.handlers.WebsocketSpectrum',
'sublevel': 'django.db.backends',
},
},
}
def fire_hose(base_config=None, log_db=True, levels=None, handler_kwargs=None):
"""
A convenience method to get and modify predefined logging configurations.
Arguments
~~~~~~~~~
* ``base_config``: Defaults to `FIRE_HOSE`, which uses the REST HTTP stream on ``http://127.0.0.1:9000/``
* ``log_db``: shortcut for toggling the level of ``django.db.backends`` logging. Defaults to ``True``
* ``levels``: if provided, a 2-tuples iterable of logger names and their level.
    * ``handler_kwargs``: if provided, kwargs to pass to the handlers. Use this to override default settings such as the ip / port Spectrum is running on.
Examples
~~~~~~~~
::
from spectrum.django import fire_hose, FIRE_HOSE_UDP
LOGGING = fire_hose()
LOGGING = fire_hose(log_db=False)
LOGGING = fire_hose(levels=(
('my.overly.verbose.module', 'WARNING'),
('some.other.module', 'CRITICAL'),
)
LOGGING = fire_hose(FIRE_HOSE_UDP, handler_kwargs={'url': '127.0.0.1:12345'})
"""
if base_config is None:
base_config = FIRE_HOSE
if levels is None:
levels = tuple()
if handler_kwargs is None:
handler_kwargs = {}
if log_db is False:
base_config['loggers']['django.db.backends']['level'] = 'WARNING'
for silenced, level in levels:
if silenced not in base_config['loggers']:
base_config['loggers'][silenced] = {}
base_config['loggers'][silenced]['level'] = level
    for handler, handler_config in base_config['handlers'].items():
        if handler == 'console':
            continue  # the console StreamHandler does not accept these kwargs
        handler_config.update(handler_kwargs)
return base_config
|
StarcoderdataPython
|
3594136
|
# Here goes the ball class
import pygame
from config import BALL_VELOCITY, colors
class Ball_1(pygame.sprite.Sprite):
def __init__(self, width, height):
super().__init__()
self.image = pygame.Surface([width, height])
self.width = width
self.height = height
self.bonus = False
self.color = colors["Blue_ball"]
pygame.draw.rect(self.image, self.color, [0, 0, self.width, self.height])
self.rect = self.image.get_rect()
self.vel = [BALL_VELOCITY, BALL_VELOCITY]
def update(self):
self.rect.x += self.vel[0]
self.rect.y -= self.vel[1]
def bounce(self):
self.vel[0] = -self.vel[0]
self.vel[1] = +self.vel[1]
def change_colors(self):
if self.bonus is True:
self.color = colors["Black"]
pygame.draw.rect(self.image, self.color, [0, 0, self.width, self.height])
else:
self.color = colors["Blue_ball"]
pygame.draw.rect(self.image, self.color, [0, 0, self.width, self.height])
class Ball_2(pygame.sprite.Sprite):
def __init__(self, width, height):
super().__init__()
self.image = pygame.Surface([width, height])
self.width = width
self.height = height
self.bonus = False
self.color = colors["Red_ball"]
pygame.draw.rect(self.image, self.color, [0, 0, self.width, self.height])
self.rect = self.image.get_rect()
self.vel = [BALL_VELOCITY, BALL_VELOCITY]
def update(self):
self.rect.x -= self.vel[0]
self.rect.y += self.vel[1]
def bounce(self):
self.vel[0] = -self.vel[0]
self.vel[1] = +self.vel[1]
def change_colors(self):
if self.bonus is True:
self.color = colors["Black"]
pygame.draw.rect(self.image, self.color, [0, 0, self.width, self.height])
else:
self.color = colors["Red_ball"]
pygame.draw.rect(self.image, self.color, [0, 0, self.width, self.height])
class Ball_PU(pygame.sprite.Sprite):
def __init__(self, width, height):
super().__init__()
self.image = pygame.Surface([width, height])
self.color = colors["White"]
self.direction = ["Left", "Right"]
pygame.draw.rect(self.image, self.color, [0, 0, width, height])
self.rect = self.image.get_rect()
self.vel = BALL_VELOCITY
self.direction = 0
def update(self):
if self.direction == 0:
self.rect.x -= self.vel
if self.direction == 1:
self.rect.x += self.vel
|
StarcoderdataPython
|
9714762
|
<reponame>LauraOlivera/gammapy<gh_stars>0
# Licensed under a 3-clause BSD style license - see LICENSE.rst
"""Spectral models for Gammapy."""
import operator
import numpy as np
import scipy.optimize
import scipy.special
import astropy.units as u
from astropy import constants as const
from astropy.table import Table
from astropy.utils.decorators import classproperty
from astropy.visualization import quantity_support
from gammapy.maps import MapAxis, RegionNDMap
from gammapy.modeling import Parameter, Parameters
from gammapy.utils.integrate import trapz_loglog
from gammapy.utils.interpolation import (
ScaledRegularGridInterpolator,
interpolation_scale,
)
from gammapy.utils.scripts import make_path
from .core import Model
from gammapy.utils.roots import find_roots
def scale_plot_flux(flux, energy_power=0):
"""Scale flux to plot
Parameters
----------
flux : `Map`
Flux map
energy_power : int, optional
Power of energy to multiply flux axis with
Returns
-------
flux : `Map`
Scaled flux map
"""
energy = flux.geom.get_coord(sparse=True)["energy"]
try:
eunit = [_ for _ in flux.unit.bases if _.physical_type == "energy"][0]
except IndexError:
eunit = energy.unit
y = flux * np.power(energy, energy_power)
return y.to_unit(flux.unit * eunit ** energy_power)
def integrate_spectrum(func, energy_min, energy_max, ndecade=100):
"""Integrate 1d function using the log-log trapezoidal rule.
Internally an oversampling of the energy bins to "ndecade" is used.
Parameters
----------
func : callable
Function to integrate.
energy_min : `~astropy.units.Quantity`
Integration range minimum
energy_max : `~astropy.units.Quantity`
        Integration range maximum
ndecade : int, optional
Number of grid points per decade used for the integration.
Default : 100
"""
num = np.max(ndecade * np.log10(energy_max / energy_min))
energy = np.geomspace(energy_min, energy_max, num=int(num), axis=-1)
integral = trapz_loglog(func(energy), energy, axis=-1)
return integral.sum(axis=0)
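# A hedged sketch of integrate_spectrum on a power law (assumes astropy is
# available; for dN/dE = E**-2 the analytic integral over 1-10 TeV is 0.9,
# and the log-log trapezoidal rule is exact for power laws):
#   import astropy.units as u
#   func = lambda energy: (energy / u.TeV) ** -2 / u.TeV
#   integrate_spectrum(func, 1 * u.TeV, 10 * u.TeV)  # ~0.9 (dimensionless)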
class SpectralModel(Model):
"""Spectral model base class."""
_type = "spectral"
def __call__(self, energy):
kwargs = {par.name: par.quantity for par in self.parameters}
kwargs = self._convert_evaluate_unit(kwargs, energy)
return self.evaluate(energy, **kwargs)
@classproperty
def is_norm_spectral_model(cls):
"""Whether model is a norm spectral model"""
return "Norm" in cls.__name__
@staticmethod
def _convert_evaluate_unit(kwargs_ref, energy):
kwargs = {}
for name, quantity in kwargs_ref.items():
if quantity.unit.physical_type == "energy":
quantity = quantity.to(energy.unit)
kwargs[name] = quantity
return kwargs
def __add__(self, model):
if not isinstance(model, SpectralModel):
model = ConstantSpectralModel(const=model)
return CompoundSpectralModel(self, model, operator.add)
def __mul__(self, other):
if isinstance(other, SpectralModel):
return CompoundSpectralModel(self, other, operator.mul)
else:
raise TypeError(f"Multiplication invalid for type {other!r}")
def __radd__(self, model):
return self.__add__(model)
def __sub__(self, model):
if not isinstance(model, SpectralModel):
model = ConstantSpectralModel(const=model)
return CompoundSpectralModel(self, model, operator.sub)
def __rsub__(self, model):
return self.__sub__(model)
def _propagate_error(self, epsilon, fct, **kwargs):
"""Evaluate error for a given function with uncertainty propagation.
Parameters
----------
        fct : callable
            Function for which to estimate the error.
epsilon : float
Step size of the gradient evaluation. Given as a
fraction of the parameter error.
**kwargs : dict
            Keyword arguments passed to ``fct``.
Returns
-------
f_cov : `~astropy.units.Quantity`
Error of the given function.
"""
eps = np.sqrt(np.diag(self.covariance)) * epsilon
n, f_0 = len(self.parameters), fct(**kwargs)
shape = (n, len(np.atleast_1d(f_0)))
df_dp = np.zeros(shape)
for idx, parameter in enumerate(self.parameters):
if parameter.frozen or eps[idx] == 0:
continue
parameter.value += eps[idx]
df = fct(**kwargs) - f_0
df_dp[idx] = df.value / eps[idx]
parameter.value -= eps[idx]
f_cov = df_dp.T @ self.covariance @ df_dp
f_err = np.sqrt(np.diagonal(f_cov))
return u.Quantity([f_0.value, f_err], unit=f_0.unit)
def evaluate_error(self, energy, epsilon=1e-4):
"""Evaluate spectral model with error propagation.
Parameters
----------
energy : `~astropy.units.Quantity`
Energy at which to evaluate
epsilon : float
Step size of the gradient evaluation. Given as a
fraction of the parameter error.
Returns
-------
dnde, dnde_error : tuple of `~astropy.units.Quantity`
Tuple of flux and flux error.
"""
return self._propagate_error(epsilon=epsilon, fct=self, energy=energy)
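    # Hedged sketch: evaluate_error needs a covariance matrix (typically set
    # by a fit) and returns the flux together with its propagated error:
    # >>> dnde, dnde_err = model.evaluate_error(1 * u.TeV)  # doctest: +SKIP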
def integral(self, energy_min, energy_max, **kwargs):
r"""Integrate spectral model numerically if no analytical solution defined.
.. math::
F(E_{min}, E_{max}) = \int_{E_{min}}^{E_{max}} \phi(E) dE
Parameters
----------
energy_min, energy_max : `~astropy.units.Quantity`
Lower and upper bound of integration range.
**kwargs : dict
Keyword arguments passed to :func:`~gammapy.utils.integrate.integrate_spectrum`
"""
if hasattr(self, "evaluate_integral"):
kwargs = {par.name: par.quantity for par in self.parameters}
kwargs = self._convert_evaluate_unit(kwargs, energy_min)
return self.evaluate_integral(energy_min, energy_max, **kwargs)
else:
return integrate_spectrum(self, energy_min, energy_max, **kwargs)
def integral_error(self, energy_min, energy_max, epsilon=1e-4, **kwargs):
"""Evaluate the error of the integral flux of a given spectrum in
a given energy range.
Parameters
----------
energy_min, energy_max : `~astropy.units.Quantity`
Lower and upper bound of integration range.
epsilon : float
Step size of the gradient evaluation. Given as a
fraction of the parameter error.
Returns
-------
flux, flux_err : tuple of `~astropy.units.Quantity`
            Integral flux and flux error between energy_min and energy_max.
"""
return self._propagate_error(
epsilon=epsilon,
fct=self.integral,
energy_min=energy_min,
energy_max=energy_max,
**kwargs,
)
def energy_flux(self, energy_min, energy_max, **kwargs):
r"""Compute energy flux in given energy range.
.. math::
G(E_{min}, E_{max}) = \int_{E_{min}}^{E_{max}} E \phi(E) dE
Parameters
----------
energy_min, energy_max : `~astropy.units.Quantity`
Lower and upper bound of integration range.
**kwargs : dict
            Keyword arguments passed to :func:`~gammapy.utils.integrate.integrate_spectrum`
"""
def f(x):
return x * self(x)
if hasattr(self, "evaluate_energy_flux"):
kwargs = {par.name: par.quantity for par in self.parameters}
kwargs = self._convert_evaluate_unit(kwargs, energy_min)
return self.evaluate_energy_flux(energy_min, energy_max, **kwargs)
else:
return integrate_spectrum(f, energy_min, energy_max, **kwargs)
def energy_flux_error(self, energy_min, energy_max, epsilon=1e-4, **kwargs):
"""Evaluate the error of the energy flux of a given spectrum in
a given energy range.
Parameters
----------
energy_min, energy_max : `~astropy.units.Quantity`
Lower and upper bound of integration range.
epsilon : float
Step size of the gradient evaluation. Given as a
fraction of the parameter error.
Returns
-------
energy_flux, energy_flux_err : tuple of `~astropy.units.Quantity`
            Energy flux and energy flux error between energy_min and energy_max.
"""
return self._propagate_error(
epsilon=epsilon,
fct=self.energy_flux,
energy_min=energy_min,
energy_max=energy_max,
**kwargs,
)
def reference_fluxes(self, energy_axis):
"""Get reference fluxes for a given energy axis.
Parameters
----------
energy_axis : `MapAxis`
Energy axis
Returns
-------
fluxes : dict of `~astropy.units.Quantity`
Reference fluxes
"""
energy = energy_axis.center
energy_min, energy_max = energy_axis.edges_min, energy_axis.edges_max
return {
"e_ref": energy,
"e_min": energy_min,
"e_max": energy_max,
"ref_dnde": self(energy),
"ref_flux": self.integral(energy_min, energy_max),
"ref_eflux": self.energy_flux(energy_min, energy_max),
"ref_e2dnde": self(energy) * energy ** 2,
}
def _get_plot_flux(self, energy, sed_type):
flux = RegionNDMap.create(region=None, axes=[energy])
flux_err = RegionNDMap.create(region=None, axes=[energy])
if sed_type in ["dnde", "norm"]:
flux.quantity, flux_err.quantity = self.evaluate_error(energy.center)
elif sed_type == "e2dnde":
flux.quantity, flux_err.quantity = energy.center ** 2 * self.evaluate_error(energy.center)
elif sed_type == "flux":
flux.quantity, flux_err.quantity = self.integral_error(energy.edges_min, energy.edges_max)
elif sed_type == "eflux":
flux.quantity, flux_err.quantity = self.energy_flux_error(energy.edges_min, energy.edges_max)
else:
raise ValueError(f"Not a valid SED type: '{sed_type}'")
return flux, flux_err
def plot(
self,
energy_bounds,
ax=None,
sed_type="dnde",
energy_power=0,
n_points=100,
**kwargs,
):
"""Plot spectral model curve.
kwargs are forwarded to `matplotlib.pyplot.plot`
By default a log-log scaling of the axes is used, if you want to change
the y axis scaling to linear you can use::
from gammapy.modeling.models import ExpCutoffPowerLawSpectralModel
from astropy import units as u
pwl = ExpCutoffPowerLawSpectralModel()
ax = pwl.plot(energy_bounds=(0.1, 100) * u.TeV)
ax.set_yscale('linear')
Parameters
----------
ax : `~matplotlib.axes.Axes`, optional
Axis
energy_bounds : `~astropy.units.Quantity`
Plot energy bounds passed to MapAxis.from_energy_bounds
sed_type : {"dnde", "flux", "eflux", "e2dnde"}
Evaluation methods of the model
energy_power : int, optional
Power of energy to multiply flux axis with
n_points : int, optional
Number of evaluation nodes
**kwargs : dict
            Keyword arguments forwarded to `~matplotlib.pyplot.plot`
Returns
-------
ax : `~matplotlib.axes.Axes`, optional
Axis
"""
from gammapy.estimators.flux_map import DEFAULT_UNIT
import matplotlib.pyplot as plt
ax = plt.gca() if ax is None else ax
if self.is_norm_spectral_model:
sed_type = "norm"
energy_min, energy_max = energy_bounds
energy = MapAxis.from_energy_bounds(
energy_min, energy_max, n_points,
)
kwargs.setdefault("yunits", DEFAULT_UNIT[sed_type] * energy.unit ** energy_power)
flux, _ = self._get_plot_flux(sed_type=sed_type, energy=energy)
flux = scale_plot_flux(flux, energy_power=energy_power)
with quantity_support():
ax.plot(energy.center, flux.quantity[:, 0, 0], **kwargs)
self._plot_format_ax(ax, energy_power, sed_type)
return ax
def plot_error(
self,
energy_bounds,
ax=None,
sed_type="dnde",
energy_power=0,
n_points=100,
**kwargs,
):
"""Plot spectral model error band.
.. note::
This method calls ``ax.set_yscale("log", nonpositive='clip')`` and
``ax.set_xscale("log", nonposx='clip')`` to create a log-log representation.
The additional argument ``nonposx='clip'`` avoids artefacts in the plot,
when the error band extends to negative values (see also
https://github.com/matplotlib/matplotlib/issues/8623).
            When you call ``plt.loglog()`` or ``plt.semilogy()`` explicitly in your
plotting code and the error band extends to negative values, it is not
shown correctly. To circumvent this issue also use
``plt.loglog(nonposx='clip', nonpositive='clip')``
or ``plt.semilogy(nonpositive='clip')``.
Parameters
----------
ax : `~matplotlib.axes.Axes`, optional
Axis
energy_bounds : `~astropy.units.Quantity`
Plot energy bounds passed to MapAxis.from_energy_bounds
sed_type : {"dnde", "flux", "eflux", "e2dnde"}
Evaluation methods of the model
energy_power : int, optional
Power of energy to multiply flux axis with
n_points : int, optional
Number of evaluation nodes
**kwargs : dict
Keyword arguments forwarded to `matplotlib.pyplot.fill_between`
Returns
-------
ax : `~matplotlib.axes.Axes`, optional
Axis
"""
from gammapy.estimators.flux_map import DEFAULT_UNIT
import matplotlib.pyplot as plt
ax = plt.gca() if ax is None else ax
if self.is_norm_spectral_model:
sed_type = "norm"
energy_min, energy_max = energy_bounds
energy = MapAxis.from_energy_bounds(
energy_min, energy_max, n_points,
)
kwargs.setdefault("facecolor", "black")
kwargs.setdefault("alpha", 0.2)
kwargs.setdefault("linewidth", 0)
kwargs.setdefault("yunits", DEFAULT_UNIT[sed_type] * energy.unit ** energy_power)
flux, flux_err = self._get_plot_flux(sed_type=sed_type, energy=energy)
y_lo = scale_plot_flux(flux - flux_err, energy_power).quantity[:, 0, 0]
y_hi = scale_plot_flux(flux + flux_err, energy_power).quantity[:, 0, 0]
with quantity_support():
ax.fill_between(energy.center, y_lo, y_hi, **kwargs)
self._plot_format_ax(ax, energy_power, sed_type)
return ax
@staticmethod
def _plot_format_ax(ax, energy_power, sed_type):
ax.set_xlabel(f"Energy [{ax.xaxis.units}]")
if energy_power > 0:
ax.set_ylabel(f"e{energy_power} * {sed_type} [{ax.yaxis.units}]")
else:
ax.set_ylabel(f"{sed_type} [{ax.yaxis.units}]")
ax.set_xscale("log", nonpositive="clip")
ax.set_yscale("log", nonpositive="clip")
def spectral_index(self, energy, epsilon=1e-5):
"""Compute spectral index at given energy.
Parameters
----------
energy : `~astropy.units.Quantity`
Energy at which to estimate the index
epsilon : float
Fractional energy increment to use for determining the spectral index.
Returns
-------
index : float
Estimated spectral index.
"""
f1 = self(energy)
f2 = self(energy * (1 + epsilon))
return np.log(f1 / f2) / np.log(1 + epsilon)
def inverse(self, value, energy_min=0.1 * u.TeV, energy_max=100 * u.TeV):
"""Return energy for a given function value of the spectral model.
Calls the `scipy.optimize.brentq` numerical root finding method.
Parameters
----------
value : `~astropy.units.Quantity`
Function value of the spectral model.
energy_min : `~astropy.units.Quantity`
Lower energy bound of the roots finding
energy_max : `~astropy.units.Quantity`
Upper energy bound of the roots finding
Returns
-------
energy : `~astropy.units.Quantity`
Energies at which the model has the given ``value``.
"""
eunit = "TeV"
energy_min = energy_min.to(eunit)
energy_max = energy_max.to(eunit)
def f(x):
# scale by 1e12 to achieve better precision
energy = u.Quantity(x, eunit, copy=False)
y = self(energy).to_value(value.unit)
return 1e12 * (y - value.value)
roots, res = find_roots(f, energy_min, energy_max, points_scale="log")
return roots
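    # Hedged sketch: find the energy (or energies, for non-monotonic models)
    # at which the model reaches a given flux level:
    # >>> roots = model.inverse(1e-13 * u.Unit("cm-2 s-1 TeV-1"))  # doctest: +SKIP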
def inverse_all(self, values, energy_min=0.1 * u.TeV, energy_max=100 * u.TeV):
"""Return energies for multiple function values of the spectral model.
Calls the `scipy.optimize.brentq` numerical root finding method.
Parameters
----------
values : `~astropy.units.Quantity`
Function values of the spectral model.
energy_min : `~astropy.units.Quantity`
Lower energy bound of the roots finding
energy_max : `~astropy.units.Quantity`
Upper energy bound of the roots finding
Returns
-------
energy : list of `~astropy.units.Quantity`
            Each element contains the energies at which the model
            has the corresponding value of ``values``.
"""
energies = []
for val in np.atleast_1d(values):
res = self.inverse(val, energy_min, energy_max)
energies.append(res)
return energies
class ConstantSpectralModel(SpectralModel):
r"""Constant model.
For more information see :ref:`constant-spectral-model`.
Parameters
----------
const : `~astropy.units.Quantity`
:math:`k`
"""
tag = ["ConstantSpectralModel", "const"]
const = Parameter("const", "1e-12 cm-2 s-1 TeV-1")
@staticmethod
def evaluate(energy, const):
"""Evaluate the model (static function)."""
return np.ones(np.atleast_1d(energy).shape) * const
class CompoundSpectralModel(SpectralModel):
"""Arithmetic combination of two spectral models.
For more information see :ref:`compound-spectral-model`.
"""
tag = ["CompoundSpectralModel", "compound"]
def __init__(self, model1, model2, operator):
self.model1 = model1
self.model2 = model2
self.operator = operator
super().__init__()
@property
def parameters(self):
return self.model1.parameters + self.model2.parameters
def __str__(self):
return (
f"{self.__class__.__name__}\n"
f" Component 1 : {self.model1}\n"
f" Component 2 : {self.model2}\n"
f" Operator : {self.operator.__name__}\n"
)
def __call__(self, energy):
val1 = self.model1(energy)
val2 = self.model2(energy)
return self.operator(val1, val2)
def to_dict(self, full_output=False):
return {
"type": self.tag[0],
"model1": self.model1.to_dict(full_output),
"model2": self.model2.to_dict(full_output),
"operator": self.operator.__name__,
}
@classmethod
def from_dict(cls, data):
from gammapy.modeling.models import SPECTRAL_MODEL_REGISTRY
model1_cls = SPECTRAL_MODEL_REGISTRY.get_cls(data["model1"]["type"])
model1 = model1_cls.from_dict(data["model1"])
model2_cls = SPECTRAL_MODEL_REGISTRY.get_cls(data["model2"]["type"])
model2 = model2_cls.from_dict(data["model2"])
op = getattr(operator, data["operator"])
return cls(model1, model2, op)
class PowerLawSpectralModel(SpectralModel):
r"""Spectral power-law model.
For more information see :ref:`powerlaw-spectral-model`.
Parameters
----------
index : `~astropy.units.Quantity`
:math:`\Gamma`
amplitude : `~astropy.units.Quantity`
:math:`\phi_0`
reference : `~astropy.units.Quantity`
:math:`E_0`
See Also
--------
PowerLaw2SpectralModel, PowerLawNormSpectralModel
"""
tag = ["PowerLawSpectralModel", "pl"]
index = Parameter("index", 2.0)
amplitude = Parameter("amplitude", "1e-12 cm-2 s-1 TeV-1", scale_method="scale10", interp="log")
reference = Parameter("reference", "1 TeV", frozen=True)
@staticmethod
def evaluate(energy, index, amplitude, reference):
"""Evaluate the model (static function)."""
return amplitude * np.power((energy / reference), -index)
@staticmethod
def evaluate_integral(energy_min, energy_max, index, amplitude, reference):
r"""Integrate power law analytically (static function).
.. math::
F(E_{min}, E_{max}) = \int_{E_{min}}^{E_{max}}\phi(E)dE = \left.
\phi_0 \frac{E_0}{-\Gamma + 1} \left( \frac{E}{E_0} \right)^{-\Gamma + 1}
\right \vert _{E_{min}}^{E_{max}}
Parameters
----------
energy_min, energy_max : `~astropy.units.Quantity`
Lower and upper bound of integration range
"""
val = -1 * index + 1
prefactor = amplitude * reference / val
upper = np.power((energy_max / reference), val)
lower = np.power((energy_min / reference), val)
integral = prefactor * (upper - lower)
mask = np.isclose(val, 0)
if mask.any():
integral[mask] = (amplitude * reference * np.log(energy_max / energy_min))[
mask
]
return integral
@staticmethod
def evaluate_energy_flux(energy_min, energy_max, index, amplitude, reference):
r"""Compute energy flux in given energy range analytically (static function).
.. math::
G(E_{min}, E_{max}) = \int_{E_{min}}^{E_{max}}E \phi(E)dE = \left.
\phi_0 \frac{E_0^2}{-\Gamma + 2} \left( \frac{E}{E_0} \right)^{-\Gamma + 2}
\right \vert _{E_{min}}^{E_{max}}
Parameters
----------
energy_min, energy_max : `~astropy.units.Quantity`
Lower and upper bound of integration range.
"""
val = -1 * index + 2
prefactor = amplitude * reference ** 2 / val
upper = (energy_max / reference) ** val
lower = (energy_min / reference) ** val
energy_flux = prefactor * (upper - lower)
mask = np.isclose(val, 0)
if mask.any():
# see https://www.wolframalpha.com/input/?i=a+*+x+*+(x%2Fb)+%5E+(-2)
# for reference
energy_flux[mask] = (
amplitude * reference ** 2 * np.log(energy_max / energy_min)[mask]
)
return energy_flux
def inverse(self, value, *args):
"""Return energy for a given function value of the spectral model.
Parameters
----------
value : `~astropy.units.Quantity`
Function value of the spectral model.
"""
base = value / self.amplitude.quantity
return self.reference.quantity * np.power(base, -1.0 / self.index.value)
@property
def pivot_energy(self):
r"""The decorrelation energy is defined as:
.. math::
E_D = E_0 * \exp{cov(\phi_0, \Gamma) / (\phi_0 \Delta \Gamma^2)}
Formula (1) in https://arxiv.org/pdf/0910.4881.pdf
"""
index_err = self.index.error
reference = self.reference.quantity
amplitude = self.amplitude.quantity
cov_index_ampl = self.covariance.data[0, 1] * amplitude.unit
return reference * np.exp(cov_index_ampl / (amplitude * index_err ** 2))
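# Minimal usage sketch for PowerLawSpectralModel (parameter values are
# illustrative assumptions, not from the source):
# >>> from astropy import units as u
# >>> pwl = PowerLawSpectralModel(index=2.3, amplitude="1e-12 cm-2 s-1 TeV-1")
# >>> pwl(1 * u.TeV)                       # differential flux dN/dE
# >>> pwl.integral(1 * u.TeV, 10 * u.TeV)  # analytic, via evaluate_integral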
class PowerLawNormSpectralModel(SpectralModel):
r"""Spectral power-law model with normalized amplitude parameter.
Parameters
----------
tilt : `~astropy.units.Quantity`
:math:`\Gamma`
norm : `~astropy.units.Quantity`
:math:`\phi_0`
reference : `~astropy.units.Quantity`
:math:`E_0`
See Also
--------
PowerLawSpectralModel, PowerLaw2SpectralModel
"""
tag = ["PowerLawNormSpectralModel", "pl-norm"]
norm = Parameter("norm", 1, unit="", interp="log")
tilt = Parameter("tilt", 0, frozen=True)
reference = Parameter("reference", "1 TeV", frozen=True)
@staticmethod
def evaluate(energy, tilt, norm, reference):
"""Evaluate the model (static function)."""
return norm * np.power((energy / reference), -tilt)
@staticmethod
def evaluate_integral(energy_min, energy_max, tilt, norm, reference):
"""Evaluate pwl integral."""
val = -1 * tilt + 1
prefactor = norm * reference / val
upper = np.power((energy_max / reference), val)
lower = np.power((energy_min / reference), val)
integral = prefactor * (upper - lower)
mask = np.isclose(val, 0)
if mask.any():
integral[mask] = (norm * reference * np.log(energy_max / energy_min))[mask]
return integral
@staticmethod
def evaluate_energy_flux(energy_min, energy_max, tilt, norm, reference):
"""Evaluate the energy flux (static function)"""
val = -1 * tilt + 2
prefactor = norm * reference ** 2 / val
upper = (energy_max / reference) ** val
lower = (energy_min / reference) ** val
energy_flux = prefactor * (upper - lower)
mask = np.isclose(val, 0)
if mask.any():
# see https://www.wolframalpha.com/input/?i=a+*+x+*+(x%2Fb)+%5E+(-2)
# for reference
energy_flux[mask] = (
norm * reference ** 2 * np.log(energy_max / energy_min)[mask]
)
return energy_flux
def inverse(self, value, *args):
"""Return energy for a given function value of the spectral model.
Parameters
----------
value : `~astropy.units.Quantity`
Function value of the spectral model.
"""
base = value / self.norm.quantity
return self.reference.quantity * np.power(base, -1.0 / self.tilt.value)
@property
def pivot_energy(self):
r"""The decorrelation energy is defined as:
.. math::
E_D = E_0 * \exp{cov(\phi_0, \Gamma) / (\phi_0 \Delta \Gamma^2)}
Formula (1) in https://arxiv.org/pdf/0910.4881.pdf
"""
tilt_err = self.tilt.error
reference = self.reference.quantity
norm = self.norm.quantity
cov_tilt_norm = self.covariance.data[0, 1] * norm.unit
return reference * np.exp(cov_tilt_norm / (norm * tilt_err ** 2))
class PowerLaw2SpectralModel(SpectralModel):
r"""Spectral power-law model with integral as amplitude parameter.
For more information see :ref:`powerlaw2-spectral-model`.
Parameters
----------
index : `~astropy.units.Quantity`
Spectral index :math:`\Gamma`
amplitude : `~astropy.units.Quantity`
Integral flux :math:`F_0`.
emin : `~astropy.units.Quantity`
Lower energy limit :math:`E_{0, min}`.
emax : `~astropy.units.Quantity`
Upper energy limit :math:`E_{0, max}`.
See Also
--------
PowerLawSpectralModel, PowerLawNormSpectralModel
"""
tag = ["PowerLaw2SpectralModel", "pl-2"]
amplitude = Parameter("amplitude", "1e-12 cm-2 s-1", scale_method="scale10", interp="log")
index = Parameter("index", 2)
emin = Parameter("emin", "0.1 TeV", frozen=True)
emax = Parameter("emax", "100 TeV", frozen=True)
@staticmethod
def evaluate(energy, amplitude, index, emin, emax):
"""Evaluate the model (static function)."""
top = -index + 1
# to get the energies dimensionless we use a modified formula
bottom = emax - emin * (emin / emax) ** (-index)
return amplitude * (top / bottom) * np.power(energy / emax, -index)
@staticmethod
def evaluate_integral(energy_min, energy_max, amplitude, index, emin, emax):
r"""Integrate power law analytically.
.. math::
F(E_{min}, E_{max}) = F_0 \cdot \frac{E_{max}^{\Gamma + 1} \
- E_{min}^{\Gamma + 1}}{E_{0, max}^{\Gamma + 1} \
- E_{0, min}^{\Gamma + 1}}
Parameters
----------
energy_min, energy_max : `~astropy.units.Quantity`
Lower and upper bound of integration range.
"""
temp1 = np.power(energy_max, -index.value + 1)
temp2 = np.power(energy_min, -index.value + 1)
top = temp1 - temp2
temp1 = np.power(emax, -index.value + 1)
temp2 = np.power(emin, -index.value + 1)
bottom = temp1 - temp2
return amplitude * top / bottom
def inverse(self, value, *args):
"""Return energy for a given function value of the spectral model.
Parameters
----------
value : `~astropy.units.Quantity`
Function value of the spectral model.
"""
amplitude = self.amplitude.quantity
index = self.index.value
energy_min = self.emin.quantity
energy_max = self.emax.quantity
# to get the energies dimensionless we use a modified formula
top = -index + 1
bottom = energy_max - energy_min * (energy_min / energy_max) ** (-index)
term = (bottom / top) * (value / amplitude)
return np.power(term.to_value(""), -1.0 / index) * energy_max
class BrokenPowerLawSpectralModel(SpectralModel):
r"""Spectral broken power-law model.
For more information see :ref:`broken-powerlaw-spectral-model`.
Parameters
----------
index1 : `~astropy.units.Quantity`
:math:`\Gamma1`
index2 : `~astropy.units.Quantity`
:math:`\Gamma2`
amplitude : `~astropy.units.Quantity`
:math:`\phi_0`
ebreak : `~astropy.units.Quantity`
:math:`E_{break}`
See Also
--------
SmoothBrokenPowerLawSpectralModel
"""
tag = ["BrokenPowerLawSpectralModel", "bpl"]
index1 = Parameter("index1", 2.0)
index2 = Parameter("index2", 2.0)
amplitude = Parameter("amplitude", "1e-12 cm-2 s-1 TeV-1", scale_method="scale10", interp="log")
ebreak = Parameter("ebreak", "1 TeV")
@staticmethod
def evaluate(energy, index1, index2, amplitude, ebreak):
"""Evaluate the model (static function)."""
energy = np.atleast_1d(energy)
cond = energy < ebreak
bpwl = amplitude * np.ones(energy.shape)
bpwl[cond] *= (energy[cond] / ebreak) ** (-index1)
bpwl[~cond] *= (energy[~cond] / ebreak) ** (-index2)
return bpwl
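# Hedged sketch: a broken power law with a break at 1 TeV; energies below and
# above ebreak pick up index1 and index2 respectively (values illustrative):
# >>> bpl = BrokenPowerLawSpectralModel(index1=1.5, index2=2.5, ebreak="1 TeV")
# >>> bpl([0.5, 2.0] * u.TeV)  # one value per branch of the broken law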
class SmoothBrokenPowerLawSpectralModel(SpectralModel):
r"""Spectral smooth broken power-law model.
For more information see :ref:`smooth-broken-powerlaw-spectral-model`.
Parameters
----------
index1 : `~astropy.units.Quantity`
:math:`\Gamma1`
index2 : `~astropy.units.Quantity`
:math:`\Gamma2`
amplitude : `~astropy.units.Quantity`
:math:`\phi_0`
reference : `~astropy.units.Quantity`
:math:`E_0`
ebreak : `~astropy.units.Quantity`
:math:`E_{break}`
beta : `~astropy.units.Quantity`
:math:`\beta`
See Also
--------
BrokenPowerLawSpectralModel
"""
tag = ["SmoothBrokenPowerLawSpectralModel", "sbpl"]
index1 = Parameter("index1", 2.0)
index2 = Parameter("index2", 2.0)
amplitude = Parameter("amplitude", "1e-12 cm-2 s-1 TeV-1", scale_method="scale10", interp="log")
ebreak = Parameter("ebreak", "1 TeV")
reference = Parameter("reference", "1 TeV", frozen=True)
beta = Parameter("beta", 1, frozen=True)
@staticmethod
def evaluate(energy, index1, index2, amplitude, ebreak, reference, beta):
"""Evaluate the model (static function)."""
beta *= np.sign(index2 - index1)
pwl = amplitude * (energy / reference) ** (-index1)
brk = (1 + (energy / ebreak) ** ((index2 - index1) / beta)) ** (-beta)
return pwl * brk
class PiecewiseNormSpectralModel(SpectralModel):
""" Piecewise spectral correction
with a free normalization at each fixed energy nodes.
For more information see :ref:`piecewise-norm-spectral`.
Parameters
----------
energy : `~astropy.units.Quantity`
Array of energies at which the model values are given (nodes).
norms : `~numpy.ndarray` or list of `Parameter`
Array with the initial norms of the model at energies ``energy``.
        A normalisation parameter is created for each value.
Default is one at each node.
interp : str
Interpolation scaling in {"log", "lin"}. Default is "log"
"""
tag = ["PiecewiseNormSpectralModel", "piecewise-norm"]
def __init__(self, energy, norms=None, interp="log"):
self._energy = energy
self._interp = interp
if norms is None:
norms = np.ones(len(energy))
if len(norms) != len(energy):
raise ValueError("dimension mismatch")
if len(norms) < 2:
raise ValueError("Input arrays must contain at least 2 elements")
if not isinstance(norms[0], Parameter):
parameters = Parameters(
[Parameter(f"norm_{k}", norm) for k, norm in enumerate(norms)]
)
else:
parameters = Parameters(norms)
self.default_parameters = parameters
super().__init__()
@property
def energy(self):
"""Energy nodes"""
return self._energy
@property
def norms(self):
"""Norm values"""
return u.Quantity(self.parameters.value)
def evaluate(self, energy, **norms):
scale = interpolation_scale(scale=self._interp)
e_eval = scale(np.atleast_1d(energy.value))
e_nodes = scale(self.energy.to(energy.unit).value)
v_nodes = scale(self.norms)
log_interp = scale.inverse(np.interp(e_eval, e_nodes, v_nodes))
return log_interp
def to_dict(self, full_output=False):
data = super().to_dict(full_output=full_output)
data["energy"] = {
"data": self.energy.data.tolist(),
"unit": str(self.energy.unit),
}
return data
@classmethod
def from_dict(cls, data):
"""Create model from dict"""
energy = u.Quantity(data["energy"]["data"], data["energy"]["unit"])
parameters = Parameters.from_dict(data["parameters"])
return cls.from_parameters(parameters, energy=energy)
@classmethod
def from_parameters(cls, parameters, **kwargs):
"""Create model from parameters"""
return cls(norms=parameters, **kwargs)
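# Hedged sketch: a piecewise norm correction over three fixed energy nodes;
# evaluation interpolates the norms in log-log space (values illustrative):
# >>> nodes = [1, 3, 10] * u.TeV
# >>> piecewise = PiecewiseNormSpectralModel(energy=nodes, norms=[1.0, 1.2, 0.8])
# >>> piecewise(2 * u.TeV)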
class ExpCutoffPowerLawSpectralModel(SpectralModel):
r"""Spectral exponential cutoff power-law model.
For more information see :ref:`exp-cutoff-powerlaw-spectral-model`.
Parameters
----------
index : `~astropy.units.Quantity`
:math:`\Gamma`
amplitude : `~astropy.units.Quantity`
:math:`\phi_0`
reference : `~astropy.units.Quantity`
:math:`E_0`
lambda_ : `~astropy.units.Quantity`
:math:`\lambda`
alpha : `~astropy.units.Quantity`
:math:`\alpha`
See Also
--------
ExpCutoffPowerLawNormSpectralModel
"""
tag = ["ExpCutoffPowerLawSpectralModel", "ecpl"]
index = Parameter("index", 1.5)
amplitude = Parameter("amplitude", "1e-12 cm-2 s-1 TeV-1", scale_method="scale10", interp="log")
reference = Parameter("reference", "1 TeV", frozen=True)
lambda_ = Parameter("lambda_", "0.1 TeV-1")
alpha = Parameter("alpha", "1.0", frozen=True)
@staticmethod
def evaluate(energy, index, amplitude, reference, lambda_, alpha):
"""Evaluate the model (static function)."""
pwl = amplitude * (energy / reference) ** (-index)
cutoff = np.exp(-np.power(energy * lambda_, alpha))
return pwl * cutoff
@property
def e_peak(self):
r"""Spectral energy distribution peak energy (`~astropy.units.Quantity`).
This is the peak in E^2 x dN/dE and is given by:
.. math::
E_{Peak} = \left(\frac{2 - \Gamma}{\alpha}\right)^{1/\alpha} / \lambda
"""
reference = self.reference.quantity
index = self.index.quantity
lambda_ = self.lambda_.quantity
alpha = self.alpha.quantity
if index >= 2 or lambda_ == 0.0 or alpha == 0.0:
return np.nan * reference.unit
else:
return np.power((2 - index) / alpha, 1 / alpha) / lambda_
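    # Hedged sketch of the e_peak property: for index=1.5, alpha=1 and
    # lambda_=0.1 TeV-1 the formula gives ((2 - 1.5) / 1) ** 1 / lambda_ = 5 TeV:
    # >>> ecpl = ExpCutoffPowerLawSpectralModel(index=1.5, lambda_="0.1 TeV-1")
    # >>> ecpl.e_peak  # 5 TeV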
class ExpCutoffPowerLawNormSpectralModel(SpectralModel):
r"""Norm spectral exponential cutoff power-law model.
Parameters
----------
index : `~astropy.units.Quantity`
:math:`\Gamma`
norm : `~astropy.units.Quantity`
:math:`\phi_0`
reference : `~astropy.units.Quantity`
:math:`E_0`
lambda_ : `~astropy.units.Quantity`
:math:`\lambda`
alpha : `~astropy.units.Quantity`
:math:`\alpha`
See Also
--------
ExpCutoffPowerLawSpectralModel
"""
tag = ["ExpCutoffPowerLawNormSpectralModel", "ecpl-norm"]
index = Parameter("index", 1.5)
norm = Parameter("norm", 1, unit="", interp="log")
reference = Parameter("reference", "1 TeV", frozen=True)
lambda_ = Parameter("lambda_", "0.1 TeV-1")
alpha = Parameter("alpha", "1.0", frozen=True)
@staticmethod
def evaluate(energy, index, norm, reference, lambda_, alpha):
"""Evaluate the model (static function)."""
pwl = norm * (energy / reference) ** (-index)
cutoff = np.exp(-np.power(energy * lambda_, alpha))
return pwl * cutoff
class ExpCutoffPowerLaw3FGLSpectralModel(SpectralModel):
r"""Spectral exponential cutoff power-law model used for 3FGL.
For more information see :ref:`exp-cutoff-powerlaw-3fgl-spectral-model`.
Parameters
----------
index : `~astropy.units.Quantity`
:math:`\Gamma`
amplitude : `~astropy.units.Quantity`
:math:`\phi_0`
reference : `~astropy.units.Quantity`
:math:`E_0`
ecut : `~astropy.units.Quantity`
:math:`E_{C}`
"""
tag = ["ExpCutoffPowerLaw3FGLSpectralModel", "ecpl-3fgl"]
index = Parameter("index", 1.5)
amplitude = Parameter("amplitude", "1e-12 cm-2 s-1 TeV-1", scale_method="scale10", interp="log")
reference = Parameter("reference", "1 TeV", frozen=True)
ecut = Parameter("ecut", "10 TeV")
@staticmethod
def evaluate(energy, index, amplitude, reference, ecut):
"""Evaluate the model (static function)."""
pwl = amplitude * (energy / reference) ** (-index)
cutoff = np.exp((reference - energy) / ecut)
return pwl * cutoff
class SuperExpCutoffPowerLaw3FGLSpectralModel(SpectralModel):
r"""Spectral super exponential cutoff power-law model used for 3FGL.
For more information see :ref:`super-exp-cutoff-powerlaw-3fgl-spectral-model`.
.. math::
\phi(E) = \phi_0 \cdot \left(\frac{E}{E_0}\right)^{-\Gamma_1}
\exp \left( \left(\frac{E_0}{E_{C}} \right)^{\Gamma_2} -
\left(\frac{E}{E_{C}} \right)^{\Gamma_2}
\right)
Parameters
----------
index_1 : `~astropy.units.Quantity`
:math:`\Gamma_1`
index_2 : `~astropy.units.Quantity`
:math:`\Gamma_2`
amplitude : `~astropy.units.Quantity`
:math:`\phi_0`
reference : `~astropy.units.Quantity`
:math:`E_0`
ecut : `~astropy.units.Quantity`
:math:`E_{C}`
"""
tag = ["SuperExpCutoffPowerLaw3FGLSpectralModel", "secpl-3fgl"]
amplitude = Parameter("amplitude", "1e-12 cm-2 s-1 TeV-1", scale_method="scale10", interp="log")
reference = Parameter("reference", "1 TeV", frozen=True)
ecut = Parameter("ecut", "10 TeV")
index_1 = Parameter("index_1", 1.5)
index_2 = Parameter("index_2", 2)
@staticmethod
def evaluate(energy, amplitude, reference, ecut, index_1, index_2):
"""Evaluate the model (static function)."""
pwl = amplitude * (energy / reference) ** (-index_1)
cutoff = np.exp((reference / ecut) ** index_2 - (energy / ecut) ** index_2)
return pwl * cutoff
class SuperExpCutoffPowerLaw4FGLSpectralModel(SpectralModel):
r"""Spectral super exponential cutoff power-law model used for 4FGL.
For more information see :ref:`super-exp-cutoff-powerlaw-4fgl-spectral-model`.
Parameters
----------
index_1 : `~astropy.units.Quantity`
:math:`\Gamma_1`
index_2 : `~astropy.units.Quantity`
:math:`\Gamma_2`
amplitude : `~astropy.units.Quantity`
:math:`\phi_0`
reference : `~astropy.units.Quantity`
:math:`E_0`
expfactor : `~astropy.units.Quantity`
:math:`a`, given as dimensionless value but
internally assumes unit of :math:`[E_0]` power :math:`-\Gamma_2`
"""
tag = ["SuperExpCutoffPowerLaw4FGLSpectralModel", "secpl-4fgl"]
amplitude = Parameter("amplitude", "1e-12 cm-2 s-1 TeV-1", scale_method="scale10", interp="log")
reference = Parameter("reference", "1 TeV", frozen=True)
expfactor = Parameter("expfactor", "1e-2")
index_1 = Parameter("index_1", 1.5)
index_2 = Parameter("index_2", 2)
@staticmethod
def evaluate(energy, amplitude, reference, expfactor, index_1, index_2):
"""Evaluate the model (static function)."""
pwl = amplitude * (energy / reference) ** (-index_1)
cutoff = np.exp(
expfactor
/ reference.unit ** index_2
* (reference ** index_2 - energy ** index_2)
)
return pwl * cutoff
class LogParabolaSpectralModel(SpectralModel):
r"""Spectral log parabola model.
For more information see :ref:`logparabola-spectral-model`.
Parameters
----------
amplitude : `~astropy.units.Quantity`
:math:`\phi_0`
reference : `~astropy.units.Quantity`
:math:`E_0`
alpha : `~astropy.units.Quantity`
:math:`\alpha`
beta : `~astropy.units.Quantity`
:math:`\beta`
See Also
--------
LogParabolaNormSpectralModel
"""
tag = ["LogParabolaSpectralModel", "lp"]
amplitude = Parameter("amplitude", "1e-12 cm-2 s-1 TeV-1", scale_method="scale10", interp="log")
reference = Parameter("reference", "10 TeV", frozen=True)
alpha = Parameter("alpha", 2)
beta = Parameter("beta", 1)
@classmethod
def from_log10(cls, amplitude, reference, alpha, beta):
"""Construct from :math:`log_{10}` parametrization."""
beta_ = beta / np.log(10)
return cls(amplitude=amplitude, reference=reference, alpha=alpha, beta=beta_)
@staticmethod
def evaluate(energy, amplitude, reference, alpha, beta):
"""Evaluate the model (static function)."""
xx = energy / reference
exponent = -alpha - beta * np.log(xx)
return amplitude * np.power(xx, exponent)
@property
def e_peak(self):
r"""Spectral energy distribution peak energy (`~astropy.units.Quantity`).
This is the peak in E^2 x dN/dE and is given by:
.. math::
E_{Peak} = E_{0} \exp{ (2 - \alpha) / (2 * \beta)}
"""
reference = self.reference.quantity
alpha = self.alpha.quantity
beta = self.beta.quantity
return reference * np.exp((2 - alpha) / (2 * beta))
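    # Hedged sketch: building a log parabola from the log10 parametrization
    # simply rescales beta by ln(10) before construction (values illustrative):
    # >>> lp = LogParabolaSpectralModel.from_log10(
    # ...     amplitude="1e-12 cm-2 s-1 TeV-1", reference="10 TeV", alpha=2.0, beta=0.5
    # ... )
    # >>> lp.e_peak  # equals reference here, since alpha == 2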
class LogParabolaNormSpectralModel(SpectralModel):
r"""Norm spectral log parabola model.
Parameters
----------
norm : `~astropy.units.Quantity`
:math:`\phi_0`
reference : `~astropy.units.Quantity`
:math:`E_0`
alpha : `~astropy.units.Quantity`
:math:`\alpha`
beta : `~astropy.units.Quantity`
:math:`\beta`
See Also
--------
LogParabolaSpectralModel
"""
tag = ["LogParabolaNormSpectralModel", "lp-norm"]
norm = Parameter("norm", 1, unit="", interp="log")
reference = Parameter("reference", "10 TeV", frozen=True)
alpha = Parameter("alpha", 2)
beta = Parameter("beta", 1)
@classmethod
def from_log10(cls, norm, reference, alpha, beta):
"""Construct from :math:`log_{10}` parametrization."""
beta_ = beta / np.log(10)
return cls(norm=norm, reference=reference, alpha=alpha, beta=beta_)
@staticmethod
def evaluate(energy, norm, reference, alpha, beta):
"""Evaluate the model (static function)."""
xx = energy / reference
exponent = -alpha - beta * np.log(xx)
return norm * np.power(xx, exponent)
class TemplateSpectralModel(SpectralModel):
"""A model generated from a table of energy and value arrays.
For more information see :ref:`template-spectral-model`.
Parameters
----------
energy : `~astropy.units.Quantity`
Array of energies at which the model values are given
values : array
Array with the values of the model at energies ``energy``.
interp_kwargs : dict
Interpolation keyword arguments passed to `scipy.interpolate.RegularGridInterpolator`.
By default all values outside the interpolation range are set to zero.
If you want to apply linear extrapolation you can pass `interp_kwargs={'fill_value':
'extrapolate', 'kind': 'linear'}`. If you want to choose the interpolation
scaling applied to values, you can use `interp_kwargs={"values_scale": "log"}`.
meta : dict, optional
Meta information, meta['filename'] will be used for serialization
"""
tag = ["TemplateSpectralModel", "template"]
def __init__(
self, energy, values, interp_kwargs=None, meta=None,
):
self.energy = energy
self.values = u.Quantity(values, copy=False)
self.meta = dict() if meta is None else meta
interp_kwargs = interp_kwargs or {}
interp_kwargs.setdefault("values_scale", "log")
interp_kwargs.setdefault("points_scale", ("log",))
self._evaluate = ScaledRegularGridInterpolator(
points=(energy,), values=values, **interp_kwargs
)
super().__init__()
@classmethod
def read_xspec_model(cls, filename, param, **kwargs):
"""Read XSPEC table model.
The input is a table containing absorbed values from a XSPEC model as a
function of energy.
TODO: Format of the file should be described and discussed in
https://gamma-astro-data-formats.readthedocs.io/en/latest/index.html
Parameters
----------
filename : str
File containing the XSPEC model
param : float
Model parameter value
Examples
--------
Fill table from an EBL model (Franceschini, 2008)
>>> from gammapy.modeling.models import TemplateSpectralModel
>>> filename = '$GAMMAPY_DATA/ebl/ebl_franceschini.fits.gz'
>>> table_model = TemplateSpectralModel.read_xspec_model(filename=filename, param=0.3)
"""
filename = make_path(filename)
# Check if parameter value is in range
table_param = Table.read(filename, hdu="PARAMETERS")
pmin = table_param["MINIMUM"]
pmax = table_param["MAXIMUM"]
if param < pmin or param > pmax:
raise ValueError(f"Out of range: param={param}, min={pmin}, max={pmax}")
# Get energy values
table_energy = Table.read(filename, hdu="ENERGIES")
energy_lo = table_energy["ENERG_LO"]
energy_hi = table_energy["ENERG_HI"]
# set energy to log-centers
energy = np.sqrt(energy_lo * energy_hi)
# Get spectrum values (no interpolation, take closest value for param)
table_spectra = Table.read(filename, hdu="SPECTRA")
idx = np.abs(table_spectra["PARAMVAL"] - param).argmin()
values = u.Quantity(table_spectra[idx][1], "", copy=False) # no dimension
kwargs.setdefault("interp_kwargs", {"values_scale": "lin"})
return cls(energy=energy, values=values, **kwargs)
def evaluate(self, energy):
"""Evaluate the model (static function)."""
return self._evaluate((energy,), clip=True)
def to_dict(self, full_output=False):
return {
"type": self.tag[0],
"energy": {
"data": self.energy.data.tolist(),
"unit": str(self.energy.unit),
},
"values": {
"data": self.values.data.tolist(),
"unit": str(self.values.unit),
},
}
@classmethod
def from_dict(cls, data):
energy = u.Quantity(data["energy"]["data"], data["energy"]["unit"])
values = u.Quantity(data["values"]["data"], data["values"]["unit"])
return cls(energy=energy, values=values)
@classmethod
def from_region_map(cls, map, **kwargs):
"""Create model from region map"""
energy = map.geom.axes["energy_true"].center
values = map.quantity[:, 0, 0]
return cls(energy=energy, values=values, **kwargs)
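# Hedged sketch: build a template model from tabulated (energy, dnde) arrays;
# evaluation interpolates in log-log space (all values illustrative):
# >>> nodes = [1, 3, 10, 30] * u.TeV
# >>> dnde = [4e-12, 3e-12, 8e-13, 1e-13] * u.Unit("cm-2 s-1 TeV-1")
# >>> template = TemplateSpectralModel(energy=nodes, values=dnde)
# >>> template(5 * u.TeV)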
class ScaleSpectralModel(SpectralModel):
"""Wrapper to scale another spectral model by a norm factor.
Parameters
----------
model : `SpectralModel`
Spectral model to wrap.
norm : float
Multiplicative norm factor for the model value.
"""
tag = ["ScaleSpectralModel", "scale"]
norm = Parameter("norm", 1, unit="", interp="log")
def __init__(self, model, norm=norm.quantity):
self.model = model
self._covariance = None
super().__init__(norm=norm)
def evaluate(self, energy, norm):
return norm * self.model(energy)
def integral(self, energy_min, energy_max, **kwargs):
return self.norm.value * self.model.integral(energy_min, energy_max, **kwargs)
class EBLAbsorptionNormSpectralModel(SpectralModel):
r"""Gamma-ray absorption models.
For more information see :ref:`absorption-spectral-model`.
Parameters
----------
energy : `~astropy.units.Quantity`
Energy node values
param : `~astropy.units.Quantity`
Parameter node values
data : `~astropy.units.Quantity`
Model value
redshift : float
Redshift of the absorption model
alpha_norm: float
Norm of the EBL model
interp_kwargs : dict
Interpolation option passed to `ScaledRegularGridInterpolator`.
By default the models are extrapolated outside the range. To prevent
this and raise an error instead use interp_kwargs = {"extrapolate": False}
"""
tag = ["EBLAbsorptionNormSpectralModel", "ebl-norm"]
alpha_norm = Parameter("alpha_norm", 1.0, frozen=True)
redshift = Parameter("redshift", 0.1, frozen=True)
def __init__(self, energy, param, data, redshift, alpha_norm, interp_kwargs=None):
self.filename = None
        self.param = param
        self.energy = energy
self.data = u.Quantity(data, copy=False)
interp_kwargs = interp_kwargs or {}
interp_kwargs.setdefault("points_scale", ("lin", "log"))
interp_kwargs.setdefault("values_scale", "log")
interp_kwargs.setdefault("extrapolate", True)
self._evaluate_table_model = ScaledRegularGridInterpolator(
points=(self.param, self.energy), values=self.data, **interp_kwargs
)
super().__init__(redshift=redshift, alpha_norm=alpha_norm)
def to_dict(self, full_output=False):
data = super().to_dict(full_output=full_output)
if self.filename is None:
data["energy"] = {
"data": self.energy.data.tolist(),
"unit": str(self.energy.unit),
}
data["param"] = {
"data": self.param.data.tolist(),
"unit": str(self.param.unit),
}
data["values"] = {
"data": self.data.data.tolist(),
"unit": str(self.data.unit),
}
else:
data["filename"] = str(self.filename)
return data
@classmethod
def from_dict(cls, data):
redshift = [p["value"] for p in data["parameters"] if p["name"] == "redshift"][
0
]
alpha_norm = [
p["value"] for p in data["parameters"] if p["name"] == "alpha_norm"
][0]
if "filename" in data:
return cls.read(data["filename"], redshift=redshift, alpha_norm=alpha_norm)
else:
energy = u.Quantity(data["energy"]["data"], data["energy"]["unit"])
param = u.Quantity(data["param"]["data"], data["param"]["unit"])
values = u.Quantity(data["values"]["data"], data["values"]["unit"])
return cls(
energy=energy,
param=param,
data=values,
redshift=redshift,
alpha_norm=alpha_norm,
)
@classmethod
def read(cls, filename, redshift=0.1, alpha_norm=1, interp_kwargs=None):
"""Build object from an XSPEC model.
Todo: Format of XSPEC binary files should be referenced at https://gamma-astro-data-formats.readthedocs.io/en/latest/
Parameters
----------
filename : str
File containing the model.
redshift : float
Redshift of the absorption model
alpha_norm: float
Norm of the EBL model
interp_kwargs : dict
Interpolation option passed to `ScaledRegularGridInterpolator`.
"""
# Create EBL data array
filename = make_path(filename)
table_param = Table.read(filename, hdu="PARAMETERS")
# TODO: for some reason the table contain duplicated values
param, idx = np.unique(table_param[0]["VALUE"], return_index=True)
# Get energy values
table_energy = Table.read(filename, hdu="ENERGIES")
energy_lo = u.Quantity(
table_energy["ENERG_LO"], "keV", copy=False
) # unit not stored in file
energy_hi = u.Quantity(
table_energy["ENERG_HI"], "keV", copy=False
) # unit not stored in file
energy = np.sqrt(energy_lo * energy_hi)
# Get spectrum values
table_spectra = Table.read(filename, hdu="SPECTRA")
data = table_spectra["INTPSPEC"].data[idx, :]
model = cls(
energy=energy,
param=param,
data=data,
redshift=redshift,
alpha_norm=alpha_norm,
interp_kwargs=interp_kwargs,
)
model.filename = filename
return model
@classmethod
def read_builtin(
cls, reference="dominguez", redshift=0.1, alpha_norm=1, interp_kwargs=None
):
"""Read from one of the built-in absorption models.
Parameters
----------
reference : {'franceschini', 'dominguez', 'finke'}
            Name of one of the available models in gammapy-data.
redshift : float
Redshift of the absorption model
alpha_norm: float
Norm of the EBL model
References
----------
.. [1] Franceschini et al., "Extragalactic optical-infrared background radiation, its time evolution and the cosmic photon-photon opacity",
`Link <https://ui.adsabs.harvard.edu/abs/2008A%26A...487..837F>`__
.. [2] Dominguez et al., " Extragalactic background light inferred from AEGIS galaxy-SED-type fractions"
`Link <https://ui.adsabs.harvard.edu/abs/2011MNRAS.410.2556D>`__
.. [3] Finke et al., "Modeling the Extragalactic Background Light from Stars and Dust"
`Link <https://ui.adsabs.harvard.edu/abs/2010ApJ...712..238F>`__
"""
models = dict()
models["franceschini"] = "$GAMMAPY_DATA/ebl/ebl_franceschini.fits.gz"
models["dominguez"] = "$GAMMAPY_DATA/ebl/ebl_dominguez11.fits.gz"
models["finke"] = "$GAMMAPY_DATA/ebl/frd_abs.fits.gz"
return cls.read(
models[reference], redshift, alpha_norm, interp_kwargs=interp_kwargs
)
def evaluate(self, energy, redshift, alpha_norm):
"""Evaluate model for energy and parameter value."""
absorption = np.clip(self._evaluate_table_model((redshift, energy)), 0, 1)
return np.power(absorption, alpha_norm)
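# Hedged sketch: an absorbed spectrum is the product of an intrinsic model and
# an EBL absorption model (requires $GAMMAPY_DATA to point at the datasets):
# >>> ebl = EBLAbsorptionNormSpectralModel.read_builtin("dominguez", redshift=0.5)
# >>> absorbed = PowerLawSpectralModel() * ebl  # doctest: +SKIP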
class NaimaSpectralModel(SpectralModel):
r"""A wrapper for Naima models.
For more information see :ref:`naima-spectral-model`.
Parameters
----------
radiative_model : `~naima.models.BaseRadiative`
An instance of a radiative model defined in `~naima.models`
distance : `~astropy.units.Quantity`, optional
Distance to the source. If set to 0, the intrinsic differential
luminosity will be returned. Default is 1 kpc
seed : str or list of str, optional
Seed photon field(s) to be considered for the `radiative_model` flux computation,
in case of a `~naima.models.InverseCompton` model. It can be a subset of the
`seed_photon_fields` list defining the `radiative_model`. Default is the whole list
of photon fields
nested_models : dict
        Additional parameters for nested models not supplied by the radiative
        model; for now this is used only for the synchrotron self-Compton model.
"""
tag = ["NaimaSpectralModel", "naima"]
def __init__(
self, radiative_model, distance=1.0 * u.kpc, seed=None, nested_models=None
):
import naima
self.radiative_model = radiative_model
self._particle_distribution = self.radiative_model.particle_distribution
self.distance = u.Quantity(distance)
self.seed = seed
if nested_models is None:
nested_models = {}
self.nested_models = nested_models
if isinstance(self._particle_distribution, naima.models.TableModel):
param_names = ["amplitude"]
else:
param_names = self._particle_distribution.param_names
parameters = []
for name in param_names:
value = getattr(self._particle_distribution, name)
parameter = Parameter(name, value)
parameters.append(parameter)
# In case of a synchrotron radiative model, append B to the fittable parameters
if "B" in self.radiative_model.param_names:
value = getattr(self.radiative_model, "B")
parameter = Parameter("B", value)
parameters.append(parameter)
# In case of a synchrotron self compton model, append B and Rpwn to the fittable parameters
if (
isinstance(self.radiative_model, naima.models.InverseCompton)
and "SSC" in self.nested_models
):
B = self.nested_models["SSC"]["B"]
radius = self.nested_models["SSC"]["radius"]
parameters.append(Parameter("B", B))
parameters.append(Parameter("radius", radius, frozen=True))
for p in parameters:
p.scale_method = "scale10"
self.default_parameters = Parameters(parameters)
super().__init__()
def _evaluate_ssc(
self, energy,
):
"""
        Compute the photon density spectrum from synchrotron emission for the
        synchrotron self-Compton model, assuming uniform synchrotron emissivity
        inside a sphere of radius R (see Section 4.1 of Atoyan & Aharonian 1996).
        Based on:
        https://naima.readthedocs.io/en/latest/examples.html#crab-nebula-ssc-model
"""
import naima
SYN = naima.models.Synchrotron(
self._particle_distribution,
B=self.B.quantity,
Eemax=self.radiative_model.Eemax,
Eemin=self.radiative_model.Eemin,
)
Esy = np.logspace(-7, 9, 100) * u.eV
Lsy = SYN.flux(Esy, distance=0 * u.cm) # use distance 0 to get luminosity
phn_sy = Lsy / (4 * np.pi * self.radius.quantity ** 2 * const.c) * 2.24
# The factor 2.24 comes from the assumption on uniform synchrotron
# emissivity inside a sphere
if "SSC" not in self.radiative_model.seed_photon_fields:
self.radiative_model.seed_photon_fields["SSC"] = {
"isotropic": True,
"type": "array",
"energy": Esy,
"photon_density": phn_sy,
}
else:
self.radiative_model.seed_photon_fields["SSC"]["photon_density"] = phn_sy
dnde = self.radiative_model.flux(
energy, seed=self.seed, distance=self.distance
) + SYN.flux(energy, distance=self.distance)
return dnde
def evaluate(self, energy, **kwargs):
"""Evaluate the model."""
import naima
for name, value in kwargs.items():
setattr(self._particle_distribution, name, value)
if "B" in self.radiative_model.param_names:
self.radiative_model.B = self.B.quantity
if (
isinstance(self.radiative_model, naima.models.InverseCompton)
and "SSC" in self.nested_models
):
dnde = self._evaluate_ssc(energy.flatten())
elif self.seed is not None:
dnde = self.radiative_model.flux(
energy.flatten(), seed=self.seed, distance=self.distance
)
else:
dnde = self.radiative_model.flux(energy.flatten(), distance=self.distance)
dnde = dnde.reshape(energy.shape)
unit = 1 / (energy.unit * u.cm ** 2 * u.s)
return dnde.to(unit)
def to_dict(self, full_output=True):
        # force full_output to True, otherwise serialization is broken
return super().to_dict(full_output=True)
@classmethod
def from_dict(cls, data):
raise NotImplementedError(
"Currently the NaimaSpectralModel cannot be read from YAML"
)
@classmethod
def from_parameters(cls, parameters, **kwargs):
raise NotImplementedError(
"Currently the NaimaSpectralModel cannot be built from a list of parameters."
)
class GaussianSpectralModel(SpectralModel):
r"""Gaussian spectral model.
For more information see :ref:`gaussian-spectral-model`.
Parameters
----------
norm : `~astropy.units.Quantity`
:math:`N_0`
mean : `~astropy.units.Quantity`
:math:`\bar{E}`
sigma : `~astropy.units.Quantity`
:math:`\sigma`
"""
tag = ["GaussianSpectralModel", "gauss"]
norm = Parameter("norm", 1e-12 * u.Unit("cm-2 s-1"), interp="log")
mean = Parameter("mean", 1 * u.TeV)
sigma = Parameter("sigma", 2 * u.TeV)
@staticmethod
def evaluate(energy, norm, mean, sigma):
return (
norm
/ (sigma * np.sqrt(2 * np.pi))
* np.exp(-((energy - mean) ** 2) / (2 * sigma ** 2))
)
def integral(self, energy_min, energy_max, **kwargs):
r"""Integrate Gaussian analytically.
.. math::
F(E_{min}, E_{max}) = \frac{N_0}{2} \left[ erf(\frac{E - \bar{E}}{\sqrt{2} \sigma})\right]_{E_{min}}^{E_{max}}
Parameters
----------
energy_min, energy_max : `~astropy.units.Quantity`
Lower and upper bound of integration range
"""
# kwargs are passed to this function but not used
# this is to get a consistent API with SpectralModel.integral()
u_min = (
(energy_min - self.mean.quantity) / (np.sqrt(2) * self.sigma.quantity)
).to_value("")
u_max = (
(energy_max - self.mean.quantity) / (np.sqrt(2) * self.sigma.quantity)
).to_value("")
return (
self.norm.quantity
/ 2
* (scipy.special.erf(u_max) - scipy.special.erf(u_min))
)
def energy_flux(self, energy_min, energy_max):
r"""Compute energy flux in given energy range analytically.
.. math::
G(E_{min}, E_{max}) = \frac{N_0 \sigma}{\sqrt{2*\pi}}* \left[ - \exp(\frac{E_{min}-\bar{E}}{\sqrt{2} \sigma})
\right]_{E_{min}}^{E_{max}} + \frac{N_0 * \bar{E}}{2} \left[ erf(\frac{E - \bar{E}}{\sqrt{2} \sigma})
\right]_{E_{min}}^{E_{max}}
Parameters
----------
energy_min, energy_max : `~astropy.units.Quantity`
Lower and upper bound of integration range.
"""
u_min = (
(energy_min - self.mean.quantity) / (np.sqrt(2) * self.sigma.quantity)
).to_value("")
u_max = (
(energy_max - self.mean.quantity) / (np.sqrt(2) * self.sigma.quantity)
).to_value("")
a = self.norm.quantity * self.sigma.quantity / np.sqrt(2 * np.pi)
b = self.norm.quantity * self.mean.quantity / 2
return a * (np.exp(-(u_min ** 2)) - np.exp(-(u_max ** 2))) + b * (
scipy.special.erf(u_max) - scipy.special.erf(u_min)
)
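# Hedged sketch: the Gaussian line model integrates analytically via erf, so
# no numerical quadrature is needed (parameter values illustrative):
# >>> gauss = GaussianSpectralModel(mean="1 TeV", sigma="0.2 TeV")
# >>> gauss.integral(0.5 * u.TeV, 2 * u.TeV)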
# File: application/api/dashboard.py
from flask_restful import Resource, reqparse
from application.common.common_exception import ResourceNotAvailableException
from application.common.constants import APIMessages
from application.common.response import (api_response, STATUS_OK)
from application.common.token import token_required
from application.model.models import Organization
class SideBarMenu(Resource):
"""
URL: /api/sidebar-menu
    Returns the list of allowed modules for the given user in the given
    organization.
Actions:
GET:
- Returns the List of allowed permissions for the user in the given
Organization.
"""
@token_required
def get(self, session):
sidebar_menu_parser = reqparse.RequestParser()
sidebar_menu_parser.add_argument('org_id',
help=APIMessages.PARSER_MESSAGE.format(
'org_id'), required=True,
type=int, location='args')
sidebar_menu_args = sidebar_menu_parser.parse_args()
org_obj = Organization.query.filter_by(
org_id=sidebar_menu_args['org_id']).first()
if not org_obj:
raise ResourceNotAvailableException(
APIMessages.INVALID_ORG_ID)
sidebar_menu = dict()
sidebar_menu["org_name"] = org_obj.org_name
sidebar_menu["org_id"] = org_obj.org_id
sidebar_menu["allow_modules"] = ["user-mgt", "project-mgt",
"test-suite",
"list of all the module to load"]
return api_response(True, APIMessages.SUCCESS, STATUS_OK, sidebar_menu)
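# Hedged sketch (registration is not shown in this file): with Flask-RESTful
# the resource would typically be wired to its URL as
# >>> api.add_resource(SideBarMenu, '/api/sidebar-menu')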
# File: enaml/qt/qt_label.py
#------------------------------------------------------------------------------
# Copyright (c) 2013, Nucleic Development Team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file COPYING.txt, distributed with this software.
#------------------------------------------------------------------------------
from PyQt4.QtCore import Qt
from PyQt4.QtGui import QLabel
from atom.api import Typed
from enaml.widgets.label import ProxyLabel
from .qt_constraints_widget import size_hint_guard
from .qt_control import QtControl
ALIGN_MAP = {
'left': Qt.AlignLeft,
'right': Qt.AlignRight,
'center': Qt.AlignHCenter,
'justify': Qt.AlignJustify,
}
VERTICAL_ALIGN_MAP = {
'top': Qt.AlignTop,
'bottom': Qt.AlignBottom,
'center': Qt.AlignVCenter,
}
class QtLabel(QtControl, ProxyLabel):
""" A Qt implementation of an Enaml ProxyLabel.
"""
#: A reference to the widget created by the proxy.
widget = Typed(QLabel)
#--------------------------------------------------------------------------
# Initialization API
#--------------------------------------------------------------------------
def create_widget(self):
""" Create the underlying label widget.
"""
self.widget = QLabel(self.parent_widget())
def init_widget(self):
""" Initialize the underlying widget.
"""
super(QtLabel, self).init_widget()
d = self.declaration
self.set_text(d.text, sh_guard=False)
self.set_align(d.align)
self.set_vertical_align(d.vertical_align)
#--------------------------------------------------------------------------
# ProxyLabel API
#--------------------------------------------------------------------------
def set_text(self, text, sh_guard=True):
""" Set the text in the widget.
"""
if sh_guard:
with size_hint_guard(self):
self.widget.setText(text)
else:
self.widget.setText(text)
def set_align(self, align):
""" Set the alignment of the text in the widget.
"""
widget = self.widget
alignment = widget.alignment()
alignment &= ~Qt.AlignHorizontal_Mask
alignment |= ALIGN_MAP[align]
widget.setAlignment(alignment)
def set_vertical_align(self, align):
""" Set the vertical alignment of the text in the widget.
"""
widget = self.widget
alignment = widget.alignment()
alignment &= ~Qt.AlignVertical_Mask
alignment |= VERTICAL_ALIGN_MAP[align]
widget.setAlignment(alignment)
# File: (Django user-profile models module; path not given in the source)
from django.contrib.auth.models import User
from django.db import models
class UserProfile(models.Model):
user = models.OneToOneField(
User, null=True, blank=True, related_name='user_profile', on_delete=models.CASCADE)
def __str__(self):
return self.user.username if self.user else str(self.pk)
# File: apps/comments/urls.py (repo: ilindrey/whatyouknow)
from django.urls import path, include
from .views import CommentListView, CreateCommentView, EditCommentView
urlpatterns = [
path('ajax/', include([
path('load_comment_list', CommentListView.as_view(), name='comment_list'),
path('create_comment', CreateCommentView.as_view(), name='comment_create'),
path('<int:pk>/', include([
path('edit_comment', EditCommentView.as_view(), name='comment_edit')
])),
])),
]
# File: (mongoframes factory maker tests; path not given in the source)
import re
from mongoframes.factory import blueprints
from mongoframes.factory import makers
from mongoframes.factory import quotas
from mongoframes.factory.makers import selections as selection_makers
from mongoframes.factory.makers import text as text_makers
from tests.fixtures import *
def test_maker():
"""
The base maker class should provide context for the current target document.
"""
document = {'foo': 'bar'}
maker = makers.Maker()
# Check the target for the maker is correctly set using the `target` context
# method.
with maker.target(document):
assert maker.document == document
# Once the maker falls out of context check the document has been unset
assert maker.document == None
def test_dict_of():
"""
`DictOf` makers should return a dictionary where each key's value is either
    a JSON type value or the output of a maker.
"""
maker = makers.DictOf({
'json_type': 'foo',
'maker': makers.Lambda(lambda doc: 'bar')
})
# Check the assembled result
assembled = maker._assemble()
assert assembled == {
'json_type': None,
'maker': 'bar'
}
# Check the finished result
finished = maker._finish(assembled)
assert finished == {
'json_type': 'foo',
'maker': 'bar'
}
def test_faker():
"""
`Faker` makers should call a faker library provider and return the output as
the value.
"""
am_pm = {'AM', 'PM'}
# Configured as assembler
maker = makers.Faker('am_pm')
# Check the assembled result
assembled = maker._assemble()
assert assembled in am_pm
# Check the finished result
finished = maker._finish(assembled)
assert finished in am_pm
# Configured as finisher
maker = makers.Faker('am_pm', assembler=False)
# Check the assembled result
assembled = maker._assemble()
assert assembled == None
# Check the finished result
finished = maker._finish(assembled)
assert finished in am_pm
# Configured with a different locale
maker = makers.Faker('postcode', locale='en_GB')
# Check the assembled result resembles a UK postcode
assembled = maker._assemble()
    assert re.match(r'(\w+?\d{1,2}).*', assembled) and len(assembled) <= 8
def test_lambda():
"""
`Lambda` makers should return the output of the function you initialize them
with.
"""
# Configured as assembler
maker = makers.Lambda(lambda doc: 'foo')
# Check the assembled result
assembled = maker._assemble()
assert assembled == 'foo'
# Check the finished result
finished = maker._finish(assembled)
assert finished == 'foo'
# Configured as finisher
maker = makers.Lambda(lambda doc, v: 'bar', assembler=False, finisher=True)
# Check the assembled result
assembled = maker._assemble()
assert assembled == None
# Check the finished result
finished = maker._finish(assembled)
assert finished == 'bar'
# Configured as both an assembler and finisher
def func(doc, value=None):
if value:
return value + 'bar'
return 'foo'
maker = makers.Lambda(func, finisher=True)
# Check the assembled result
assembled = maker._assemble()
assert assembled == 'foo'
# Check the finished result
finished = maker._finish(assembled)
assert finished == 'foobar'
def test_list_of():
"""
`ListOf` makers should return a list of values generated by calling a maker
multiple times.
"""
# Configured to not reset sub-maker
maker = makers.ListOf(
selection_makers.Cycle(list('abcde')),
quotas.Quota(6)
)
# Check the assembled result
assembled = maker._assemble()
assert assembled == [[i, None] for i in [0, 1, 2, 3, 4, 0]]
# Check the finished result
finished = maker._finish(assembled)
assert finished == list('abcdea')
# Check that calling the maker again continues from where we left off
assembled = maker._assemble()
assert assembled == [[i, None] for i in [1, 2, 3, 4, 0, 1]]
# Configured to reset sub-maker
maker = makers.ListOf(
selection_makers.Cycle(list('abcde')),
quotas.Quota(6),
reset_maker=True
)
# Call the maker twice
assembled = maker._assemble()
assembled = maker._assemble()
# Check the result was reset after the first call
assert assembled == [[i, None] for i in [0, 1, 2, 3, 4, 0]]
def test_static():
"""`Static` makers should return the value you initialize them with"""
# Configured as assembler
value = {'foo': 'bar'}
maker = makers.Static(value)
# Check the assembled result
assembled = maker._assemble()
assert assembled == value
# Check the finished result
finished = maker._finish(assembled)
assert finished == value
# Configured as finisher
value = {'foo': 'bar'}
maker = makers.Static(value, assembler=False)
# Check the assembled result
assembled = maker._assemble()
assert assembled == None
# Check the finished result
finished = maker._finish(assembled)
assert finished == value
def test_sub_factory(mocker):
"""
`SubFactory` makers should return a sub-frame/document using a blueprint.
"""
# Define a blueprint
class InventoryBlueprint(blueprints.Blueprint):
_frame_cls = Inventory
gold = makers.Static(10)
skulls = makers.Static(100)
# Configure the maker
maker = makers.SubFactory(InventoryBlueprint)
# Check the assembled result
assembled = maker._assemble()
assert assembled == {'gold': 10, 'skulls': 100}
# Check the finished result
finished = maker._finish(assembled)
assert isinstance(finished, Inventory)
assert finished._document == {'gold': 10, 'skulls': 100}
# Reset should reset the sub factories associated blueprint
mocker.spy(InventoryBlueprint, 'reset')
maker.reset()
assert InventoryBlueprint.reset.call_count == 1
def test_unique():
"""
    `Unique` makers guarantee a unique value is returned from the maker they are
    wrapped around.
"""
    # Configured as assembler
maker = makers.Unique(makers.Faker('name'))
    # Generate 20 random names
names = set([])
for i in range(0, 20):
assembled = maker._assemble()
assert assembled not in names
names.add(assembled)
    # Configured as finisher
maker = makers.Unique(makers.Faker('name'), assembler=False)
    # Generate 20 random names
names = set([])
for i in range(0, 20):
finished = maker._finish(maker._assemble())
assert finished not in names
names.add(finished)
# Check that unique will eventually fail if it cannot generate a unique
# response with a maker.
maker = makers.Unique(makers.Static('foo'))
failed = False
try:
for i in range(0, 100):
finished = maker._finish(maker._assemble())
except AssertionError:
failed = True
assert failed
    # Check that we can include a set of initial excluded values
maker = makers.Unique(
text_makers.Sequence('test-{index}'),
exclude={'test-3'}
)
names = set([])
for i in range(0, 9):
assembled = maker._assemble()
names.add(assembled)
assert 'test-3' not in names
    # Reset should clear the generated unique values from the maker and allow
    # those values to be generated again.
maker = makers.Unique(makers.Static('foo'), assembler=False)
failed = False
try:
for i in range(0, 100):
finished = maker._finish(maker._assemble())
maker.reset()
except AssertionError:
failed = True
assert not failed
|
StarcoderdataPython
|
6682917
|
from typing import Optional
from .base import Base
class TzInfo(Base):
# Time zone in seconds from UTC
offset: int
# Name of the time zone
name: str
# Abbreviated name of the time zone
abbr: Optional[str]
# Daylight saving time
dst: Optional[bool]
class Info(Base):
# The latitude (in degrees).
lat: float
# The longitude (in degrees)
lon: float
# Information about the time zone
tzinfo: Optional[TzInfo]
# The normal pressure for the given coordinates (mm Hg)
def_pressure_mm: Optional[int]
# The normal pressure for the given coordinates (hPa)
def_pressure_pa: Optional[int]
# Locality page on Yandex.Weather (https://yandex.ru/pogoda/)
url: str
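# A hypothetical construction sketch, assuming `Base` behaves like a
# pydantic-style model (its implementation is not shown in this module):
#
# info = Info(
#     lat=55.75,
#     lon=37.62,
#     tzinfo=TzInfo(offset=10800, name="Europe/Moscow", abbr="MSK", dst=False),
#     def_pressure_mm=745,
#     def_pressure_pa=993,
#     url="https://yandex.ru/pogoda/moscow",
# )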
|
StarcoderdataPython
|
135206
|
#!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates. All Rights Reserved
import logging
from typing import Any, Callable, Dict, List, Optional
import torch.nn as nn
from pytext.contrib.pytext_lib.data.datasets.batchers import Batcher
from pytext.contrib.pytext_lib.data.datasets.pytext_dataset import PyTextDataset
from pytext.data.sources.data_source import SafeFileWrapper
from pytext.data.sources.tsv import TSV
logger = logging.getLogger(__name__)
class DeprecatedTsvDataset(PyTextDataset):
def __init__(
self,
path: str,
columns: List[Any] = None,
column_mapping: Optional[Dict[str, str]] = None,
delimiter: str = "\t",
batch_size: Optional[int] = None,
is_shuffle: bool = True,
transform: Optional[nn.Module] = None,
custom_batcher: Optional[Batcher] = None,
collate_fn: Optional[Callable] = None,
chunk_size: int = 1000,
is_cycle: bool = False,
length: Optional[int] = None,
rank: int = 0,
world_size: int = 1,
*args,
**kwargs,
):
logger.debug(f"init TsvDataset from: {path}")
columns = columns or ["text", "label"]
if column_mapping:
raise NotImplementedError("column mapping is not supported for tsv yet!")
self.file = SafeFileWrapper(path, encoding="utf-8", errors="replace")
tsv_iterator = TSV(self.file, field_names=columns, delimiter=delimiter)
super().__init__(
iterable=tsv_iterator,
batch_size=batch_size,
is_shuffle=is_shuffle,
transform=transform,
custom_batcher=custom_batcher,
collate_fn=collate_fn,
chunk_size=chunk_size,
is_cycle=is_cycle,
length=length,
rank=rank,
world_size=world_size,
)
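# A minimal instantiation sketch; the path and column names below are
# illustrative assumptions, not values from this module:
#
# dataset = DeprecatedTsvDataset(
#     path="/data/train.tsv",
#     columns=["text", "label"],
#     batch_size=32,
#     is_shuffle=False,
# )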
|
StarcoderdataPython
|
8070877
|
<reponame>zhnlks/ShiPanE-Python-SDK
# -*- coding: utf-8 -*-
import codecs
import os
import unittest
import six
from six.moves import configparser
if six.PY2:
ConfigParser = configparser.RawConfigParser
else:
ConfigParser = configparser.ConfigParser
from shipane_sdk.joinquant.client import JoinQuantClient
class JoinQuantClientTest(unittest.TestCase):
def setUp(self):
config = ConfigParser()
dir_path = os.path.dirname(os.path.realpath(__file__))
config.readfp(codecs.open('{}/../../config/config.ini'.format(dir_path), encoding="utf_8_sig"))
self._jq_client = JoinQuantClient(**dict(config.items('JoinQuant')))
def test_query(self):
self._jq_client.login()
transactions = self._jq_client.query()
self.assertTrue(isinstance(transactions, list))
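# Allow running this test module directly (a small convenience addition):
if __name__ == '__main__':
    unittest.main()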
|
StarcoderdataPython
|
87548
|
<gh_stars>1-10
from typing import Dict
import numpy as np
from qulacs import QuantumCircuit
from qulacs.gate import CNOT, TOFFOLI, DenseMatrix, to_matrix_gate
def load_circuit_data() -> Dict[str, QuantumCircuit]:
circuits = {}
circuits["empty_circuit"] = empty_circuit()
circuits["x_gate_circuit"] = x_gate_circuit()
circuits["y_gate_circuit"] = y_gate_circuit()
circuits["z_gate_circuit"] = z_gate_circuit()
circuits["cz_gate_circuit"] = cz_gate_circuit()
circuits["cnot_gate_circuit"] = cnot_gate_circuit()
circuits["ctl_wire_should_not_overlap"] = ctl_wire_should_not_overlap()
circuits["swap_circuit"] = swap_circuit()
circuits[
"multiple_swap_gates_should_not_overlap"
] = multiple_swap_gates_should_not_overlap()
circuits["dense_matrix_gate_circuit"] = dense_matrix_gate_circuit()
circuits[
"dense_matrix_gate_with_target_bits"
] = dense_matrix_gate_with_target_bits()
circuits[
"dense_matrix_gate_with_separated_target_bits"
] = dense_matrix_gate_with_separated_target_bits()
circuits[
"dense_matrix_gate_should_not_overlap"
] = dense_matrix_gate_should_not_overlap()
circuits["toffoli_gate_circuit"] = toffoli_gate_circuit()
circuits["xyz_horizontal_circuit"] = xyz_horizontal_circuit()
circuits["xyz_vertical_circuit"] = xyz_vertical_circuit()
return circuits
def empty_circuit() -> QuantumCircuit:
return QuantumCircuit(3)
def x_gate_circuit() -> QuantumCircuit:
circuit = QuantumCircuit(1)
circuit.add_X_gate(0)
return circuit
def y_gate_circuit() -> QuantumCircuit:
circuit = QuantumCircuit(1)
circuit.add_Y_gate(0)
return circuit
def z_gate_circuit() -> QuantumCircuit:
circuit = QuantumCircuit(1)
circuit.add_Z_gate(0)
return circuit
def cz_gate_circuit() -> QuantumCircuit:
circuit = QuantumCircuit(2)
circuit.add_CZ_gate(0, 1)
circuit.add_CZ_gate(1, 0)
return circuit
def cnot_gate_circuit() -> QuantumCircuit:
circuit = QuantumCircuit(2)
circuit.add_CNOT_gate(0, 1)
circuit.add_CNOT_gate(1, 0)
return circuit
def ctl_wire_should_not_overlap() -> QuantumCircuit:
circuit = QuantumCircuit(3)
circuit.add_X_gate(1)
circuit.add_CZ_gate(0, 2)
circuit.add_X_gate(1)
circuit.add_CNOT_gate(0, 2)
circuit.add_X_gate(1)
return circuit
def swap_circuit() -> QuantumCircuit:
circuit = QuantumCircuit(2)
circuit.add_SWAP_gate(0, 1)
circuit.add_SWAP_gate(1, 0)
return circuit
def multiple_swap_gates_should_not_overlap() -> QuantumCircuit:
circuit = QuantumCircuit(4)
circuit.add_SWAP_gate(0, 2)
circuit.add_SWAP_gate(1, 3)
return circuit
def dense_matrix_gate_circuit() -> QuantumCircuit:
circuit = QuantumCircuit(2)
circuit.add_dense_matrix_gate(
[0, 1], [[1, 0, 0, 0], [0, 1, 0, 0], [0, 0, 0, 1], [0, 0, 1, 0]]
)
return circuit
def dense_matrix_gate_with_target_bits() -> QuantumCircuit:
circuit = QuantumCircuit(3)
    # CCX(0, 1, 2): controls on qubits 0 and 1, target on qubit 2
cx_gate = CNOT(1, 2)
cx_mat_gate = to_matrix_gate(cx_gate)
control_index = 0
control_with_value = 1
cx_mat_gate.add_control_qubit(control_index, control_with_value)
circuit.add_gate(cx_mat_gate)
return circuit
def dense_matrix_gate_with_separated_target_bits() -> QuantumCircuit:
circuit = QuantumCircuit(5)
mat = np.identity(2 ** 3)
# 3-qubit gate applied to [0,3,4], and [1] qubit is control-qubit
c_dense_gate = DenseMatrix([0, 3, 4], mat)
control_index = 1
control_with_value = 1
c_dense_gate.add_control_qubit(control_index, control_with_value)
circuit.add_gate(c_dense_gate)
return circuit
def dense_matrix_gate_should_not_overlap() -> QuantumCircuit:
circuit = QuantumCircuit(5)
mat = np.identity(2 ** 3)
# 3-qubit gate applied to [0,2,4]
circuit.add_dense_matrix_gate([0, 2, 4], mat)
# 2-qubit gate applied to [1,3]
mat = np.identity(2 ** 2)
circuit.add_dense_matrix_gate([1, 3], mat)
return circuit
def toffoli_gate_circuit() -> QuantumCircuit:
circuit = QuantumCircuit(3)
ccx = TOFFOLI(0, 1, 2)
circuit.add_gate(ccx)
ccx = TOFFOLI(1, 2, 0)
circuit.add_gate(ccx)
ccx = TOFFOLI(0, 2, 1)
circuit.add_gate(ccx)
return circuit
def xyz_horizontal_circuit() -> QuantumCircuit:
circuit = QuantumCircuit(1)
circuit.add_X_gate(0)
circuit.add_Y_gate(0)
circuit.add_Z_gate(0)
return circuit
def xyz_vertical_circuit() -> QuantumCircuit:
circuit = QuantumCircuit(3)
circuit.add_X_gate(0)
circuit.add_Y_gate(1)
circuit.add_Z_gate(2)
return circuit
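# A small usage sketch (assuming only the documented qulacs QuantumCircuit
# API): load every fixture circuit and report its gate count.
if __name__ == "__main__":
    for name, circuit in load_circuit_data().items():
        print(f"{name}: {circuit.get_gate_count()} gates")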
|
StarcoderdataPython
|
288669
|
<gh_stars>100-1000
class Tape:
"""
    Allows writing to the end of a file-like object while maintaining the read pointer accurately.
The read operation actually removes characters read from the buffer.
"""
def __init__(self, initial_value:str=''):
"""
:param initial_value: initialize the Tape with a preset string
"""
self._buffer = initial_value
def read(self, size:int=None):
"""
:param size: number of characters to read from the buffer
:return: string that has been read from the buffer
"""
        if size is not None:
result = self._buffer[0:size]
self._buffer = self._buffer[size:]
return result
else:
result = self._buffer
self._buffer = ''
return result
def write(self, s:str):
"""
:param s: some characters to write to the end of the tape
:return: length of characters written
"""
self._buffer += s
return len(s)
def __len__(self):
return len(self._buffer)
def __str__(self):
return self._buffer
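# A quick demonstration of the Tape semantics described above:
if __name__ == '__main__':
    tape = Tape('abc')
    tape.write('def')
    print(tape.read(2))  # 'ab' -- reading consumes from the front
    print(str(tape))     # 'cdef' -- unread characters remain
    print(len(tape))     # 4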
|
StarcoderdataPython
|
3273137
|
<reponame>YoungxHelsinki/GoldenRatio
import csv_lab
csv_path = '/Users/young/datahackathon/vuokraovi_retrieve/no_decimal.csv'
csv_list = csv_lab.csv_to_list(csv_path)
pos = 5
columns = ['image']
new_path = 'no_decimal_image.csv'
csv_lab.insert_column(csv_list, columns, pos, new_path)
|
StarcoderdataPython
|
396027
|
<reponame>daobook/myst-parser
"""Uses sphinx's pytest fixture to run builds.
see conftest.py for fixture usage
NOTE: sphinx 3 & 4 regress against different output files,
the major difference being sphinx 4 uses docutils 0.17,
which uses semantic HTML tags
(e.g. converting `<div class="section">` to `<section>`)
"""
import os
import re
import pytest
import sphinx
from docutils import VersionInfo, __version_info__
SOURCE_DIR = os.path.abspath(os.path.join(os.path.dirname(__file__), "sourcedirs"))
@pytest.mark.sphinx(
buildername="html", srcdir=os.path.join(SOURCE_DIR, "basic"), freshenv=True
)
def test_basic(
app,
status,
warning,
get_sphinx_app_doctree,
get_sphinx_app_output,
remove_sphinx_builds,
):
"""basic test."""
app.build()
assert "build succeeded" in status.getvalue() # Build succeeded
warnings = warning.getvalue().strip()
assert warnings == ""
get_sphinx_app_doctree(
app,
docname="content",
regress=True,
regress_ext=f".sphinx{sphinx.version_info[0]}.xml",
)
get_sphinx_app_doctree(
app,
docname="content",
resolve=True,
regress=True,
regress_ext=f".sphinx{sphinx.version_info[0]}.xml",
)
get_sphinx_app_output(
app,
filename="content.html",
regress_html=True,
regress_ext=f".sphinx{sphinx.version_info[0]}.html",
)
assert app.env.metadata["content"] == {
"author": "<NAME>",
"authors": ["<NAME>", "<NAME>"],
"organization": "EPFL",
"address": "1 Cedar Park Close\nThundersley\nEssex\n",
"contact": "https://example.com",
"version": "1.0",
"revision": "1.1",
"status": "good",
"date": "2/12/1985",
"copyright": "MIT",
"other": "Something else",
"wordcount": {"minutes": 0, "words": 57},
}
@pytest.mark.sphinx(
buildername="html",
srcdir=os.path.join(SOURCE_DIR, "references"),
freshenv=True,
)
def test_references(
app,
status,
warning,
get_sphinx_app_doctree,
get_sphinx_app_output,
remove_sphinx_builds,
):
"""Test reference resolution."""
app.build()
assert "build succeeded" in status.getvalue() # Build succeeded
warnings = warning.getvalue().strip()
assert warnings == ""
try:
get_sphinx_app_doctree(app, docname="index", regress=True)
finally:
get_sphinx_app_doctree(app, docname="index", resolve=True, regress=True)
get_sphinx_app_output(
app,
filename="index.html",
regress_html=True,
regress_ext=f".sphinx{sphinx.version_info[0]}.html",
)
@pytest.mark.sphinx(
buildername="singlehtml",
srcdir=os.path.join(SOURCE_DIR, "references_singlehtml"),
freshenv=True,
confoverrides={"nitpicky": True},
)
def test_references_singlehtml(
app,
status,
warning,
get_sphinx_app_doctree,
get_sphinx_app_output,
remove_sphinx_builds,
):
"""Test reference resolution for singlehtml builds."""
app.build()
assert "build succeeded" in status.getvalue() # Build succeeded
warnings = warning.getvalue().strip()
assert warnings == ""
# try:
# get_sphinx_app_doctree(app, docname="index", regress=True)
# finally:
# get_sphinx_app_doctree(app, docname="index", resolve=True, regress=True)
try:
get_sphinx_app_doctree(
app,
docname="other/other",
regress=True,
replace={"other\\other.md": "other/other.md"},
)
finally:
get_sphinx_app_doctree(
app,
docname="other/other",
resolve=True,
regress=True,
replace={"other\\other.md": "other/other.md"},
)
get_sphinx_app_output(
app,
filename="index.html",
buildername="singlehtml",
regress_html=True,
regress_ext=f".sphinx{sphinx.version_info[0]}.html",
)
@pytest.mark.sphinx(
buildername="html",
srcdir=os.path.join(SOURCE_DIR, "heading_slug_func"),
freshenv=True,
)
def test_heading_slug_func(
app,
status,
warning,
get_sphinx_app_doctree,
get_sphinx_app_output,
remove_sphinx_builds,
):
"""Test heading_slug_func configuration."""
app.build()
assert "build succeeded" in status.getvalue() # Build succeeded
warnings = warning.getvalue().strip()
assert warnings == ""
try:
get_sphinx_app_doctree(app, docname="index", regress=True)
finally:
get_sphinx_app_doctree(app, docname="index", resolve=True, regress=True)
get_sphinx_app_output(
app,
filename="index.html",
regress_html=True,
regress_ext=f".sphinx{sphinx.version_info[0]}.html",
)
@pytest.mark.sphinx(
buildername="html",
srcdir=os.path.join(SOURCE_DIR, "extended_syntaxes"),
freshenv=True,
)
def test_extended_syntaxes(
app,
status,
warning,
get_sphinx_app_doctree,
get_sphinx_app_output,
remove_sphinx_builds,
monkeypatch,
):
"""test setting addition configuration values."""
from myst_parser.sphinx_renderer import SphinxRenderer
monkeypatch.setattr(SphinxRenderer, "_random_label", lambda self: "mock-uuid")
app.build()
assert "build succeeded" in status.getvalue() # Build succeeded
warnings = warning.getvalue().strip()
assert warnings == ""
try:
get_sphinx_app_doctree(
app,
docname="index",
regress=True,
regress_ext=f".sphinx{sphinx.version_info[0]}.xml",
)
finally:
get_sphinx_app_output(
app,
filename="index.html",
regress_html=True,
regress_ext=f".sphinx{sphinx.version_info[0]}.html",
)
@pytest.mark.sphinx(
buildername="html", srcdir=os.path.join(SOURCE_DIR, "includes"), freshenv=True
)
def test_includes(
app,
status,
warning,
get_sphinx_app_doctree,
get_sphinx_app_output,
remove_sphinx_builds,
):
"""Test of include directive."""
app.build()
assert "build succeeded" in status.getvalue() # Build succeeded
warnings = warning.getvalue().strip()
assert warnings == ""
try:
get_sphinx_app_doctree(
app,
docname="index",
regress=True,
regress_ext=f".sphinx{sphinx.version_info[0]}.xml",
# fix for Windows CI
replace={
r"subfolder\example2.jpg": "subfolder/example2.jpg",
r"subfolder\\example2.jpg": "subfolder/example2.jpg",
r"subfolder\\\\example2.jpg": "subfolder/example2.jpg",
},
)
finally:
get_sphinx_app_output(
app,
filename="index.html",
regress_html=True,
regress_ext=f".sphinx{sphinx.version_info[0]}.html",
replace={
r"'subfolder\\example2'": "'subfolder/example2'",
r'uri="subfolder\\example2"': 'uri="subfolder/example2"',
"_images/example21.jpg": "_images/example2.jpg",
},
)
@pytest.mark.skipif(
__version_info__ < VersionInfo(0, 17, 0, "final", 0, True),
reason="parser option added in docutils 0.17",
)
@pytest.mark.sphinx(
buildername="html",
srcdir=os.path.join(SOURCE_DIR, "include_from_rst"),
freshenv=True,
)
def test_include_from_rst(
app,
status,
warning,
get_sphinx_app_doctree,
get_sphinx_app_output,
remove_sphinx_builds,
):
"""Test of include directive inside RST file."""
app.build()
assert "build succeeded" in status.getvalue() # Build succeeded
warnings = warning.getvalue().strip()
assert warnings == ""
get_sphinx_app_doctree(
app,
docname="index",
regress=True,
regress_ext=".xml",
)
@pytest.mark.sphinx(
buildername="html", srcdir=os.path.join(SOURCE_DIR, "footnotes"), freshenv=True
)
def test_footnotes(
app,
status,
warning,
get_sphinx_app_doctree,
get_sphinx_app_output,
remove_sphinx_builds,
):
"""Test of include directive."""
app.build()
assert "build succeeded" in status.getvalue() # Build succeeded
warnings = warning.getvalue().strip()
assert warnings == ""
try:
get_sphinx_app_doctree(app, docname="footnote_md", regress=True)
finally:
get_sphinx_app_output(
app,
filename="footnote_md.html",
regress_html=True,
regress_ext=f".sphinx{sphinx.version_info[0]}.html",
)
@pytest.mark.sphinx(
buildername="html",
srcdir=os.path.join(SOURCE_DIR, "commonmark_only"),
freshenv=True,
)
def test_commonmark_only(
app,
status,
warning,
get_sphinx_app_doctree,
get_sphinx_app_output,
remove_sphinx_builds,
):
"""test setting addition configuration values."""
app.build()
assert "build succeeded" in status.getvalue() # Build succeeded
warnings = warning.getvalue().strip()
assert "lexer name '{note}'" in warnings
try:
get_sphinx_app_doctree(app, docname="index", regress=True)
finally:
get_sphinx_app_output(
app,
filename="index.html",
regress_html=True,
regress_ext=f".sphinx{sphinx.version_info[0]}.html",
)
@pytest.mark.sphinx(
buildername="html",
srcdir=os.path.join(SOURCE_DIR, "substitutions"),
freshenv=True,
)
def test_substitutions(
app,
status,
warning,
get_sphinx_app_doctree,
get_sphinx_app_output,
remove_sphinx_builds,
file_regression,
):
"""test setting addition configuration values."""
app.build()
assert "build succeeded" in status.getvalue() # Build succeeded
warnings = warning.getvalue().strip()
assert warnings == ""
try:
get_sphinx_app_doctree(app, docname="index", regress=True)
file_regression.check(
get_sphinx_app_doctree(app, docname="other").pformat(),
extension=".other.xml",
)
finally:
get_sphinx_app_output(app, filename="index.html", regress_html=True)
@pytest.mark.sphinx(
buildername="gettext", srcdir=os.path.join(SOURCE_DIR, "gettext"), freshenv=True
)
def test_gettext(
app,
status,
warning,
get_sphinx_app_output,
remove_sphinx_builds,
file_regression,
):
"""Test gettext message extraction."""
app.build()
assert "build succeeded" in status.getvalue() # Build succeeded
warnings = warning.getvalue().strip()
assert warnings == ""
output = get_sphinx_app_output(app, filename="index.pot", buildername="gettext")
output = re.sub(r"POT-Creation-Date: [0-9: +-]+", "POT-Creation-Date: ", output)
output = re.sub(r"Copyright \(C\) [0-9]{4}", "Copyright (C) XXXX", output)
file_regression.check(output, extension=f".sphinx{sphinx.version_info[0]}.pot")
@pytest.mark.sphinx(
buildername="html",
srcdir=os.path.join(SOURCE_DIR, "gettext"),
freshenv=True,
confoverrides={"language": "fr", "gettext_compact": False, "locale_dirs": ["."]},
)
def test_gettext_html(
app,
status,
warning,
get_sphinx_app_doctree,
get_sphinx_app_output,
remove_sphinx_builds,
):
"""Test gettext message extraction."""
app.build()
assert "build succeeded" in status.getvalue() # Build succeeded
warnings = warning.getvalue().strip()
assert warnings == ""
try:
get_sphinx_app_doctree(
app,
docname="index",
regress=True,
regress_ext=f".sphinx{sphinx.version_info[0]}.xml",
)
finally:
get_sphinx_app_doctree(
app,
docname="index",
resolve=True,
regress=True,
regress_ext=f".sphinx{sphinx.version_info[0]}.xml",
)
get_sphinx_app_output(
app,
filename="index.html",
regress_html=True,
regress_ext=f".sphinx{sphinx.version_info[0]}.html",
)
@pytest.mark.sphinx(
buildername="gettext",
srcdir=os.path.join(SOURCE_DIR, "gettext"),
freshenv=True,
confoverrides={
"gettext_additional_targets": [
"index",
"literal-block",
"doctest-block",
"raw",
"image",
],
},
)
def test_gettext_additional_targets(
app,
status,
warning,
get_sphinx_app_output,
remove_sphinx_builds,
file_regression,
):
"""Test gettext message extraction."""
app.build()
assert "build succeeded" in status.getvalue() # Build succeeded
warnings = warning.getvalue().strip()
assert warnings == ""
output = get_sphinx_app_output(app, filename="index.pot", buildername="gettext")
output = re.sub(r"POT-Creation-Date: [0-9: +-]+", "POT-Creation-Date: ", output)
output = re.sub(r"Copyright \(C\) [0-9]{4}", "Copyright (C) XXXX", output)
file_regression.check(output, extension=f".sphinx{sphinx.version_info[0]}.pot")
@pytest.mark.sphinx(
buildername="html", srcdir=os.path.join(SOURCE_DIR, "mathjax"), freshenv=True
)
def test_mathjax_warning(
app,
status,
warning,
remove_sphinx_builds,
):
"""Test mathjax config override warning."""
app.build()
assert "build succeeded" in status.getvalue() # Build succeeded
warnings = warning.getvalue().strip()
assert (
"overridden by myst-parser: 'other' -> 'tex2jax_process|mathjax_process|math|output_area'"
in warnings
)
@pytest.mark.sphinx(
buildername="html",
srcdir=os.path.join(SOURCE_DIR, "fieldlist"),
freshenv=True,
)
def test_fieldlist_extension(
app,
status,
warning,
get_sphinx_app_doctree,
get_sphinx_app_output,
remove_sphinx_builds,
):
"""test setting addition configuration values."""
app.build()
assert "build succeeded" in status.getvalue() # Build succeeded
warnings = warning.getvalue().strip()
assert warnings == ""
try:
get_sphinx_app_doctree(
app,
docname="index",
regress=True,
regress_ext=f".sphinx{sphinx.version_info[0]}.xml",
)
finally:
get_sphinx_app_output(
app,
filename="index.html",
regress_html=True,
regress_ext=f".sphinx{sphinx.version_info[0]}.html",
)
|
StarcoderdataPython
|
3543717
|
import warnings
from dataclasses import dataclass
from pathlib import Path
from typing import List, Union, Dict
from unittest import main, TestCase
from openmaptiles.sql import collect_sql, sql_assert_table, sql_assert_func
from openmaptiles.tileset import ParsedData, Tileset
@dataclass
class Case:
id: str
query: str
reqs: Union[str, List[str], Dict[str, Union[str, List[str]]]] = None
def expected_sql(case: Case):
result = f"DO $$ BEGIN RAISE NOTICE 'Processing layer {case.id}'; END$$;\n\n"
if isinstance(case.reqs, dict):
# Use helper functions for SQL generation. Actual SQL is tested by integration tests
for table in case.reqs.get('tables', []):
result += sql_assert_table(table, case.reqs.get('helpText'), case.id)
for func in case.reqs.get('functions', []):
result += sql_assert_func(func, case.reqs.get('helpText'), case.id)
result += f"""\
-- Layer {case.id} - {case.id}_s.yaml
{case.query}
DO $$ BEGIN RAISE NOTICE 'Finished layer {case.id}'; END$$;
"""
return result
def parsed_data(layers: Union[Case, List[Case]]):
return ParsedData(dict(
tileset=(dict(
attribution='test_attribution',
bounds='test_bounds',
center='test_center',
defaults=dict(srs='test_srs', datasource=dict(srid='test_datasource')),
id='id1',
layers=[
ParsedData(dict(
layer=dict(
buffer_size='test_buffer_size',
datasource=dict(query='test_query'),
id=v.id,
fields={},
requires=[v.reqs] if isinstance(v.reqs, str) else v.reqs or []
),
schema=[ParsedData(v.query, Path(v.id + '_s.yaml'))] if v.query else [],
), Path(f'./{v.id}.yaml')) for v in ([layers] if isinstance(layers, Case) else layers)
],
maxzoom='test_maxzoom',
minzoom='test_minzoom',
name='test_name',
pixel_scale='test_pixel_scale',
version='test_version',
))), Path('./tileset.yaml'))
class SqlTestCase(TestCase):
def _test(self, name, layers: List[Case],
expect: Dict[str, Union[Case, List[Case]]]):
expected_first = """\
-- This SQL code should be executed first
CREATE OR REPLACE FUNCTION slice_language_tags(tags hstore)
RETURNS hstore AS $$
SELECT delete_empty_keys(slice(tags, ARRAY['int_name', 'loc_name', 'name', 'wikidata', 'wikipedia']))
$$ LANGUAGE SQL IMMUTABLE;
"""
expected_last = '-- This SQL code should be executed last\n'
ts = parsed_data(layers)
result = {
k: '\n'.join(
[expected_sql(vv) for vv in ([v] if isinstance(v, Case) else v)]
) for k, v in expect.items()}
# Show entire diff in case assert fails
self.maxDiff = None
self.assertEqual(expected_first + '\n' + '\n'.join(result.values()) + '\n' + expected_last,
collect_sql(ts, parallel=False),
msg=f'{name} - single file')
self.assertEqual((expected_first, result, expected_last),
collect_sql(ts, parallel=True),
msg=f'{name} - parallel')
def test_require(self):
c1 = Case('c1', 'SELECT 1;')
c2 = Case('c2', 'SELECT 2;')
c3r2 = Case('c3', 'SELECT 3;', reqs='c2')
c4r12 = Case('c4', 'SELECT 4;', reqs=['c1', 'c2'])
c5r3 = Case('c5', 'SELECT 5;', reqs='c3')
c6r4 = Case('c6', 'SELECT 6;', reqs='c4')
c7r2 = Case('c7', 'SELECT 7;', reqs=dict(layers='c2'))
c8r12 = Case('c8', 'SELECT 8;', reqs=dict(layers=['c1', 'c2']))
c9 = Case('c9', 'SELECT 9;', reqs=dict(tables=['tbl1']))
c10 = Case('c10', 'SELECT 10;', reqs=dict(tables=['tbl1', 'tbl2']))
c11 = Case('c11', 'SELECT 11;', reqs=dict(functions=['fnc1']))
c12 = Case('c12', 'SELECT 12;', reqs=dict(functions=['fnc1', 'fnc2']))
c13 = Case('c13', 'SELECT 13;', reqs=dict(functions=['fnc1', 'fnc2'],
helpText="Custom 'ERROR MESSAGE' for missing function - single quote"))
c14 = Case('c14', 'SELECT 14;',
reqs=dict(tables=['tbl1'], helpText='Custom "ERROR MESSAGE" for missing table - double quote'))
self._test('a01', [], {})
self._test('a02', [c1], dict(c1=c1))
self._test('a03', [c1, c2], dict(c1=c1, c2=c2))
self._test('a04', [c1, c2], dict(c1=c1, c2=c2))
self._test('a05', [c2, c3r2], dict(c2__c3=[c2, c3r2]))
self._test('a06', [c3r2, c2], dict(c2__c3=[c2, c3r2]))
self._test('a07', [c1, c3r2, c2], dict(c1=c1, c2__c3=[c2, c3r2]))
self._test('a08', [c1, c2, c4r12], dict(c1__c2__c4=[c1, c2, c4r12]))
self._test('a09', [c2, c3r2, c5r3], dict(c2__c3__c5=[c2, c3r2, c5r3]))
self._test('a10', [c5r3, c3r2, c2], dict(c2__c3__c5=[c2, c3r2, c5r3]))
self._test('a11', [c1, c2, c4r12, c6r4],
dict(c1__c2__c4__c6=[c1, c2, c4r12, c6r4]))
self._test('a12', [c4r12, c3r2, c1, c2],
dict(c1__c2__c4__c3=[c1, c2, c4r12, c3r2]))
self._test('a13', [c2, c7r2], dict(c2__c7=[c2, c7r2]))
self._test('a14', [c1, c2, c8r12], dict(c1__c2__c8=[c1, c2, c8r12]))
self._test('a15', [c9], dict(c9=[c9]))
self._test('a16', [c10], dict(c10=[c10]))
self._test('a17', [c11], dict(c11=[c11]))
self._test('a18', [c12], dict(c12=[c12]))
self._test('a19', [c13], dict(c13=[c13]))
self._test('a20', [c14], dict(c14=[c14]))
def _ts_parse(self, reqs, expected_layers, expected_tables, expected_funcs, extra_cases=None):
cases = [] if not extra_cases else list(extra_cases)
cases.append(Case('my_id', 'my_query;', reqs=reqs))
ts = Tileset(parsed_data(cases))
self.assertEqual(ts.attribution, 'test_attribution')
self.assertEqual(ts.bounds, 'test_bounds')
self.assertEqual(ts.center, 'test_center')
self.assertEqual(ts.defaults, dict(srs='test_srs', datasource=dict(srid='test_datasource')))
self.assertEqual(ts.id, 'id1')
self.assertEqual(ts.maxzoom, 'test_maxzoom')
self.assertEqual(ts.minzoom, 'test_minzoom')
self.assertEqual(ts.name, 'test_name')
self.assertEqual(ts.pixel_scale, 'test_pixel_scale')
self.assertEqual(ts.version, 'test_version')
self.assertEqual(len(ts.layers), len(cases))
layer = ts.layers_by_id['my_id']
self.assertEqual(layer.id, 'my_id')
self.assertEqual(layer.requires_layers, expected_layers)
self.assertEqual(layer.requires_tables, expected_tables)
self.assertEqual(layer.requires_functions, expected_funcs)
# This test can be deleted once we remove the deprecated property in some future version
with warnings.catch_warnings():
warnings.filterwarnings('ignore', category=DeprecationWarning)
self.assertEqual(layer.requires, expected_layers)
def test_ts_parse(self):
extra = [Case('c1', 'SELECT 1;')]
self._ts_parse(None, [], [], [])
self._ts_parse([], [], [], [])
self._ts_parse({}, [], [], [])
self._ts_parse('c1', ['c1'], [], [], extra)
self._ts_parse(['c1'], ['c1'], [], [], extra)
self._ts_parse(dict(layers='c1'), ['c1'], [], [], extra)
self._ts_parse(dict(layers=['c1']), ['c1'], [], [], extra)
self._ts_parse(dict(tables='a'), [], ['a'], [])
self._ts_parse(dict(tables=['a', 'b']), [], ['a', 'b'], [])
self._ts_parse(dict(functions='x'), [], [], ['x'])
self._ts_parse(dict(functions=['x', 'y']), [], [], ['x', 'y'])
self._ts_parse(dict(layers=['c1'], tables=['a', 'b'], functions=['x', 'y']),
['c1'], ['a', 'b'], ['x', 'y'], extra)
if __name__ == '__main__':
main()
|
StarcoderdataPython
|
1957278
|
<reponame>qianchilang/learning-pytest
def test_option1(pytestconfig):
print('host: %s' % pytestconfig.getoption('host'))
print('port: %s' % pytestconfig.getoption('port'))
def test_option2(config):
print('host: %s' % config.getoption('host'))
print('port: %s' % config.getoption('port'))
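# These tests rely on custom options registered elsewhere. A minimal
# conftest.py sketch (an assumption -- the repo's actual conftest is not
# shown) that would make them pass:
#
# import pytest
#
# def pytest_addoption(parser):
#     parser.addoption('--host', action='store', default='localhost')
#     parser.addoption('--port', action='store', default='8080')
#
# @pytest.fixture
# def config(request):
#     return request.config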
|
StarcoderdataPython
|
5019614
|
<reponame>nickgaya/acsearch<gh_stars>0
"""
Implementation of the Aho-Corasick string search algorithm.
See https://en.wikipedia.org/wiki/Aho-Corasick_algorithm
"""
from collections import deque
from functools import wraps
def cached_property(method):
""" Decorator to create a lazy property that is cached on first access """
attr = '_'+method.__name__
@property
@wraps(method)
def func(self):
try:
return getattr(self, attr)
except AttributeError:
value = method(self)
setattr(self, attr, value)
return value
return func
class ACDictionary:
"""
Dictionary of words that builds an Aho-Corasick tree to support efficient
string search.
"""
def __init__(self, words):
"""
Initialize the dictionary with the given words. The dictionary does not
support modification once initialized.
"""
self._root = ACRootNode()
self._len = sum(1 for word in words if self._add_word(word))
def _add_word(self, word):
node = self._root
for char in word:
node = node.add_child(char)
if node:
was_word = node.is_word
node.is_word = True
return not was_word
else:
raise ValueError("Empty word: {!r}".format(word))
def findall(self, text):
"""
Return a list of dictionary words contained in the given text. Each
word will appear once per occurrence in the text.
"""
return [str(wnode) for end, wnode in self._find(text)]
def finditer(self, text):
"""
Yield a sequence of ACMatch objects for each dictionary word contained
in the given text.
"""
for end, wnode in self._find(text):
yield ACMatch(text, end-len(wnode), end)
def _find(self, text):
node = self._root
for end, c in enumerate(text, 1):
node = node.get_next(c)
for wnode in node.get_words():
yield (end, wnode)
def _nodes(self, bf=False, sort=False):
nodes = deque()
nodes.append(self._root)
while nodes:
node = nodes.popleft() if bf else nodes.pop()
yield node
nodes.extend((v for k, v in sorted(node.children.items(),
reverse=not bf))
if sort else node.children.values())
def __iter__(self):
""" Yield the words in the dictionary in sorted order """
for node in self._nodes(sort=True):
if node.is_word:
yield str(node)
def __len__(self):
return self._len
class ACMatch:
""" Object representing a matching substring of a given query """
def __init__(self, string, start, end):
self.string = string
self.start = start
self.end = end
def __str__(self):
return self.string[self.start:self.end]
class ACNode:
""" Aho-Corasick tree node """
def __init__(self, char, parent, is_word=False):
self.char = char
self.parent = parent
self.is_word = is_word
self.children = {}
self.len = parent.len + 1
def add_child(self, char, is_word=False):
"""
Add a child node containing the given char with the given word status.
If a child node already exists for the given char, its word status will
        be or-ed with the given is_word value.
Returns the child node.
"""
child = self.children.get(char)
if child:
# Update word status of child
child.is_word = child.is_word or is_word
else:
# Create new child
child = ACNode(char, self, is_word)
self.children[char] = child
return child
# Note: should not be accessed until the tree is fully built.
@cached_property
def suffix(self):
"""
Return the node corresponding to the longest dictionary prefix that is
a proper suffix of the current node.
"""
return (self.parent.suffix.get_next(self.char)
if self.parent else self.parent)
# Note: should not be accessed until the tree is fully built.
@cached_property
def dict_suffix(self):
"""
Return the node corresponding to the longest dictionary word that is a
proper suffix of the current node, or None if no such node exists.
"""
if self.suffix.is_word:
return self.suffix
else:
return self.suffix.dict_suffix
def get_next(self, char):
"""
Return the node corresponding to the longest dictionary prefix that is
a suffix of the given char appended to the current node.
"""
if char in self.children:
return self.children[char]
else:
return self.suffix.get_next(char)
def get_words(self):
"""
Yield all nodes corresponding to dictionary words that are suffixes of
the current node.
"""
node = self
while node:
if node.is_word:
yield node
node = node.dict_suffix
def get_chars(self):
"""
Return a list of characters for the given node.
"""
chars = self.parent.get_chars()
chars.append(self.char)
return chars
def __str__(self):
return ''.join(self.get_chars())
def __len__(self):
return self.len
class ACRootNode(ACNode):
""" Aho-Corasick root node """
def __init__(self):
self.char = None
self.parent = self
self.is_word = False
self.children = {}
self.len = 0
self._dict_suffix = None
def get_next(self, char):
if char in self.children:
return self.children[char]
else:
return self
def get_chars(self):
return []
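# A short usage sketch exercising only the public API above (inputs are
# illustrative):
if __name__ == '__main__':
    d = ACDictionary(['he', 'she', 'his', 'hers'])
    print(d.findall('ushers'))  # ['she', 'he', 'hers']
    for m in d.finditer('ushers'):
        print(m.start, m.end, str(m))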
|
StarcoderdataPython
|
11245901
|
# -*- coding: utf-8 -*-
# vim: tabstop=4 shiftwidth=4 softtabstop=4
#
# Copyright (C) 2015-2018 GEM Foundation
#
# OpenQuake is free software: you can redistribute it and/or modify it
# under the terms of the GNU Affero General Public License as published
# by the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# OpenQuake is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with OpenQuake. If not, see <http://www.gnu.org/licenses/>.
import numpy
from numpy.testing import assert_almost_equal as aae
from nose.plugins.attrib import attr
from openquake.qa_tests_data.scenario import (
case_1, case_2, case_3, case_4, case_5, case_6, case_7, case_8, case_9)
from openquake.baselib.node import floatformat
from openquake.calculators.export import export
from openquake.calculators.tests import CalculatorTestCase
def count_close(gmf_value, gmvs_site_one, gmvs_site_two, delta=0.1):
"""
Count the number of pairs of gmf values
within the specified range.
See https://bugs.launchpad.net/openquake/+bug/1097646
attached Scenario Hazard script.
"""
lower_bound = gmf_value - delta / 2.
upper_bound = gmf_value + delta / 2.
return sum((lower_bound <= v1 <= upper_bound) and
(lower_bound <= v2 <= upper_bound)
for v1, v2 in zip(gmvs_site_one, gmvs_site_two))
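# For example, with the default delta of 0.1:
#   count_close(0.5, [0.49, 0.70], [0.52, 0.71]) == 1
# since only the first pair falls within [0.45, 0.55] on both sites.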
class ScenarioTestCase(CalculatorTestCase):
def frequencies(self, case, fst_value, snd_value):
[gmfa] = self.execute(case.__file__, 'job.ini').values()
gmvs0 = gmfa[0, :, 0]
gmvs1 = gmfa[1, :, 0]
realizations = float(self.calc.oqparam.number_of_ground_motion_fields)
gmvs_within_range_fst = count_close(fst_value, gmvs0, gmvs1)
gmvs_within_range_snd = count_close(snd_value, gmvs0, gmvs1)
return (gmvs_within_range_fst / realizations,
gmvs_within_range_snd / realizations)
def medians(self, case):
[gmfa] = self.execute(case.__file__, 'job.ini').values()
median = {imt: [] for imt in self.calc.oqparam.imtls}
for imti, imt in enumerate(self.calc.oqparam.imtls):
for sid in self.calc.sitecol.sids:
gmvs = gmfa[sid, :, imti]
median[imt].append(numpy.median(gmvs))
return median
@attr('qa', 'hazard', 'scenario')
def test_case_1(self):
with floatformat('%5.1E'):
out = self.run_calc(case_1.__file__, 'job.ini', exports='xml')
self.assertEqualFiles('expected.xml', out['gmf_data', 'xml'][0])
@attr('qa', 'hazard', 'scenario')
def test_case_1bis(self):
# 2 out of 3 sites were filtered out
out = self.run_calc(case_1.__file__, 'job.ini',
maximum_distance='5.0', exports='csv')
self.assertEqualFiles(
'BooreAtkinson2008_gmf.csv', out['gmf_data', 'csv'][0])
@attr('qa', 'hazard', 'scenario')
def test_case_2(self):
medians = self.medians(case_2)['PGA']
aae(medians, [0.37412136, 0.19021782, 0.1365383], decimal=2)
@attr('qa', 'hazard', 'scenario')
def test_case_3(self):
medians_dict = self.medians(case_3)
medians_pga = medians_dict['PGA']
medians_sa = medians_dict['SA(0.1)']
aae(medians_pga, [0.48155582, 0.21123045, 0.14484586], decimal=2)
aae(medians_sa, [0.93913177, 0.40880148, 0.2692668], decimal=2)
@attr('qa', 'hazard', 'scenario')
def test_case_4(self):
medians = self.medians(case_4)['PGA']
aae(medians, [0.41615372, 0.22797466, 0.1936226], decimal=2)
# this is a case with a site model
[fname] = export(('site_model', 'xml'), self.calc.datastore)
self.assertEqualFiles('site_model.xml', fname)
@attr('qa', 'hazard', 'scenario')
def test_case_5(self):
f1, f2 = self.frequencies(case_5, 0.5, 1.0)
self.assertAlmostEqual(f1, 0.03, places=2)
self.assertAlmostEqual(f2, 0.003, places=3)
@attr('qa', 'hazard', 'scenario')
def test_case_6(self):
f1, f2 = self.frequencies(case_6, 0.5, 1.0)
self.assertAlmostEqual(f1, 0.05, places=2)
self.assertAlmostEqual(f2, 0.006, places=3)
@attr('qa', 'hazard', 'scenario')
def test_case_7(self):
f1, f2 = self.frequencies(case_7, 0.5, 1.0)
self.assertAlmostEqual(f1, 0.02, places=2)
self.assertAlmostEqual(f2, 0.002, places=3)
@attr('qa', 'hazard', 'scenario')
def test_case_8(self):
# test for a GMPE requiring hypocentral depth, since it was
# broken: https://bugs.launchpad.net/oq-engine/+bug/1334524
# I am not really checking anything, only that it runs
f1, f2 = self.frequencies(case_8, 0.5, 1.0)
self.assertAlmostEqual(f1, 0)
self.assertAlmostEqual(f2, 0)
@attr('qa', 'hazard', 'scenario')
def test_case_9(self):
with floatformat('%10.6E'):
out = self.run_calc(case_9.__file__, 'job.ini', exports='xml')
f1, f2 = out['gmf_data', 'xml']
self.assertEqualFiles('LinLee2008SSlab_gmf.xml', f1)
self.assertEqualFiles('YoungsEtAl1997SSlab_gmf.xml', f2)
out = self.run_calc(case_9.__file__, 'job.ini', exports='csv,npz')
f, _sitefile = out['gmf_data', 'csv']
self.assertEqualFiles('gmf.csv', f)
# test the .npz export
[fname] = out['gmf_data', 'npz']
with numpy.load(fname) as f:
self.assertEqual(len(f.keys()), 2) # rlz-000 rlz-001
data1 = f['rlz-000']
data2 = f['rlz-001']
self.assertEqual(data1.dtype.names, ('lon', 'lat', 'PGA'))
self.assertEqual(data1.shape, (3,))
self.assertEqual(data1['PGA'].shape, (3, 10))
self.assertEqual(data1.dtype.names, data2.dtype.names)
self.assertEqual(data1.shape, data2.shape)
|
StarcoderdataPython
|
130677
|
"""
implementation of criteo dataset
"""
# pylint: disable=unused-argument,missing-docstring
import os
import sys
import re
import random
import numpy as np
from intel_pytorch_extension import core
import inspect
# pytorch
import torch
from torch.utils.data import Dataset, RandomSampler
# add dlrm code path
try:
dlrm_dir_path = os.environ['DLRM_DIR']
sys.path.append(dlrm_dir_path)
except KeyError:
print("ERROR: Please set DLRM_DIR environment variable to the dlrm code location")
sys.exit(0)
#import dataset
import dlrm_data_pytorch as dp
import data_loader_terabyte
class CriteoCalib(Dataset):
def __init__(self,
data_path,
name,
test_num_workers=0,
max_ind_range=-1,
mlperf_bin_loader=False,
sub_sample_rate=0.0,
randomize="total",
memory_map=False):
super().__init__()
self.random_offsets = []
self.use_fixed_size = True
# fixed size queries
self.samples_to_aggregate = 1
if name == "kaggle":
raw_data_file = data_path + "/train.txt"
processed_data_file = data_path + "/kaggleAdDisplayChallenge_processed.npz"
elif name == "terabyte":
raw_data_file = data_path + "/day"
processed_data_file = data_path + "/terabyte_processed.npz"
else:
raise ValueError("only kaggle|terabyte dataset options are supported")
self.use_mlperf_bin_loader = mlperf_bin_loader and memory_map and name == "terabyte"
if self.use_mlperf_bin_loader:
cal_data_file = os.path.join(data_path, 'calibration.npz')
if os.path.isfile(cal_data_file):
print("Found calibration.npz !!")
self.cal_loader = data_loader_terabyte.CalibDataLoader(
data_filename=cal_data_file,
batch_size=1,
)
else:
counts_file = raw_data_file + '_fea_count.npz'
validate_file = data_path + "/terabyte_processed_val.bin"
if os.path.exists(validate_file):
print("Found terabyte_processed_val.bin !!")
self.val_data = data_loader_terabyte.CriteoBinDataset(
data_file=validate_file,
counts_file=counts_file,
batch_size=self.samples_to_aggregate,
max_ind_range=max_ind_range
)
self.val_loader = torch.utils.data.DataLoader(
self.val_data,
batch_size=None,
batch_sampler=None,
shuffle=False,
num_workers=0,
collate_fn=None,
pin_memory=False,
drop_last=False,
)
self.cal_loader = self.val_loader
else:
self.cal_loader = None
else:
self.val_data = dp.CriteoDataset(
dataset=name,
max_ind_range=max_ind_range,
sub_sample_rate=sub_sample_rate,
randomize=randomize,
split="val",
raw_path=raw_data_file,
pro_data=processed_data_file,
memory_map=memory_map
)
self.val_loader = torch.utils.data.DataLoader(
self.val_data,
batch_size=self.samples_to_aggregate,
shuffle=False,
num_workers=test_num_workers,
collate_fn=dp.collate_wrapper_criteo,
pin_memory=False,
drop_last=False,
)
self.cal_loader = self.val_loader
def get_calibration_data_loader(self):
return self.cal_loader
|
StarcoderdataPython
|
317937
|
<gh_stars>0
from setuptools import setup, find_packages
requirements = ['cycler==0.10.0',
'future==0.15.2',
'geopy==1.11.0',
'isodate==0.5.4',
'nose==1.3.7',
'numpy==1.14.4',
'pandas==0.20.2',
'patsy==0.4.1',
'py==1.4.31',
'pyparsing==2.1.9',
'pytest==3.0.3',
'python-dateutil==2.5.3',
'pytz==2018.4',
'requests==2.11.1',
'six==1.10.0',
'slisonner']
setup(
name="slayer",
version='0.3.27',
author="<NAME>",
author_email="<EMAIL>",
description=("Index tabular data into volume slices, convert volumes to"
"projects and environments."),
packages=find_packages(),
zip_safe=True,
install_requires=requirements,
dependency_links=['git+ssh://[email protected]/mathrioshka/slisonner.git#egg=slisonner'],
classifiers=['Programming Language :: Python :: 3.6']
)
|
StarcoderdataPython
|
3312223
|
from abaqusConstants import *
from .ConstrainedSketchGeometry import ConstrainedSketchGeometry
class getPointAtDistance(ConstrainedSketchGeometry):
def __init__(self, point: tuple[float], distance: str, percentage: Boolean = OFF):
"""This method returns a point offset along the given ConstrainedSketchGeometry from the
given end by a specified arc length distance or a percentage of the total length of the
ConstrainedSketchGeometry object.
Parameters
----------
point
A pair of Floats specifying the point from which the distance is to be measured.
distance
A float specifying the arc length distance along the ConstrainedSketchGeometry from the
*point* at which the required point is situated.
percentage
A Boolean that specifies if the *distance* is an absolute distance or is a fraction
relative to the length of the ConstrainedSketchGeometry object.
Returns
-------
A pair of floats representing the point along the edge.
"""
pass
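# A hypothetical call against an edge of an Abaqus sketch (the `line` object
# below is assumed, not defined in this stub):
#
# midpoint = line.getPointAtDistance(point=(0.0, 0.0), distance=0.5,
#                                    percentage=ON)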
|
StarcoderdataPython
|