from services import UserService as USER_SERVICE
from services import AccountService as ACCOUNT_SERVICE
class ExtendedUserKnot(object):
def __init__(self, user_id):
self.user_id = user_id
self._cached_user = None
self._cached_accounts = None
@property
def user(self):
if self._cached_user is None:
self._cached_user = USER_SERVICE.get_user_by_id(self.user_id)
return self._cached_user
@property
def accounts(self):
if self._cached_accounts is None:
self._cached_accounts = ACCOUNT_SERVICE.get_account_by_user_id(self.user_id)
return self._cached_accounts
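# A minimal alternative sketch (assuming Python 3.8+): functools.cached_property gives
# the same lazy, memoised behaviour without hand-rolled _cached_* attributes. The
# service aliases are the same ones imported above.
from functools import cached_property

class ExtendedUserKnotCached(object):
    def __init__(self, user_id):
        self.user_id = user_id

    @cached_property
    def user(self):
        return USER_SERVICE.get_user_by_id(self.user_id)

    @cached_property
    def accounts(self):
        return ACCOUNT_SERVICE.get_account_by_user_id(self.user_id)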
|
"""An example DAG demonstrating simple Apache Airflow operators."""
# [START composer_simple]
from __future__ import print_function
# [START composer_simple_define_dag]
import datetime
import random
from airflow import models
# [END composer_simple_define_dag]
# [START composer_simple_operators]
from airflow.operators import bash_operator
from airflow.operators import python_operator
from airflow.operators.dummy_operator import DummyOperator
from datetime import timedelta
from airflow.contrib.operators.bigquery_operator import BigQueryOperator
from airflow.contrib.operators.gcs_to_bq import GoogleCloudStorageToBigQueryOperator
from airflow.operators.bash_operator import BashOperator
# [END composer_simple_operators]
# [START composer_simple_define_dag]
BQ_DATASET_NAME = models.Variable.get('bq_dataset')
BQ_STAGING_DATASET_NAME = models.Variable.get('bq_dataset_uc1_staging')
job_run_date = models.Variable.get('job_run_date')
loadDimension = models.Variable.get('load_dimension')
bq_channel_sales_table = '{0}.{1}${2}'.format(
    BQ_DATASET_NAME,
    models.Variable.get('bq_channel_sales_table'),
    job_run_date.replace('-', ''))
default_dag_args = {
# The start_date describes when a DAG is valid / can be run. Set this to a
# fixed point in time rather than dynamically, since it is evaluated every
# time a DAG is parsed. See:
# https://airflow.apache.org/faq.html#what-s-the-deal-with-start-date
'start_date': datetime.datetime(2018, 7, 30),
'retry_delay': timedelta(minutes=5),
    # schedule_interval has no effect inside default_args; it is set on the DAG itself below.
}
# Define a DAG (directed acyclic graph) of tasks. In this file every operator is
# attached to the DAG explicitly via dag=dag_daily; a context-manager form that
# attaches tasks automatically is sketched just after the DAG definition below.
#'Call_center','Catalog','Catalog_page','Catalog_returns','Catalog_sales','Customer','Customer_address','Customer_demographics','Date_dim',
# 'Household_demographics','Income_band','Inventory','Item','Promotion','Reason','Ship_mode','Store','Store_returns','Store_sales','Time_dim',
# 'Warehouse','Web_page','Web_returns','Web_sales','Web_site,' ]
tables=['Date_dim','Store_sales','Store_returns','Store','Catalog_sales','Catalog_returns','Catalog_page','Web_sales','Web_site','Web_returns' ]
#tables=['account_permission']
dag_daily = models.DAG(
    'da304_temp',
    schedule_interval=None,
    default_args=default_dag_args,
    catchup=False)
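# For comparison, the context-manager form mentioned above attaches operators
# automatically, so dag=dag_daily would not need to be passed to every task.
# A minimal sketch (not executed here; shown for illustration only):
#
#     with models.DAG('da304_temp',
#                     schedule_interval=None,
#                     default_args=default_dag_args,
#                     catchup=False) as dag_daily:
#         start = DummyOperator(task_id='start')  # added to dag_daily automatically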
def deleteStagingTablesTask(table):
return BigQueryOperator(
task_id='delete_{0}'.format(table),
bql = '''
DROP TABLE IF EXISTS {{params.table}}
''',
params={"table":"{0}.{1}".format(BQ_STAGING_DATASET_NAME,table)},
use_legacy_sql=False,
dag=dag_daily)
def createDimensionStagingDagTasks():
def createTaskHelper(table):
return GoogleCloudStorageToBigQueryOperator(
task_id = 'create_staging_{0}'.format(table),
field_delimiter = '|',
schema_object = 'schema/{0}.json'.format(table),
source_objects = ['{0}/{1}.dat'.format('dimension',table.lower())],
bucket = 'da304-staging',
destination_project_dataset_table = "{0}.{1}".format(BQ_STAGING_DATASET_NAME,table),
external_table = True,
dag=dag_daily)
complete_dim_stage= DummyOperator(
task_id="Complete_dim_staging",
dag=dag_daily)
tables=['Store','Catalog_page','Date_dim','Web_site']
for table in tables:
deleteStagingTablesTask(table) >> createTaskHelper(table) >> materializeDimensionTables(table) >> complete_dim_stage
return complete_dim_stage
def createFactStagingTables():
def createTaskHelper(table):
return GoogleCloudStorageToBigQueryOperator(
task_id = 'create_staging_{0}'.format(table),
skip_leading_rows=1,
field_delimiter = '|',
schema_object = 'schema/{0}.json'.format(table),
source_objects = ['facts/{0}/{1}.csv'.format(job_run_date,table)],
bucket = 'da304-staging',
destination_project_dataset_table = "{0}.{1}".format(BQ_STAGING_DATASET_NAME,table),
external_table = True,
dag=dag_daily)
tables=['Store_sales','Store_returns','Catalog_sales','Catalog_returns','Web_sales','Web_returns' ]
complete_fact_stage= DummyOperator(
task_id="Complete_Fact_Staging",
dag=dag_daily)
    for table in tables:
        deleteStagingTablesTask(table) >> createTaskHelper(table) >> materializeFactTables(table) >> complete_fact_stage
return complete_fact_stage
def materializeDimensionTables(table):
def createTaskHelper(table):
return BigQueryOperator(
            task_id='materialize_{0}'.format(table),
bql='{0}.sql'.format(table),
use_legacy_sql=False,
write_disposition="WRITE_TRUNCATE",
destination_dataset_table='{0}.{1}'.format(BQ_DATASET_NAME,table),
dag=dag_daily)
return createTaskHelper(table)
def materializeFactTables(table):
def createTaskHelper(table):
return BigQueryOperator(
task_id='materialize_{0}'.format(table),
bql='{0}.sql'.format(table),
params={"partition_date":"{0}".format(job_run_date)},
use_legacy_sql=False,
write_disposition="WRITE_TRUNCATE",
destination_dataset_table='{0}.{1}${2}'.format(BQ_DATASET_NAME,table,job_run_date.replace('-','')),
dag=dag_daily)
return createTaskHelper(table)
materialize_q1 = BigQueryOperator(
task_id='calculate_sales_by_channel',
bql='bq_mat_channel_sales.sql',
params={"date_start":"{0}".format(job_run_date)},
use_legacy_sql=False,
write_disposition='WRITE_TRUNCATE',
destination_dataset_table=bq_channel_sales_table,
dag=dag_daily)
if loadDimension == 'yes':
createDimensionStagingDagTasks() >> createFactStagingTables() >> materialize_q1
else:
createFactStagingTables() >> materialize_q1
|
# ------------------------------------------------------------------------------
# Copyright (c) Microsoft
# Licensed under the MIT License.
# Written by Ke Sun ([email protected])
# ------------------------------------------------------------------------------
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import logging
import time
from pathlib import Path
import torch
import torch.nn as nn
class FullModel(nn.Module):
"""
Distribute the loss on multi-gpu to reduce
the memory cost in the main gpu.
You can check the following discussion.
https://discuss.pytorch.org/t/dataparallel-imbalanced-memory-usage/22551/21
"""
def __init__(self, model, loss):
super(FullModel, self).__init__()
self.model = model
self.loss = loss
def forward(self, inputs, labels):
outputs = self.model(inputs)
loss = self.loss(outputs, labels)
return torch.unsqueeze(loss,0), outputs
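# Usage sketch (illustrative names, not part of this module): wrap a network and a
# criterion in FullModel, then let DataParallel reduce the per-GPU losses.
#
#   full_model = nn.DataParallel(FullModel(net, nn.CrossEntropyLoss())).cuda()
#   losses, outputs = full_model(images, labels)  # loss is computed on each GPU
#   loss = losses.mean()                          # reduce the per-GPU loss values
#   loss.backward()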
class AverageMeter(object):
"""Computes and stores the average and current value"""
def __init__(self):
self.initialized = False
self.val = None
self.avg = None
self.sum = None
self.count = None
def initialize(self, val, weight):
self.val = val
self.avg = val
self.sum = val * weight
self.count = weight
self.initialized = True
def update(self, val, weight=1):
if not self.initialized:
self.initialize(val, weight)
else:
self.add(val, weight)
def add(self, val, weight):
self.val = val
self.sum += val * weight
self.count += weight
self.avg = self.sum / self.count
def value(self):
return self.val
def average(self):
return self.avg
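# Usage sketch (illustrative): track a running mean of per-batch losses.
#
#   loss_meter = AverageMeter()
#   for batch_loss in (0.9, 0.7, 0.5):
#       loss_meter.update(batch_loss)   # weight defaults to 1
#   loss_meter.average()                # -> 0.7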
def create_logger(cfg, cfg_name, phase='train'):
root_output_dir = Path(cfg.OUTPUT_DIR)
# set up logger
if not root_output_dir.exists():
print('=> creating {}'.format(root_output_dir))
root_output_dir.mkdir()
dataset = cfg.DATA.DATASET
model = cfg.MODEL.NAME
cfg_name = os.path.basename(cfg_name).split('.')[0]
final_output_dir = root_output_dir / cfg_name
print('=> creating {}'.format(final_output_dir))
final_output_dir.mkdir(parents=True, exist_ok=True)
time_str = time.strftime('%Y-%m-%d-%H-%M')
log_file = '{}_{}_{}.log'.format(cfg_name, time_str, phase)
final_log_file = final_output_dir / log_file
head = '%(asctime)-15s %(message)s'
logging.basicConfig(filename=str(final_log_file),
format=head)
logger = logging.getLogger()
logger.setLevel(logging.INFO)
console = logging.StreamHandler()
logging.getLogger('').addHandler(console)
if phase == 'train':
tensorboard_log_dir = Path(cfg.LOG_DIR) / \
(cfg_name + '_' + time_str)
print('=> creating {}'.format(tensorboard_log_dir))
tensorboard_log_dir.mkdir(parents=True, exist_ok=True)
return logger, str(final_output_dir), str(tensorboard_log_dir)
else:
return logger, str(final_output_dir)
# -----------------------learning rate update policy--------------------------------
def poly_scheduler(optimizer, max_iters, iteration, cfg):
#update learning rate with poly policy
power = cfg.TRAIN.POWER
base_lr = cfg.TRAIN.LR
for param_group in optimizer.param_groups:
#lr_mult = param_group['lr']/(base_lr*((1.0 - float(iteration)/float(max_iters))**power))
param_group['lr'] = base_lr*((1.0 - float(iteration)/float(max_iters))**power)#*lr_mult
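# Usage sketch (illustrative): call once per iteration inside the training loop so the
# learning rate decays from cfg.TRAIN.LR towards zero.
#
#   for cur_iter in range(max_iters):
#       poly_scheduler(optimizer, max_iters, cur_iter, cfg)
#       train_one_iteration(...)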
|
from typing import List
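# Approach 1: horizontal scanning -- repeatedly shrink the candidate prefix against each string.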
def longestCommonPrefix(strs: List[str]) -> str:
if not strs:
return ''
prefix, count = strs[0], len(strs)
for i in range(1, count):
idx = 0
length = min(len(prefix), len(strs[i]))
while idx < length and prefix[idx] == strs[i][idx]:
idx += 1
prefix = strs[i][:idx]
if not prefix:
break
return prefix
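# Approach 2 (redefines the function above): vertical scanning -- compare character
# columns across all strings until a mismatch or the end of a shorter string.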
def longestCommonPrefix(strs: List[str]) -> str:
if not strs:
return ''
count = len(strs)
for i in range(len(strs[0])):
c = strs[0][i]
# for j in range(count):
# if i == len(strs[j]) or c != strs[j][i]:
# return strs[0][:i]
if any(i == len(strs[j]) or c != strs[j][i] for j in range(count)):
return strs[0][:i]
return strs[0]
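# Approach 3 (redefines the function above): binary search on the prefix length.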
def longestCommonPrefix(strs: List[str]) -> str:
def is_common_prefix(length):
common_str, count = strs[0][:length], len(strs)
return all(strs[i][:length] == common_str for i in range(1, count))
if not strs:
return ''
low, high = 0, min(len(s) for s in strs)
while low < high:
        # Bias mid upward with the +1; without it the answer is still correct,
        # but the search can stop making progress and time out.
        mid = (high - low + 1) // 2 + low
if is_common_prefix(mid):
low = mid
else:
high = mid - 1
return strs[0][:low]
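# Quick sanity check for the last definition above (inputs are illustrative).
if __name__ == "__main__":
    assert longestCommonPrefix(["flower", "flow", "flight"]) == "fl"
    assert longestCommonPrefix(["dog", "racecar", "car"]) == ""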
|
# Generated by Django 4.0 on 2021-12-24 19:10
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('usuarios', '0003_alter_usuario_segmentos'),
]
operations = [
migrations.AlterField(
model_name='usuario',
name='foto',
field=models.CharField(blank=True, default='../media/usuarios/default_user.jpeg', max_length=254, null=True),
),
]
|
import math
import pathlib
from typing import List, Dict
import requests
from models import Instrument
from pkg.config import Config
from pkg.gcs_stream_upload import GCSObjectStreamUpload
from pkg.google_storage import GoogleStorage
from pkg.sftp import SFTP
from util.service_logging import log
class CaseMover:
def __init__(self, google_storage: GoogleStorage, config: Config, sftp: SFTP):
self.google_storage = google_storage
self.config = config
self.sftp = sftp
def instrument_needs_updating(self, instrument: Instrument) -> bool:
return self.bdbx_md5_changed(instrument) or self.gcp_missing_files(instrument)
def bdbx_md5_changed(self, instrument: Instrument) -> bool:
blob_md5 = self.google_storage.get_blob_md5(instrument.get_bdbx_blob_filepath())
return instrument.bdbx_md5 != blob_md5
def gcp_missing_files(self, instrument: Instrument) -> bool:
instrument_blobs = self.get_instrument_blobs(instrument)
for file in instrument.files:
if file.lower() not in instrument_blobs:
return True
return False
def get_instrument_blobs(self, instrument: Instrument) -> List[str]:
instrument_blobs = []
for blob in self.google_storage.list_blobs():
if pathlib.Path(blob.name).parent.name == instrument.gcp_folder():
instrument_blobs.append(pathlib.Path(blob.name).name.lower())
return instrument_blobs
def sync_instrument(self, instrument: Instrument) -> None:
blob_filepaths = instrument.get_blob_filepaths()
for file in instrument.files:
blob_filepath = blob_filepaths[file]
sftp_path = f"{instrument.sftp_path}/{file}"
log.info(f"Syncing file from SFTP: {sftp_path} to GCP: {blob_filepath}")
self.sync_file(blob_filepath, sftp_path)
def sync_file(self, blob_filepath: str, sftp_path: str) -> None:
try:
with GCSObjectStreamUpload(
google_storage=self.google_storage,
blob_name=blob_filepath,
chunk_size=self.config.bufsize,
) as blob_stream:
bdbx_details = self.sftp.sftp_connection.stat(sftp_path)
chunks = math.ceil(bdbx_details.st_size / self.config.bufsize)
sftp_file = self.sftp.sftp_connection.open(
sftp_path, bufsize=self.config.bufsize
)
sftp_file.prefetch()
for chunk in range(chunks):
sftp_file.seek(chunk * self.config.bufsize)
blob_stream.write(sftp_file.read(self.config.bufsize))
except Exception:
log.exception("Fatal error while syncing file")
def send_request_to_api(self, instrument_name: str) -> None:
        # Fire the request with a 1 second timeout and ignore the ReadTimeout below:
        # the API keeps processing after we disconnect, and not waiting for it also
        # allows requests to be made to the API in parallel.
log.info(
f"Sending request to {self.config.blaise_api_url} "
+ f"for instrument {instrument_name}"
)
try:
requests.post(
(
f"http://{self.config.blaise_api_url}/api/v1/serverparks/"
+ f"{self.config.server_park}/instruments/{instrument_name}/data"
),
headers={"content-type": "application/json"},
json={"instrumentDataPath": instrument_name},
timeout=1,
)
except requests.exceptions.ReadTimeout:
pass
def instrument_exists_in_blaise(self, instrument_name: str) -> bool:
response = requests.get(
f"http://{self.config.blaise_api_url}/api/v1/serverparks/"
+ f"{self.config.server_park}/instruments/{instrument_name}/exists"
)
return response.json()
def filter_existing_instruments(
self, instruments: Dict[str, Instrument]
) -> Dict[str, Instrument]:
filtered_instruments = {}
for key, instrument in instruments.items():
if self.instrument_exists_in_blaise(instrument.gcp_folder()):
log.info(f"Instrument {instrument.gcp_folder()} exists in blaise")
filtered_instruments[key] = instrument
else:
log.info(
f"Instrument {instrument.gcp_folder()} does not exist in blaise, not ingesting..."
)
return filtered_instruments
|
# import pandas, matplotlib, and statsmodels
import pandas as pd
import numpy as np
pd.set_option('display.width', 200)
pd.set_option('display.max_columns', 35)
pd.set_option('display.max_rows', 200)
pd.options.display.float_format = '{:,.2f}'.format
nls97 = pd.read_pickle("data/nls97b.pkl")
# show some descriptive statistics
gpaoverall = nls97.gpaoverall
gpaoverall.mean()
gpaoverall.describe()
gpaoverall.quantile(np.arange(0.1,1.1,0.1))
# subset based on values
gpaoverall.loc[gpaoverall.between(3,3.5)].head(5)
gpaoverall.loc[gpaoverall.between(3,3.5)].count()
gpaoverall.loc[(gpaoverall<2) | (gpaoverall>4)].sample(5, random_state=1)
gpaoverall.loc[gpaoverall>gpaoverall.quantile(0.99)].\
agg(['count','min','max'])
# run tests across all values
gpaoverall.between(3,3.5).sum() # number of people with GPA between 3 and 3.5
(gpaoverall>4).any() # whether any person has a GPA greater than 4
(gpaoverall>=0).all() # whether all people have a GPA greater than or equal to 0
(gpaoverall>=0).sum() # number of people with GPA greater than or equal to 0
(gpaoverall==0).sum() # number of people with GPA equal to 0
gpaoverall.isnull().sum() # number of people with a missing GPA value
# show GPA for high and low wage income earners
nls97.loc[nls97.wageincome > nls97.wageincome.quantile(0.75),'gpaoverall'].mean()
nls97.loc[nls97.wageincome < nls97.wageincome.quantile(0.25),'gpaoverall'].mean()
# show counts for series with categorical data
nls97.maritalstatus.describe()
nls97.maritalstatus.value_counts()
|
"""
day 6 lecture
"""
demo_str = 'this is my string'
#for each_string in demo_str:
# print(each_string)
#for each_word in demo_str.split(): #.split creates list
# print(each_word.upper())
# print(each_word.title())
#for each_word in demo_str.split():
# if each_word == 'my':
# print(each_word)
#for each_word in demo_str.split():
# print(each_word)
# for each_string in each_word:
# print(each_string)
#for each_num in range(1,5):
# print(each_num)
num_list = [213,321,123,312]
max_item = num_list[0]
for eachnum in num_list:
if max_item <= eachnum:
max_item = eachnum
print(max_item)
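# For comparison, the built-in max() performs the same scan in one call:
#print(max(num_list))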
|
from prestans.http import STATUS
from prestans.rest import RequestHandler
import pytest
import unittest
class NoContentHandler(RequestHandler):
def get(self):
self.response.status = STATUS.NO_CONTENT
def test_app():
from webtest import TestApp
from prestans.rest import RequestRouter
api = RequestRouter([
('/no-content', NoContentHandler)
], application_name="api", debug=True)
return TestApp(app=api)
class Issue154(unittest.TestCase):
def test_204_header_omitted(self):
"""
Request should return no content with header omitted
"""
app = test_app()
resp = app.get('/no-content')
self.assertEqual(resp.status_int, STATUS.NO_CONTENT)
self.assertIsNone(resp.content_type)
|
#! /usr/bin/env python
# -*- coding: utf-8 -*-
"""Creates aRT-ratio comparison figures (ECDF) and convergence figures for the comparison of 2 algorithms.
Scale up figures for two algorithms can be done with compall/ppfigs.py
"""
from __future__ import absolute_import
import os
import matplotlib.pyplot as plt
from pdb import set_trace
try:
from matplotlib.transforms import blended_transform_factory as blend
except ImportError:
# compatibility matplotlib 0.8
from matplotlib.transforms import blend_xy_sep_transform as blend
import numpy as np
from .. import toolsstats, readalign, ppfigparam, testbedsettings, toolsdivers
from ..toolsstats import ranksumtest
from ..ppfig import save_figure, plotUnifLogXMarkers
#try:
#supersede this module own ranksumtest method
#from scipy.stats import ranksumtest as ranksumtest
#except ImportError:
#from cocopp.toolsstats import ranksumtest
#pass
dimensions = (2, 3, 5, 10, 20, 40)
styles = [{'color': 'c', 'marker': '+', 'markeredgecolor': 'c',
'markerfacecolor': 'None'},
{'color': 'g', 'marker': 'v', 'markeredgecolor': 'g',
'markerfacecolor': 'None'},
{'color': 'b', 'marker': '*', 'markeredgecolor': 'b',
'markerfacecolor': 'None'},
{'color': 'k', 'marker': 'o', 'markeredgecolor': 'k',
'markerfacecolor': 'None'},
{'color': 'r', 'marker': 's', 'markeredgecolor': 'r',
'markerfacecolor': 'None'},
{'color': 'm', 'marker': 'D', 'markeredgecolor': 'm',
'markerfacecolor': 'None'},
{'color': 'k'},
{'color': 'y'},
{'color': 'k'},
{'color': 'c'},
{'color': 'r'},
{'color': 'm'}]
linewidth = 3 # overwritten by config.py
offset = 0.005
incrstars = 1.5
fthresh = 1e-8
xmax = 1000
dimension_index = dict([(dimensions[i], i) for i in range(len(dimensions))])
def _generateData(entry0, entry1, fthresh=None, downsampling=None):
def alignData(i0, i1):
"""Returns two arrays of fevals aligned on function evaluations.
"""
res = readalign.alignArrayData(readalign.HArrayMultiReader([i0.evals, i1.evals]))
idx = 1 + i0.nbRuns()
data0 = res[:, np.r_[0, 1:idx]]
data1 = res[:, np.r_[0, idx:idx+i1.nbRuns()]]
return data0, data1
def computeERT(hdata, maxevals):
res = []
for i in hdata:
data = i.copy()
data = data[1:]
succ = (np.isnan(data)==False)
if any(np.isnan(data)):
data[np.isnan(data)] = maxevals[np.isnan(data)]
tmp = [i[0]]
tmp.extend(toolsstats.sp(data, issuccessful=succ))
res.append(tmp)
return np.vstack(res)
tmpdata0, tmpdata1 = alignData(entry0, entry1)
tmpdata0 = tmpdata0[::downsampling] #downsampling
tmpdata1 = tmpdata1[::downsampling]
data0 = computeERT(tmpdata0, entry0.maxevals)
data1 = computeERT(tmpdata1, entry1.maxevals)
if fthresh and (tmpdata0[:, 0] < fthresh).any():
if not (tmpdata0[:, 0] == fthresh).any():
tmp0 = entry0.detEvals([fthresh])[0]
tmp0 = np.reshape(np.insert(tmp0, 0, fthresh), (1, -1))
tmp0 = computeERT(tmp0, entry0.maxevals)
data0 = np.concatenate((data0, tmp0))
tmp1 = entry1.detEvals([fthresh])[0]
tmp1 = np.reshape(np.insert(tmp1, 0, fthresh), (1, -1))
tmp1 = computeERT(tmp1, entry1.maxevals)
data1 = np.concatenate((data1, tmp1))
data0 = data0[data0[:, 0] >= fthresh]
data1 = data1[data1[:, 0] >= fthresh]
if xmax:
data0 = data0[data0[:, 0] <= xmax]
data1 = data1[data1[:, 0] <= xmax]
# TODO: watch that it does not become empty.
#set_trace()
return data0, data1
def beautify(xmin=None):
"""Format the figure."""
ax = plt.gca()
yax = ax.get_yaxis()
ax.set_xscale('log')
ax.set_yscale('log')
ymin, ymax = plt.ylim()
ybnd = max(1./ymin, ymax)
plt.ylim(1./ybnd, ybnd)
if ybnd < 100:
yax.grid(True, which='minor')
# We are setting xmin
if xmin:
plt.xlim(xmin=xmin)
plt.xlim(xmax=xmax)
ax.invert_xaxis()
# Annotate figure
ax.set_xlabel('log10(Delta ftarget)')
ax.set_ylabel(r'log10(aRT1/aRT0) or ~#succ') # TODO: replace hard-coded 15
ax.grid(True)
#Tick label handling
xticks = ax.get_xticks()
tmp = []
for i in xticks:
tmp.append('%d' % round(np.log10(i)))
ax.set_xticklabels(tmp)
yticks = ax.get_yticks()
tmp = []
for i in yticks:
tmp.append('%d' % round(np.log10(i)))
ax.set_yticklabels(tmp)
# Reverse yticks below 1
tmp = ax.get_yticks(minor=True)
tmp[tmp<1] = sorted(1/(tmp[tmp<1]*np.power(10, -2*np.floor(np.log10(tmp[tmp<1]))-1)))
tmp = tmp[tmp<plt.ylim()[1]]
tmp = tmp[tmp>plt.ylim()[0]]
ax.set_yticks(tmp, minor=True)
tmp = ax.get_yticklines()
tmp.extend(yax.get_minorticklines())
#set_trace()
for i in tmp:
i.set_markeredgewidth(2)
def annotate(entry0, entry1, dim, minfvalue=1e-8, nbtests=1):
"""Display some annotations associated to the graphs generated."""
ha = 'left'
va = 'center'
lastfvalue = min(entry0.evals[-1][0], entry1.evals[-1][0])
if not minfvalue or minfvalue < lastfvalue:
minfvalue = lastfvalue
line = []
data0 = entry0.detEvals([minfvalue])[0]
evals0 = data0.copy()
succ = (np.isnan(evals0) == False)
evals0[np.isnan(evals0)] = entry0.maxevals[np.isnan(evals0)]
line.append(toolsstats.sp(evals0, issuccessful=succ))
data1 = entry1.detEvals([minfvalue])[0]
evals1 = data1.copy()
succ = (np.isnan(evals1) == False)
evals1[np.isnan(evals1)] = entry1.maxevals[np.isnan(evals1)]
line.append(toolsstats.sp(evals1, issuccessful=succ))
# What's the situation?
txt = '%dD' % dim
if (line[0][2] > 0 and line[1][2] > 0 and line[1][2] < 10):
tmp = str(int(line[1][2]))
tmp2 = str(int(line[0][2]))
txt = tmp + '/' + tmp2
dims = dimension_index
ax = plt.gca()
assert line[0][2] > 0 or line[1][2] > 0
signdata = line[1][0] - line[0][0]
if line[0][2] > 0 and line[1][2] > 0:
trans = ax.transData
annotcoord = [minfvalue, line[1][0]/line[0][0]]
elif line[0][2] == 0:
trans = blend(ax.transData, ax.transAxes)
annotcoord = [minfvalue, -line[1][1]/2 + 0.5 + offset*(5-dims[dim])]
#if va == 'top':
# va = 'bottom'
elif line[1][2] == 0:
trans = blend(ax.transData, ax.transAxes)
annotcoord = [minfvalue, line[0][1]/2 + 0.5 - offset*(5-dims[dim])]
plt.text(annotcoord[0], annotcoord[1], txt, horizontalalignment=ha,
verticalalignment=va, transform=trans)
#ranksum test
line0 = np.power(data0, -1.)
line0[np.isnan(line0)] = -entry0.finalfunvals[np.isnan(line0)]
line1 = np.power(data1, -1.)
line1[np.isnan(line1)] = -entry1.finalfunvals[np.isnan(line1)]
# one-tailed statistics: scipy.stats.mannwhitneyu, two-tailed statistics: scipy.stats.ranksumtest
z, p = ranksumtest(line0, line1)
# Set the correct line in data0 and data1
nbstars = 0
# sign of z-value and data must agree
if ((nbtests * p) < 0.05 and (z * signdata) > 0):
nbstars = int(np.min([5, -np.ceil(np.log10(nbtests * p + 1e-99))]))
if nbstars > 0:
xstars = annotcoord[0] * np.power(incrstars, np.arange(1., 1. + nbstars))
# the additional slicing [0:int(nbstars)] is due to
# np.arange(1., 1. - 0.1 * nbstars, -0.1) not having the right number
# of elements due to numerical error
ystars = [annotcoord[1]] * nbstars
try:
plt.plot(xstars, ystars, marker='*', ls='', color='w',
markersize=5*linewidth, markeredgecolor='k',
markerfacecolor='None',
zorder=20, markeredgewidth = 0.4 * linewidth,
transform=trans, clip_on=False)
except KeyError:
#Version problem
plt.plot(xstars, ystars, marker='+', ls='', color='w',
markersize=2.5*linewidth, markeredgecolor='k',
zorder=20, markeredgewidth = 0.2 * linewidth,
transform=trans, clip_on=False)
def main(dsList0, dsList1, minfvalue=1e-8, outputdir=''):
"""Returns aRT1/aRT0 comparison figure."""
#plt.rc("axes", labelsize=20, titlesize=24)
#plt.rc("xtick", labelsize=20)
#plt.rc("ytick", labelsize=20)
#plt.rc("font", size=20)
#plt.rc("legend", fontsize=20)
# minfvalue = pproc.TargetValues.cast(minfvalue)
funInfos = ppfigparam.read_fun_infos()
dictFun0 = dsList0.dictByFunc()
dictFun1 = dsList1.dictByFunc()
for func in set.intersection(set(dictFun0), set(dictFun1)):
dictDim0 = dictFun0[func].dictByDim()
dictDim1 = dictFun1[func].dictByDim()
filename = os.path.join(outputdir,'ppfig2_f%03d' % (func))
dataperdim = {}
fvalueswitch = {}
nbtests = 0
for i, dim in enumerate(dimensions):
try:
entry0 = dictDim0[dim][0]
entry1 = dictDim1[dim][0]
except KeyError:
continue
nbtests += 1
# generateData:
data = _generateData(entry0, entry1, fthresh=fthresh)
dataperdim[dim] = data
if len(data[0]) == 0 and len(data[1]) == 0:
continue
# TODO: hack, modify slightly so line goes to 'zero'
if minfvalue:
for d in data:
tmp = d[:, 0]
tmp[tmp == 0] = min(min(tmp[tmp > 0]), minfvalue)**2
# plot
idx = np.isfinite(data[0][:, 1]) * np.isfinite(data[1][:, 1])
ydata = data[1][idx, 1]/data[0][idx, 1]
kwargs = styles[i].copy()
kwargs['label'] = '%2d-D' % dim
tmp = plotUnifLogXMarkers(data[0][idx, 0], ydata, nbperdecade=1, logscale=True, **kwargs)
plt.setp(tmp, markersize=3*linewidth)
plt.setp(tmp[0], ls='--')
# This is only one possibility:
#idx = (data[0][:, 3] >= 5) * (data[1][:, 3] >= 5)
idx = ((data[0][:, 1] <= 3 * np.median(entry0.maxevals))
* (data[1][:, 1] <= 3 * np.median(entry1.maxevals)))
if not idx.any():
fvalueswitch[dim] = np.inf
# Hack: fvalueswitch is the smallest value of f where the line
# was still solid.
continue
fvalueswitch[dim] = min(data[0][idx, 0])
ydata = data[1][idx, 1]/data[0][idx, 1]
tmp = plotUnifLogXMarkers(data[0][idx, 0], ydata, nbperdecade=1, logscale=True, **styles[i])
plt.setp(tmp[1], markersize=3*linewidth)
beautify(xmin=minfvalue)
#beautify()
ax = plt.gca()
# Freeze the boundaries
ax.set_autoscale_on(False)
#trans = transforms.blended_transform_factory(ax.transData, ax.transAxes)
# Plot everything else
for i, dim in enumerate(dimensions):
try:
entry0 = dictDim0[dim][0]
entry1 = dictDim1[dim][0]
data = dataperdim[dim]
except KeyError:
continue
if len(data[0]) == 0 and len(data[1]) == 0:
continue
# annotation
annotate(entry0, entry1, dim, minfvalue, nbtests=nbtests)
tmp0 = np.isfinite(data[0][:, 1])
tmp1 = np.isfinite(data[1][:, 1])
idx = tmp0 * tmp1
if not idx.any():
continue
#Do not plot anything else if it happens after minfvalue
if data[0][idx, 0][-1] <= minfvalue:
# hack for the legend
continue
# Determine which algorithm went further
algstoppedlast = 0
algstoppedfirst = 1
if np.sum(tmp0) < np.sum(tmp1):
algstoppedlast = 1
algstoppedfirst = 0
#marker if an algorithm stopped
ydata = data[1][idx, 1]/data[0][idx, 1]
plt.plot((data[0][idx, 0][-1], ), (ydata[-1], ), marker='D', ls='',
color=styles[i]['color'], markeredgecolor=styles[i]['color'],
markerfacecolor=styles[i]['color'], markersize=4*linewidth)
tmpy = ydata[-1]
# plot probability of success line
dataofinterest = data[algstoppedlast]
tmp = np.nonzero(idx)[0][-1] # Why [0]?
# add the last line for which both algorithm still have a success
idx = (data[algstoppedfirst][:, 2] == 0.) * (dataofinterest[:, 2] > 0.)
idx[tmp] = True
if np.sum(idx) <= 1:#len(idx) == 0 or not idx.any():
continue
ymin, ymax = plt.ylim()
#orientation = -1
ybnd = ymin
if algstoppedlast == 0:
ybnd = ymax
#orientation = 1
#ydata = orientation * dataofinterest[idx, 2] / 2 + 0.5
ydata = np.power(10, np.log10(ybnd) * (dataofinterest[idx, 2]
-offset*(5-i)*np.log10(ymax/ymin)/np.abs(np.log10(ybnd))))
ls = '-'
if dataofinterest[idx, 0][0] < fvalueswitch[dim]:
ls = '--'
tmp = plt.plot([dataofinterest[idx, 0][0]]*2, (tmpy, ydata[0]),
**styles[i])
plt.setp(tmp, ls=ls, marker='')
tmp = plt.plot((dataofinterest[idx, 0][0], ), (ydata[0], ), marker='D', ls='',
color=styles[i]['color'], markeredgecolor=styles[i]['color'],
markerfacecolor=styles[i]['color'], markersize=4*linewidth)
kwargs = styles[i].copy()
kwargs['ls'] = ls
tmp = plotUnifLogXMarkers(dataofinterest[idx, 0], ydata, nbperdecade=1, logscale=True, **kwargs)
plt.setp(tmp, markersize=3*linewidth)
#Do not plot anything else if it happens after minfvalue
if dataofinterest[idx, 0][-1] <= minfvalue:
continue
#plt.plot((dataofinterest[idx, 0][-1], ), (ydata[-1], ), marker='d',
# color=styles[i]['color'], markeredgecolor=styles[i]['color'],
# markerfacecolor=styles[i]['color'], markersize=4*linewidth)
if func in funInfos.keys():
plt.title(funInfos[func])
if func in testbedsettings.current_testbed.functions_with_legend:
toolsdivers.legend(loc='best')
# save
save_figure(filename, dsList0[0].algId)
plt.close()
#set_trace()
#plt.rcdefaults()
|
'''Trains a simple convnet on the sample video feed data
'''
from __future__ import print_function
import keras
import time
import numpy as np
import skimage.io
import skimage.color
from os import listdir
from os.path import isfile, join
import matplotlib.pyplot as plt
from keras.datasets import mnist
from keras.models import Sequential
from keras.layers import Dense, Dropout, Flatten
from keras.layers import Conv2D, MaxPooling2D
from keras.preprocessing.image import ImageDataGenerator
from keras import backend as K
from keras import optimizers
import h5py
__author__ = "Divya Bhaskara"
# Training parameters
#batch_size = 128
epochs = 10
num_classes = 3
classes = ['falling', 'sitting', 'standing']
# Input image dimensions
img_rows, img_cols = 266, 400
# Extract the data
train_path = "Training/"
validation_path = "Validation/"
validation_samples = 107
batch_size = 16
total_samples = 528
def grayscale(img):
rows = img.shape[0]
cols = img.shape[1]
print(img.shape)
averaged = np.zeros((rows, cols))
for r in range(rows):
for c in range(cols):
luminance = .21 * img[r][c][0] + .72 * img[r][c][1] + .07 * img[r][c][2]
averaged[r][c] = luminance
return averaged
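# A vectorised sketch of the same luminance conversion (assumes img is an RGB ndarray);
# it avoids the per-pixel Python loops above.
def grayscale_vectorized(img):
    weights = np.array([.21, .72, .07])
    return img[..., :3] @ weights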
if __name__ == "__main__":
model = Sequential()
if K.image_data_format() == 'channels_first':
input_shape = (3, img_rows, img_cols)
else:
input_shape = (img_rows, img_cols, 3)
# ----- Based off of simple deep net for CIFAR small images dataset ------
model.add(Conv2D(32, (3, 3), activation='relu', padding='same', input_shape=input_shape))
model.add(Conv2D(32, (3, 3), activation='relu'))
model.add(MaxPooling2D(pool_size=(2, 2)))
model.add(Dropout(0.25))
model.add(Conv2D(64, (3, 3), padding='same', activation='relu'))
model.add(Conv2D(64, (3, 3), activation='relu'))
model.add(MaxPooling2D(pool_size=(2, 2)))
model.add(Dropout(0.25))
model.add(Conv2D(128, (3, 3), padding='same', activation='relu'))
model.add(Conv2D(128, (3, 3), activation='relu'))
model.add(MaxPooling2D(pool_size=(2, 2)))
model.add(Dropout(0.25))
model.add(Conv2D(256, (3, 3), padding='same', activation='relu'))
model.add(Conv2D(256, (3, 3), activation='relu'))
model.add(MaxPooling2D(pool_size=(2, 2)))
model.add(Dropout(0.25))
model.add(Flatten())
model.add(Dense(512, activation='relu'))
model.add(Dropout(0.5))
model.add(Dense(num_classes, activation='softmax'))
model.compile(loss='sparse_categorical_crossentropy',
optimizer=optimizers.SGD(lr=1e-3, momentum=0.9),
metrics=['accuracy'])
# Set up data
train_datagen = ImageDataGenerator(rescale=1. / 255)
validation_datagen = ImageDataGenerator(rescale=1. / 255)
train_generator = train_datagen.flow_from_directory(
train_path,
target_size=(img_rows, img_cols),
batch_size=batch_size,
        class_mode='sparse')  # integer labels to match sparse_categorical_crossentropy
validation_generator = validation_datagen.flow_from_directory(
validation_path,
target_size=(img_rows, img_cols),
batch_size=batch_size,
        class_mode='sparse')  # integer labels to match sparse_categorical_crossentropy
start = time.time()
model.fit_generator(
train_generator,
epochs=epochs,
steps_per_epoch = total_samples//batch_size,
validation_data=validation_generator,
validation_steps=validation_samples)
# Find training time
end = time.time()
print('Training time:', end-start)
start = time.time()
# Save the model
model.save('GlennaFalling.hd5')
end = time.time()
print('Saving time: ', end-start)
|
# Copyright 2021-2022 The Kubeflow Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for kfp.components.structures."""
import os
import sys
import tempfile
import textwrap
import unittest
from absl.testing import parameterized
from kfp import compiler
from kfp.components import structures
V1_YAML_IF_PLACEHOLDER = textwrap.dedent("""\
implementation:
container:
args:
- if:
cond:
isPresent: optional_input_1
else:
- --arg2
- default
then:
- --arg1
- {inputUri: optional_input_1}
image: alpine
inputs:
- {name: optional_input_1, optional: true, type: String}
name: component_if
""")
COMPONENT_SPEC_IF_PLACEHOLDER = structures.ComponentSpec(
name='component_if',
implementation=structures.Implementation(
container=structures.ContainerSpec(
image='alpine',
args=[
structures.IfPresentPlaceholder(
if_structure=structures.IfPresentPlaceholderStructure(
input_name='optional_input_1',
then=[
'--arg1',
structures.InputUriPlaceholder(
input_name='optional_input_1'),
],
otherwise=[
'--arg2',
'default',
]))
])),
inputs={
'optional_input_1': structures.InputSpec(type='String', default=None)
},
)
V1_YAML_CONCAT_PLACEHOLDER = textwrap.dedent("""\
name: component_concat
implementation:
container:
args:
- concat: ['--arg1', {inputValue: input_prefix}]
image: alpine
inputs:
- {name: input_prefix, type: String}
""")
COMPONENT_SPEC_CONCAT_PLACEHOLDER = structures.ComponentSpec(
name='component_concat',
implementation=structures.Implementation(
container=structures.ContainerSpec(
image='alpine',
args=[
structures.ConcatPlaceholder(items=[
'--arg1',
structures.InputValuePlaceholder(input_name='input_prefix'),
])
])),
inputs={'input_prefix': structures.InputSpec(type='String')},
)
V1_YAML_NESTED_PLACEHOLDER = textwrap.dedent("""\
name: component_nested
implementation:
container:
args:
- concat:
- --arg1
- if:
cond:
isPresent: input_prefix
else:
- --arg2
- default
- concat:
- --arg1
- {inputValue: input_prefix}
then:
- --arg1
- {inputValue: input_prefix}
image: alpine
inputs:
- {name: input_prefix, optional: false, type: String}
""")
COMPONENT_SPEC_NESTED_PLACEHOLDER = structures.ComponentSpec(
name='component_nested',
implementation=structures.Implementation(
container=structures.ContainerSpec(
image='alpine',
args=[
structures.ConcatPlaceholder(items=[
'--arg1',
structures.IfPresentPlaceholder(
if_structure=structures.IfPresentPlaceholderStructure(
input_name='input_prefix',
then=[
'--arg1',
structures.InputValuePlaceholder(
input_name='input_prefix'),
],
otherwise=[
'--arg2',
'default',
structures.ConcatPlaceholder(items=[
'--arg1',
structures.InputValuePlaceholder(
input_name='input_prefix'),
]),
])),
])
])),
inputs={'input_prefix': structures.InputSpec(type='String')},
)
class StructuresTest(parameterized.TestCase):
def test_component_spec_with_placeholder_referencing_nonexisting_input_output(
self):
with self.assertRaisesRegex(
ValueError,
r'^Argument \"InputValuePlaceholder[\s\S]*\'input000\'[\s\S]*references non-existing input.'
):
structures.ComponentSpec(
name='component_1',
implementation=structures.Implementation(
container=structures.ContainerSpec(
image='alpine',
command=[
'sh',
'-c',
'set -ex\necho "$0" > "$1"',
structures.InputValuePlaceholder(
input_name='input000'),
structures.OutputPathPlaceholder(
output_name='output1'),
],
)),
inputs={'input1': structures.InputSpec(type='String')},
outputs={'output1': structures.OutputSpec(type='String')},
)
with self.assertRaisesRegex(
ValueError,
r'^Argument \"OutputPathPlaceholder[\s\S]*\'output000\'[\s\S]*references non-existing output.'
):
structures.ComponentSpec(
name='component_1',
implementation=structures.Implementation(
container=structures.ContainerSpec(
image='alpine',
command=[
'sh',
'-c',
'set -ex\necho "$0" > "$1"',
structures.InputValuePlaceholder(
input_name='input1'),
structures.OutputPathPlaceholder(
output_name='output000'),
],
)),
inputs={'input1': structures.InputSpec(type='String')},
outputs={'output1': structures.OutputSpec(type='String')},
)
def test_simple_component_spec_save_to_component_yaml(self):
# tests writing old style (less verbose) and reading in new style (more verbose)
original_component_spec = structures.ComponentSpec(
name='component_1',
implementation=structures.Implementation(
container=structures.ContainerSpec(
image='alpine',
command=[
'sh',
'-c',
'set -ex\necho "$0" > "$1"',
structures.InputValuePlaceholder(input_name='input1'),
structures.OutputParameterPlaceholder(
output_name='output1'),
],
)),
inputs={'input1': structures.InputSpec(type='String')},
outputs={'output1': structures.OutputSpec(type='String')},
)
from kfp.components import yaml_component
yaml_component = yaml_component.YamlComponent(
component_spec=original_component_spec)
with tempfile.TemporaryDirectory() as tempdir:
output_path = os.path.join(tempdir, 'component.yaml')
compiler.Compiler().compile(yaml_component, output_path)
# test that it can be read back correctly
with open(output_path, 'r') as f:
contents = f.read()
new_component_spec = structures.ComponentSpec.load_from_component_yaml(
contents)
self.assertEqual(original_component_spec, new_component_spec)
def test_simple_component_spec_load_from_v2_component_yaml(self):
component_yaml_v2 = textwrap.dedent("""\
components:
comp-component-1:
executorLabel: exec-component-1
inputDefinitions:
parameters:
input1:
parameterType: STRING
outputDefinitions:
parameters:
output1:
parameterType: STRING
deploymentSpec:
executors:
exec-component-1:
container:
command:
- sh
- -c
- 'set -ex
echo "$0" > "$1"'
- '{{$.inputs.parameters[''input1'']}}'
- '{{$.outputs.parameters[''output1''].output_file}}'
image: alpine
pipelineInfo:
name: component-1
root:
dag:
tasks:
component-1:
cachingOptions:
enableCache: true
componentRef:
name: comp-component-1
inputs:
parameters:
input1:
componentInputParameter: input1
taskInfo:
name: component-1
inputDefinitions:
parameters:
input1:
parameterType: STRING
schemaVersion: 2.1.0
sdkVersion: kfp-2.0.0-alpha.2
""")
generated_spec = structures.ComponentSpec.load_from_component_yaml(
component_yaml_v2)
expected_spec = structures.ComponentSpec(
name='component-1',
implementation=structures.Implementation(
container=structures.ContainerSpec(
image='alpine',
command=[
'sh',
'-c',
'set -ex\necho "$0" > "$1"',
structures.InputValuePlaceholder(input_name='input1'),
structures.OutputParameterPlaceholder(
output_name='output1'),
],
)),
inputs={'input1': structures.InputSpec(type='String')},
outputs={'output1': structures.OutputSpec(type='String')})
self.assertEqual(generated_spec, expected_spec)
@parameterized.parameters(
{
'yaml': V1_YAML_IF_PLACEHOLDER,
'expected_component': COMPONENT_SPEC_IF_PLACEHOLDER
},
{
'yaml': V1_YAML_CONCAT_PLACEHOLDER,
'expected_component': COMPONENT_SPEC_CONCAT_PLACEHOLDER
},
{
'yaml': V1_YAML_NESTED_PLACEHOLDER,
'expected_component': COMPONENT_SPEC_NESTED_PLACEHOLDER
},
)
def test_component_spec_placeholder_load_from_v2_component_yaml(
self, yaml, expected_component):
generated_spec = structures.ComponentSpec.load_from_component_yaml(yaml)
self.assertEqual(generated_spec, expected_component)
def test_component_spec_load_from_v1_component_yaml(self):
component_yaml_v1 = textwrap.dedent("""\
name: Component with 2 inputs and 2 outputs
inputs:
- {name: Input parameter, type: String}
- {name: Input artifact}
outputs:
- {name: Output 1}
- {name: Output 2}
implementation:
container:
image: busybox
command: [sh, -c, '
mkdir -p $(dirname "$2")
mkdir -p $(dirname "$3")
echo "$0" > "$2"
cp "$1" "$3"
'
]
args:
- {inputValue: Input parameter}
- {inputPath: Input artifact}
- {outputPath: Output 1}
- {outputPath: Output 2}
""")
generated_spec = structures.ComponentSpec.load_from_component_yaml(
component_yaml_v1)
expected_spec = structures.ComponentSpec(
name='Component with 2 inputs and 2 outputs',
implementation=structures.Implementation(
container=structures.ContainerSpec(
image='busybox',
command=[
'sh',
'-c',
(' mkdir -p $(dirname "$2") mkdir -p $(dirname "$3") '
'echo "$0" > "$2" cp "$1" "$3" '),
],
args=[
structures.InputValuePlaceholder(
input_name='input_parameter'),
structures.InputPathPlaceholder(
input_name='input_artifact'),
structures.OutputPathPlaceholder(
output_name='output_1'),
structures.OutputPathPlaceholder(
output_name='output_2'),
],
env={},
)),
inputs={
'input_parameter': structures.InputSpec(type='String'),
'input_artifact': structures.InputSpec(type='Artifact')
},
outputs={
'output_1': structures.OutputSpec(type='Artifact'),
'output_2': structures.OutputSpec(type='Artifact'),
})
self.assertEqual(generated_spec, expected_spec)
class TestInputValuePlaceholder(unittest.TestCase):
def test_to_placeholder(self):
structure = structures.InputValuePlaceholder('input1')
actual = structure.to_placeholder()
expected = "{{$.inputs.parameters['input1']}}"
self.assertEqual(
actual,
expected,
)
def test_from_placeholder_single_quote(self):
placeholder = "{{$.inputs.parameters['input1']}}"
expected = structures.InputValuePlaceholder('input1')
actual = structures.InputValuePlaceholder.from_placeholder(placeholder)
self.assertEqual(
actual,
expected,
)
def test_from_placeholder_double_single_quote(self):
placeholder = "{{$.inputs.parameters[''input1'']}}"
expected = structures.InputValuePlaceholder('input1')
actual = structures.InputValuePlaceholder.from_placeholder(placeholder)
self.assertEqual(
actual,
expected,
)
def test_from_placeholder_double_quote(self):
placeholder = '{{$.inputs.parameters["input1"]}}'
expected = structures.InputValuePlaceholder('input1')
actual = structures.InputValuePlaceholder.from_placeholder(placeholder)
self.assertEqual(
actual,
expected,
)
class TestInputPathPlaceholder(unittest.TestCase):
def test_to_placeholder(self):
structure = structures.InputPathPlaceholder('input1')
actual = structure.to_placeholder()
expected = "{{$.inputs.artifacts['input1'].path}}"
self.assertEqual(
actual,
expected,
)
def test_from_placeholder(self):
placeholder = "{{$.inputs.artifacts['input1'].path}}"
expected = structures.InputPathPlaceholder('input1')
actual = structures.InputPathPlaceholder.from_placeholder(placeholder)
self.assertEqual(
actual,
expected,
)
class TestInputUriPlaceholder(unittest.TestCase):
def test_to_placeholder(self):
structure = structures.InputUriPlaceholder('input1')
actual = structure.to_placeholder()
expected = "{{$.inputs.artifacts['input1'].uri}}"
self.assertEqual(
actual,
expected,
)
def test_from_placeholder(self):
placeholder = "{{$.inputs.artifacts['input1'].uri}}"
expected = structures.InputUriPlaceholder('input1')
actual = structures.InputUriPlaceholder.from_placeholder(placeholder)
self.assertEqual(
actual,
expected,
)
class TestOutputPathPlaceholder(unittest.TestCase):
def test_to_placeholder(self):
structure = structures.OutputPathPlaceholder('output1')
actual = structure.to_placeholder()
expected = "{{$.outputs.artifacts['output1'].path}}"
self.assertEqual(
actual,
expected,
)
def test_from_placeholder(self):
placeholder = "{{$.outputs.artifacts['output1'].path}}"
expected = structures.OutputPathPlaceholder('output1')
actual = structures.OutputPathPlaceholder.from_placeholder(placeholder)
self.assertEqual(
actual,
expected,
)
class TestOutputParameterPlaceholder(unittest.TestCase):
def test_to_placeholder(self):
structure = structures.OutputParameterPlaceholder('output1')
actual = structure.to_placeholder()
expected = "{{$.outputs.parameters['output1'].output_file}}"
self.assertEqual(
actual,
expected,
)
def test_from_placeholder(self):
placeholder = "{{$.outputs.parameters['output1'].output_file}}"
expected = structures.OutputParameterPlaceholder('output1')
actual = structures.OutputParameterPlaceholder.from_placeholder(
placeholder)
self.assertEqual(
actual,
expected,
)
class TestOutputUriPlaceholder(unittest.TestCase):
def test_to_placeholder(self):
structure = structures.OutputUriPlaceholder('output1')
actual = structure.to_placeholder()
expected = "{{$.outputs.artifacts['output1'].uri}}"
self.assertEqual(
actual,
expected,
)
def test_from_placeholder(self):
placeholder = "{{$.outputs.artifacts['output1'].uri}}"
expected = structures.OutputUriPlaceholder('output1')
actual = structures.OutputUriPlaceholder.from_placeholder(placeholder)
self.assertEqual(
actual,
expected,
)
class TestIfPresentPlaceholderStructure(unittest.TestCase):
def test_otherwise(self):
obj = structures.IfPresentPlaceholderStructure(
then='then', input_name='input_name', otherwise=['something'])
self.assertEqual(obj.otherwise, ['something'])
obj = structures.IfPresentPlaceholderStructure(
then='then', input_name='input_name', otherwise=[])
self.assertEqual(obj.otherwise, None)
class TestContainerSpec(unittest.TestCase):
def test_command_and_args(self):
obj = structures.ContainerSpec(
image='image', command=['command'], args=['args'])
self.assertEqual(obj.command, ['command'])
self.assertEqual(obj.args, ['args'])
obj = structures.ContainerSpec(image='image', command=[], args=[])
self.assertEqual(obj.command, None)
self.assertEqual(obj.args, None)
def test_env(self):
obj = structures.ContainerSpec(
image='image',
command=['command'],
args=['args'],
env={'env': 'env'})
self.assertEqual(obj.env, {'env': 'env'})
obj = structures.ContainerSpec(
image='image', command=[], args=[], env={})
self.assertEqual(obj.env, None)
def test_from_container_dict_no_placeholders(self):
component_spec = structures.ComponentSpec(
name='test',
implementation=structures.Implementation(
container=structures.ContainerSpec(
image='python:3.7',
command=[
'sh', '-c',
'\nif ! [ -x "$(command -v pip)" ]; then\n python3 -m ensurepip || python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location \'kfp==2.0.0-alpha.2\' && "$0" "$@"\n',
'sh', '-ec',
'program_path=$(mktemp -d)\nprintf "%s" "$0" > "$program_path/ephemeral_component.py"\npython3 -m kfp.components.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@"\n',
'\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef concat_message(first: str, second: str) -> str:\n return first + second\n\n'
],
args=[
'--executor_input', '{{$}}', '--function_to_execute',
'concat_message'
],
env=None,
resources=None),
graph=None,
importer=None),
description=None,
inputs={
'first': structures.InputSpec(type='String', default=None),
'second': structures.InputSpec(type='String', default=None)
},
outputs={'Output': structures.OutputSpec(type='String')})
container_dict = {
'args': [
'--executor_input', '{{$}}', '--function_to_execute', 'fail_op'
],
'command': [
'sh', '-c',
'\nif ! [ -x "$(command -v pip)" ]; then\n python3 -m ensurepip || python3 -m ensurepip --user || apt-get install python3-pip\nfi\n\nPIP_DISABLE_PIP_VERSION_CHECK=1 python3 -m pip install --quiet --no-warn-script-location \'kfp==2.0.0-alpha.2\' && "$0" "$@"\n',
'sh', '-ec',
'program_path=$(mktemp -d)\nprintf "%s" "$0" > "$program_path/ephemeral_component.py"\npython3 -m kfp.components.executor_main --component_module_path "$program_path/ephemeral_component.py" "$@"\n',
'\nimport kfp\nfrom kfp import dsl\nfrom kfp.dsl import *\nfrom typing import *\n\ndef fail_op(message: str):\n """Fails."""\n import sys\n print(message)\n sys.exit(1)\n\n'
],
'image': 'python:3.7'
}
loaded_container_spec = structures.ContainerSpec.from_container_dict(
container_dict)
class TestComponentSpec(unittest.TestCase):
def test_inputs(self):
obj = structures.ComponentSpec(
name='name',
implementation=structures.Implementation(container=None),
inputs={})
self.assertEqual(obj.inputs, None)
def test_outputs(self):
obj = structures.ComponentSpec(
name='name',
implementation=structures.Implementation(container=None),
outputs={})
self.assertEqual(obj.outputs, None)
class TestInputSpec(unittest.TestCase):
def test_equality(self):
self.assertEqual(
structures.InputSpec(type='str', default=None),
structures.InputSpec(type='str', default=None))
self.assertNotEqual(
structures.InputSpec(type='str', default=None),
structures.InputSpec(type='str', default='test'))
self.assertEqual(
structures.InputSpec(type='List', default=None),
structures.InputSpec(type='typing.List', default=None))
self.assertEqual(
structures.InputSpec(type='List', default=None),
structures.InputSpec(type='typing.List[int]', default=None))
self.assertEqual(
structures.InputSpec(type='List'),
structures.InputSpec(type='typing.List[typing.Dict[str, str]]'))
def test_optional(self):
input_spec = structures.InputSpec(type='str', default='test')
self.assertEqual(input_spec.default, 'test')
self.assertEqual(input_spec._optional, True)
input_spec = structures.InputSpec(type='str', default=None)
self.assertEqual(input_spec.default, None)
self.assertEqual(input_spec._optional, True)
input_spec = structures.InputSpec(type='str')
self.assertEqual(input_spec.default, None)
self.assertEqual(input_spec._optional, False)
def test_from_ir_parameter_dict(self):
parameter_dict = {'parameterType': 'STRING'}
input_spec = structures.InputSpec.from_ir_parameter_dict(parameter_dict)
self.assertEqual(input_spec.type, 'String')
self.assertEqual(input_spec.default, None)
parameter_dict = {'parameterType': 'NUMBER_INTEGER'}
input_spec = structures.InputSpec.from_ir_parameter_dict(parameter_dict)
self.assertEqual(input_spec.type, 'Integer')
self.assertEqual(input_spec.default, None)
parameter_dict = {
'defaultValue': 'default value',
'parameterType': 'STRING'
}
input_spec = structures.InputSpec.from_ir_parameter_dict(parameter_dict)
self.assertEqual(input_spec.type, 'String')
self.assertEqual(input_spec.default, 'default value')
input_spec = structures.InputSpec.from_ir_parameter_dict(parameter_dict)
self.assertEqual(input_spec.type, 'String')
self.assertEqual(input_spec.default, 'default value')
class TestOutputSpec(parameterized.TestCase):
def test_from_ir_parameter_dict(self):
parameter_dict = {'parameterType': 'STRING'}
output_spec = structures.OutputSpec.from_ir_parameter_dict(
parameter_dict)
self.assertEqual(output_spec.type, 'String')
artifact_dict = {
'artifactType': {
'schemaTitle': 'system.Artifact',
'schemaVersion': '0.0.1'
}
}
output_spec = structures.OutputSpec.from_ir_parameter_dict(
artifact_dict)
self.assertEqual(output_spec.type, 'Artifact')
class TestProcessCommandArg(unittest.TestCase):
def test_string(self):
arg = 'test'
struct = structures.maybe_convert_command_arg_to_placeholder(arg)
self.assertEqual(struct, arg)
def test_input_value_placeholder(self):
arg = "{{$.inputs.parameters['input1']}}"
actual = structures.maybe_convert_command_arg_to_placeholder(arg)
expected = structures.InputValuePlaceholder(input_name='input1')
self.assertEqual(actual, expected)
def test_input_path_placeholder(self):
arg = "{{$.inputs.artifacts['input1'].path}}"
actual = structures.maybe_convert_command_arg_to_placeholder(arg)
expected = structures.InputPathPlaceholder('input1')
self.assertEqual(actual, expected)
def test_input_uri_placeholder(self):
arg = "{{$.inputs.artifacts['input1'].uri}}"
actual = structures.maybe_convert_command_arg_to_placeholder(arg)
expected = structures.InputUriPlaceholder('input1')
self.assertEqual(actual, expected)
def test_output_path_placeholder(self):
arg = "{{$.outputs.artifacts['output1'].path}}"
actual = structures.maybe_convert_command_arg_to_placeholder(arg)
expected = structures.OutputPathPlaceholder('output1')
self.assertEqual(actual, expected)
def test_output_uri_placeholder(self):
placeholder = "{{$.outputs.artifacts['output1'].uri}}"
actual = structures.maybe_convert_command_arg_to_placeholder(
placeholder)
expected = structures.OutputUriPlaceholder('output1')
self.assertEqual(actual, expected)
def test_output_parameter_placeholder(self):
placeholder = "{{$.outputs.parameters['output1'].output_file}}"
actual = structures.maybe_convert_command_arg_to_placeholder(
placeholder)
expected = structures.OutputParameterPlaceholder('output1')
self.assertEqual(actual, expected)
def test_concat_placeholder(self):
placeholder = "{{$.inputs.parameters[''input1'']}}+{{$.inputs.parameters[''input2'']}}"
actual = structures.maybe_convert_command_arg_to_placeholder(
placeholder)
expected = structures.ConcatPlaceholder(items=[
structures.InputValuePlaceholder(input_name='input1'), '+',
structures.InputValuePlaceholder(input_name='input2')
])
self.assertEqual(actual, expected)
V1_YAML = textwrap.dedent("""\
implementation:
container:
args:
- if:
cond:
isPresent: optional_input_1
else:
- --arg2
- default
then:
- --arg1
- {inputUri: optional_input_1}
image: alpine
inputs:
- {name: optional_input_1, optional: true, type: String}
name: component_if
""")
COMPILER_CLI_TEST_DATA_DIR = os.path.join(
os.path.dirname(os.path.dirname(__file__)), 'compiler_cli_tests',
'test_data')
SUPPORTED_COMPONENTS_COMPILE_TEST_CASES = [
{
'file': 'pipeline_with_importer',
'component_name': 'pass_through_op'
},
{
'file': 'pipeline_with_env',
'component_name': 'print_env_op'
},
{
'file': 'pipeline_with_loops',
'component_name': 'args_generator_op'
},
{
'file': 'pipeline_with_loops',
'component_name': 'print_struct'
},
{
'file': 'pipeline_with_loops',
'component_name': 'print_text'
},
{
'file': 'v2_component_with_pip_index_urls',
'component_name': 'component_op'
},
{
'file': 'pipeline_with_condition',
'component_name': 'flip_coin_op'
},
{
'file': 'pipeline_with_condition',
'component_name': 'print_op'
},
{
'file': 'pipeline_with_task_final_status',
'component_name': 'fail_op'
},
{
'file': 'pipeline_with_task_final_status',
'component_name': 'print_op'
},
{
'file': 'pipeline_with_loops_and_conditions',
'component_name': 'args_generator_op'
},
{
'file': 'pipeline_with_loops_and_conditions',
'component_name': 'flip_coin_op'
},
{
'file': 'pipeline_with_loops_and_conditions',
'component_name': 'print_struct'
},
{
'file': 'pipeline_with_loops_and_conditions',
'component_name': 'print_text'
},
{
'file': 'v2_component_with_optional_inputs',
'component_name': 'component_op'
},
{
'file': 'lightweight_python_functions_v2_with_outputs',
'component_name': 'add_numbers'
},
{
'file': 'lightweight_python_functions_v2_with_outputs',
'component_name': 'concat_message'
},
{
'file': 'lightweight_python_functions_v2_with_outputs',
'component_name': 'output_artifact'
},
{
'file': 'pipeline_with_nested_loops',
'component_name': 'print_op'
},
{
'file': 'pipeline_with_nested_conditions',
'component_name': 'flip_coin_op'
},
{
'file': 'pipeline_with_nested_conditions',
'component_name': 'print_op'
},
{
'file': 'pipeline_with_params_containing_format',
'component_name': 'print_op'
},
{
'file': 'pipeline_with_params_containing_format',
'component_name': 'print_op2'
},
{
'file': 'pipeline_with_exit_handler',
'component_name': 'fail_op'
},
{
'file': 'pipeline_with_exit_handler',
'component_name': 'print_op'
},
{
'file': 'pipeline_with_placeholders',
'component_name': 'print_op'
},
]
class TestReadInComponent(parameterized.TestCase):
def test_read_v1(self):
component_spec = structures.ComponentSpec.load_from_component_yaml(
V1_YAML_IF_PLACEHOLDER)
self.assertEqual(component_spec.name, 'component-if')
self.assertEqual(component_spec.implementation.container.image,
'alpine')
@parameterized.parameters(SUPPORTED_COMPONENTS_COMPILE_TEST_CASES)
def test_read_in_all_v2_components(self, file, component_name):
try:
sys.path.insert(0, COMPILER_CLI_TEST_DATA_DIR)
mod = __import__(file, fromlist=[component_name])
component = getattr(mod, component_name)
finally:
del sys.path[0]
with tempfile.TemporaryDirectory() as tmpdir:
component_file = os.path.join(tmpdir, 'component.yaml')
compiler.Compiler().compile(component, component_file)
with open(component_file, 'r') as f:
yaml_str = f.read()
loaded_component_spec = structures.ComponentSpec.load_from_component_yaml(
yaml_str)
self.assertEqual(component.component_spec, loaded_component_spec)
def test_simple_placeholder(self):
compiled_yaml = textwrap.dedent("""
components:
comp-component1:
executorLabel: exec-component1
inputDefinitions:
parameters:
input1:
parameterType: STRING
outputDefinitions:
artifacts:
output1:
artifactType:
schemaTitle: system.Artifact
schemaVersion: 0.0.1
deploymentSpec:
executors:
exec-component1:
container:
args:
- '{{$.inputs.parameters[''input1'']}}'
- '{{$.outputs.artifacts[''output1''].path}}'
command:
- sh
- -c
- echo "$0" >> "$1"
image: alpine
pipelineInfo:
name: component1
root:
dag:
tasks:
component1:
cachingOptions:
enableCache: true
componentRef:
name: comp-component1
inputs:
parameters:
input1:
componentInputParameter: input1
taskInfo:
name: component1
inputDefinitions:
parameters:
input1:
parameterType: STRING
schemaVersion: 2.1.0
sdkVersion: kfp-2.0.0-alpha.2""")
loaded_component_spec = structures.ComponentSpec.load_from_component_yaml(
compiled_yaml)
component_spec = structures.ComponentSpec(
name='component1',
implementation=structures.Implementation(
container=structures.ContainerSpec(
image='alpine',
command=['sh', '-c', 'echo "$0" >> "$1"'],
args=[
structures.InputValuePlaceholder(input_name='input1'),
structures.OutputPathPlaceholder(output_name='output1')
],
env=None,
resources=None),
graph=None,
importer=None),
description=None,
inputs={
'input1': structures.InputSpec(type='String', default=None)
},
outputs={'output1': structures.OutputSpec(type='Artifact')})
self.assertEqual(loaded_component_spec, component_spec)
def test_if_placeholder(self):
compiled_yaml = textwrap.dedent("""
components:
comp-if:
executorLabel: exec-if
inputDefinitions:
parameters:
optional_input_1:
parameterType: STRING
deploymentSpec:
executors:
exec-if:
container:
args:
- 'input: '
- '{{$.inputs.parameters[''optional_input_1'']}}'
command:
- sh
- -c
- echo "$0" "$1"
image: alpine
pipelineInfo:
name: if
root:
dag:
tasks:
if:
cachingOptions:
enableCache: true
componentRef:
name: comp-if
inputs:
parameters:
optional_input_1:
componentInputParameter: optional_input_1
taskInfo:
name: if
inputDefinitions:
parameters:
optional_input_1:
parameterType: STRING
schemaVersion: 2.1.0
sdkVersion: kfp-2.0.0-alpha.2""")
loaded_component_spec = structures.ComponentSpec.load_from_component_yaml(
compiled_yaml)
component_spec = structures.ComponentSpec(
name='if',
implementation=structures.Implementation(
container=structures.ContainerSpec(
image='alpine',
command=['sh', '-c', 'echo "$0" "$1"'],
args=[
'input: ',
structures.InputValuePlaceholder(
input_name='optional_input_1')
],
env=None,
resources=None),
graph=None,
importer=None),
description=None,
inputs={
'optional_input_1':
structures.InputSpec(type='String', default=None)
},
outputs=None)
self.assertEqual(loaded_component_spec, component_spec)
def test_concat_placeholder(self):
compiled_yaml = textwrap.dedent("""
components:
comp-concat:
executorLabel: exec-concat
inputDefinitions:
parameters:
input1:
parameterType: STRING
input2:
parameterType: STRING
deploymentSpec:
executors:
exec-concat:
container:
command:
- sh
- -c
- echo "$0"
- '{{$.inputs.parameters[''input1'']}}+{{$.inputs.parameters[''input2'']}}'
image: alpine
pipelineInfo:
name: concat
root:
dag:
tasks:
concat:
cachingOptions:
enableCache: true
componentRef:
name: comp-concat
inputs:
parameters:
input1:
componentInputParameter: input1
input2:
componentInputParameter: input2
taskInfo:
name: concat
inputDefinitions:
parameters:
input1:
parameterType: STRING
input2:
parameterType: STRING
schemaVersion: 2.1.0
sdkVersion: kfp-2.0.0-alpha.2""")
loaded_component_spec = structures.ComponentSpec.load_from_component_yaml(
compiled_yaml)
component_spec = structures.ComponentSpec(
name='concat',
implementation=structures.Implementation(
container=structures.ContainerSpec(
image='alpine',
command=[
'sh', '-c', 'echo "$0"',
structures.ConcatPlaceholder(items=[
structures.InputValuePlaceholder(
input_name='input1'), '+',
structures.InputValuePlaceholder(
input_name='input2')
])
],
args=None,
env=None,
resources=None),
graph=None,
importer=None),
description=None,
inputs={
'input1': structures.InputSpec(type='String', default=None),
'input2': structures.InputSpec(type='String', default=None)
},
outputs=None)
self.assertEqual(loaded_component_spec, component_spec)
if __name__ == '__main__':
unittest.main()
|
from os import walk, mkdir
from PIL import Image
from shutil import copyfile, rmtree
GENERATED_WARNING = "/** \n * This file was auto-generated with object-generator.py \n */\n\n"
def capitalizeName(name):
if ("zother" in name):
name = name[1:]
names = name.split('-')
result = ""
for n in names:
result += n.capitalize()
result += " "
return result.strip()
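# Example (hypothetical file names): capitalizeName('tall-tree') -> 'Tall Tree';
# capitalizeName('zother-rock') -> 'Other Rock' (the leading 'z', presumably used
# for sort ordering, is dropped).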
#
# Css Generation
#
css_file = open("css/objects.css", "a+")
css_file.truncate(0)
css_file.write(GENERATED_WARNING)
rmtree('img/flat-tiles')
mkdir('img/flat-tiles')
for (dirpath, dirnames, filenames) in walk("img/tiles"):
css_file.write("/* {} */\n".format(dirpath))
for file in filenames:
if file != ".DS_Store":
copyfile(dirpath + '/' + file, "img/flat-tiles/" + file)
name = file[:-4]
css_file.write(".sprite-icon.{} {{ \n background-image: url('/nh/img/flat-tiles/{}');\n}}\n ".format(name, file))
css_file.close()
print("Generated {}".format(css_file.name))
#
# JS Generation
#
js_file = open("js/data/sprites.js", "a+")
js_file.truncate(0)
js_file.writelines([
GENERATED_WARNING,
"'use strict';\n\n",
"var data = {\n",
" tiles: [\n"])
SIZE_LISTS = ["buildings", "structures"]
SIZE_LIST_RATIO = {"buildings":60, "structures":60}
def writeSizedObject(directory, file_path, building_name):
with Image.open(file_path) as img:
width, height = img.size
width = (width / SIZE_LIST_RATIO[directory]) * 16
height = (height / SIZE_LIST_RATIO[directory]) * 16
js_file.write(" '{}': {{\n".format(building_name))
js_file.write(" 'sprite': '{}',\n".format(file_path))
js_file.write(" 'width': {},\n".format(width))
js_file.write(" 'height': {},\n".format(height))
js_file.write(" },\n")
for (dirpath, dirnames, filenames) in walk("img/tiles"):
directory = dirpath[dirpath.rindex('/') + 1:]
if (directory != "tiles"):
if (directory in SIZE_LISTS):
js_file.write(" {}: {{\n".format(directory))
else:
js_file.write(" {}: [\n".format(directory))
for file in filenames:
if file != ".DS_Store":
name = file[:-4]
if (directory in SIZE_LISTS):
writeSizedObject(directory, "{}/{}".format('img/flat-tiles', file), name)
else:
js_file.write(" '{}',\n".format(name))
if (directory in SIZE_LISTS):
js_file.write(" },\n")
else:
js_file.write(" ],\n")
js_file.writelines([
"};\n\n",
"// nodeJS would also like to use this file\n",
"if (typeof module !== 'undefined') {\n",
" module.exports = data;\n",
"}\n\n"])
js_file.close()
print("Generated {}".format(js_file.name))
#
# JS Map files
#
js_file = open("js/data/map-sprites.js", "a+")
js_file.truncate(0)
js_file.writelines([
GENERATED_WARNING,
"'use strict';\n\n",
"var maps = [\n"])
for (dirpath, dirnames, filenames) in walk("img/layouts/maps/"):
for file in sorted(filenames):
if file != ".DS_Store":
js_file.write(" '{}',\n".format(file))
js_file.writelines([
"];\n\n",
"// nodeJS would also like to use this file\n",
"if (typeof module !== 'undefined') {\n",
" module.exports = maps;\n",
"}\n\n"])
js_file.close()
print("Generated {}".format(js_file.name))
#
# Html Generation
# The generated file must be copied and pasted into index.html in the indicated section
#
html_file = open("objects.html", "a+")
html_file.truncate(0)
html_file.write("<!-- \n * START auto-generated section from objects.html \n --> \n\n")
def writeObjectHtml(data, type, directory, name):
if (directory == 'maptiles/'):
pass
else:
html_file.write(' ')
html_file.write('<li class="tools {}" data-{}="{}"><div class="link"><i class="sprite-icon {}"></i>{}</div></li>\n'
.format(type, data, name, name, capitalizeName(name)))
for (dirpath, dirnames, filenames) in walk("img/tiles"):
directory = dirpath[dirpath.rindex('/') + 1:]
if (directory == "tiles"):
continue
html_file.writelines([
' ',
'<li class="divider"></li>\n',
' ',
'<li class="has-dropdown">\n',
' ',
' <a href="#" class="show-for-xlarge-up" title="{}">{}</a>\n'.format(directory,capitalizeName(directory)),
' ',
' <a href="#" class="hide-for-xlarge-up" title="{}">{}</a>\n'.format(directory,capitalizeName(directory)),
' ',
' <ul class="dropdown">\n',
])
subCats = { "flowers": "", "structures": "", "other" : ""}
num = 0
for file in sorted(filenames):
if file != ".DS_Store":
name = file[:-4]
if(directory in subCats):
old = subCats[directory]
subCats[directory] = file[:file.find('-')]
if (old != subCats[directory]):
if old != "":
# spacing so the dropdown list opens at the correct height
num += 26
html_file.write(' </ul>\n')
html_file.writelines([
' ',
' <li class="tools parent" data-type="{}"><div class="link"><i class="sprite-icon"></i>{}</div></li>\n'.format(subCats[directory],capitalizeName(subCats[directory])),
' ',
' <ul class="submenu" style="top:{}px">\n'.format(num),
])
if (directory in subCats):
html_file.write(' ')
if (directory in SIZE_LISTS):
html_file.write('<li class="tools {}" data-id="{}"><div class="link"><i class="sprite-icon {}"></i>{}</div></li>\n'
.format("building", name, name, capitalizeName(name[name.find('-'):])))
else:
html_file.write('<li class="tools {}" data-type="{}"><div class="link"><i class="sprite-icon {}"></i>{}</div></li>\n'
.format("brush", name, name, capitalizeName(name[name.find('-'):])))
elif (directory in SIZE_LISTS):
writeObjectHtml("id", "building", "", name)
else:
writeObjectHtml("type","brush", directory + "/", name)
if(directory in subCats):
html_file.write(' </ul>\n')
html_file.writelines([
' </ul>\n',
' </li>\n',
])
html_file.write("<!-- \n * STOP auto-generated section from objects.html \n --> \n")
html_file.close()
print("Generated {}".format(html_file.name))
|
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: POGOProtos/Map/Fort/FortSponsor.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf.internal import enum_type_wrapper
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
from google.protobuf import descriptor_pb2
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor.FileDescriptor(
name='POGOProtos/Map/Fort/FortSponsor.proto',
package='POGOProtos.Map.Fort',
syntax='proto3',
serialized_pb=_b('\n%POGOProtos/Map/Fort/FortSponsor.proto\x12\x13POGOProtos.Map.Fort*B\n\x0b\x46ortSponsor\x12\x11\n\rUNSET_SPONSOR\x10\x00\x12\r\n\tMCDONALDS\x10\x01\x12\x11\n\rPOKEMON_STORE\x10\x02\x62\x06proto3')
)
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
_FORTSPONSOR = _descriptor.EnumDescriptor(
name='FortSponsor',
full_name='POGOProtos.Map.Fort.FortSponsor',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='UNSET_SPONSOR', index=0, number=0,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='MCDONALDS', index=1, number=1,
options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='POKEMON_STORE', index=2, number=2,
options=None,
type=None),
],
containing_type=None,
options=None,
serialized_start=62,
serialized_end=128,
)
_sym_db.RegisterEnumDescriptor(_FORTSPONSOR)
FortSponsor = enum_type_wrapper.EnumTypeWrapper(_FORTSPONSOR)
UNSET_SPONSOR = 0
MCDONALDS = 1
POKEMON_STORE = 2
DESCRIPTOR.enum_types_by_name['FortSponsor'] = _FORTSPONSOR
# @@protoc_insertion_point(module_scope)
|
import multiprocessing
def do_calculation(data):
return data * 2
def start_process():
print('Starting', multiprocessing.current_process().name)
if __name__ == '__main__':
inputs = list(range(10))
print('Input :', inputs)
    # map() returns a lazy iterator in Python 3, so force evaluation for printing
    builtin_outputs = list(map(do_calculation, inputs))
    print('Built-in:', builtin_outputs)
pool_size = multiprocessing.cpu_count() * 2
pool = multiprocessing.Pool(
processes=pool_size,
initializer=start_process,
)
pool_outputs = pool.map(do_calculation, inputs)
pool.close() # no more tasks
pool.join() # wrap up current tasks
print('Pool :', pool_outputs)
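    # A minimal alternative sketch (not part of the original example): the pool
    # can also be managed with a "with" statement. Note the context manager
    # calls terminate() on exit rather than close()/join().
    #     with multiprocessing.Pool(processes=pool_size,
    #                               initializer=start_process) as pool:
    #         pool_outputs = pool.map(do_calculation, inputs)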
|
#!/usr/bin/env python
import argparse
import sys
import os
import cv2
import pickle
import numpy as np
from matplotlib.image import imread
from os.path import isdir, join, exists, splitext
from os import listdir
from pathlib import Path
# -d /media/miro/WD/jetbot_obstacle_avoidance/data
# -d /media/miro/WD/L-CAS/LCAS_1200/data
# =size 224
if __name__ == "__main__":
    parser = argparse.ArgumentParser(description="")
parser.add_argument("-d", type=str, required=True, help="Data directory")
parser.add_argument("-size", type=int, default=0, help="Size of the output, no resize for size set to 0")
parser.add_argument("-prefix", type=str, default="data", help="Pickle file prefix")
args = parser.parse_args()
if not exists(args.d):
exit("{} does not exist".format(args.d))
if not isdir(args.d):
exit("{} is not a directory".format(args.d))
name_to_idx = dict()
idx_to_name = dict()
class_count = 0
X = []
y = []
for c in listdir(args.d):
c_dir = join(args.d, c)
if isdir(c_dir):
name_to_idx[c] = class_count
idx_to_name[class_count] = c
for f in listdir(c_dir):
if splitext(f)[-1] == ".jpg":
print(join(args.d, c, f))
im = imread(join(args.d, c, f))
# print("{} {} {} {} {}".format(type(im), im.shape, im[0].dtype, np.amin(im), np.amax(im)))
# im: <class 'numpy.ndarray'> (2464, 3280, 3) uint8 0 255
# 2 ways to resize image: opencv and scikit-image
if args.size != 0:
im = cv2.resize(im, (args.size, args.size), interpolation=cv2.INTER_CUBIC)
# im: <class 'numpy.ndarray'> (224, 224, 3) uint8 0 255
X.append(im)
y.append(class_count)
class_count = class_count + 1
X = np.array(X)
y = np.array(y).astype(np.uint16)
print(name_to_idx)
print(idx_to_name)
print("X", X[0].dtype, X.shape)
print("y", y[0].dtype, y.shape)
# pickle data
with open(join(Path(args.d).parent, args.prefix + "_" + str(args.size) + '.pckl'), 'wb') as f:
pickle.dump([X, y, name_to_idx, idx_to_name], f)
print("Exported to ", f.name)
|
import string
import time
import os
import argparse
from pathlib import Path
import json
import matplotlib.pyplot as plt
import seaborn as sns
from distutils import dir_util
from pprint import pprint
import pickle
import pandas as pd
import random
import pprint
import numpy as np
import datetime
# BayesCMD packages
from bayescmd.results_handling import kde_plot
from bayescmd.results_handling import scatter_dist_plot
from bayescmd.results_handling import data_import
from bayescmd.results_handling import plot_repeated_outputs
from bayescmd.results_handling import histogram_plot
from bayescmd.results_handling import data_merge_by_batch
from bayescmd.abc import import_actual_data
from bayescmd.abc import priors_creator
from bayescmd.abc import get_distance
from bayescmd.abc import inputParse
from bayescmd.bcmdModel import ModelBCMD
from subprocess import TimeoutExpired, CalledProcessError # noqa
# Google BigQuery
from google.cloud import bigquery
get_ipython().run_line_magic('load_ext', 'google.cloud.bigquery')
client = bigquery.Client.from_service_account_json(
"../gcloud/hypothermia-auth.json"
)
def generate_histogram_query(project, neonate, n_bins, distance):
histogram_query = """
SELECT
MIN(data.{distance}) AS min,
MAX(data.{distance}) AS max,
COUNT(data.{distance}) AS num,
INTEGER((data.{distance}-value.min)/(value.max-value.min)*{n_bins}) AS group_
FROM
[{project}:neo_desat.{neonate}_gradient] data
CROSS JOIN (
SELECT
MAX({distance}) AS max,
MIN({distance}) AS min
FROM
[{project}:neo_desat.{neonate}_gradient]) value
GROUP BY
group_
ORDER BY
group_
""".format(neonate=neonate, n_bins=n_bins, distance=distance, project=project)
return histogram_query
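# Hypothetical sketch of running the histogram query. It uses legacy SQL table
# syntax ([project:dataset.table]), so the job must be configured for legacy
# SQL; the project/neonate/distance values mirror the configuration built
# further below:
#     job_config = bigquery.QueryJobConfig(use_legacy_sql=True)
#     hist_df = client.query(
#         generate_histogram_query('hypothermia-bayescmd', 'neo007', 100, 'NRMSE'),
#         job_config=job_config,
#     ).to_dataframe()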
# In[4]:
def generate_posterior_query(project, neonate, distance, parameters, limit=50000):
unpacked_params = ",\n".join(parameters)
posterior_query = """
SELECT
{unpacked_params},
{distance},
idx
FROM
`{project}.neo_desat.{neonate}_gradient`
ORDER BY
{distance} ASC
LIMIT
{limit}
""".format(project=project, neonate=neonate, unpacked_params=unpacked_params, distance=distance, limit=limit)
return posterior_query
def load_configuration(neonate, verbose=False):
current_file = Path(os.path.abspath(''))
config_file = os.path.join(current_file.parents[1],
'config_files',
'abc',
'neo_config.json'
)
with open(config_file, 'r') as conf_f:
conf = json.load(conf_f)
params = conf['priors']
input_path = os.path.join(current_file.parents[1],
'data',
'formatted_data',
'{}_formatted.csv'.format(neonate))
d0 = import_actual_data(input_path)
targets = conf['targets']
model_name = conf['model_name']
inputs = conf['inputs']
config = {
"model_name": model_name,
"targets": targets,
"inputs": inputs,
"parameters": params,
"input_path": input_path,
"zero_flag": conf['zero_flag'],
}
if verbose:
pprint(config)
return config, d0
configuration = {}
neonates = ['neo007', 'neo021']
class Timer(object):
def __init__(self, name=None):
self.name = name
def __enter__(self):
self.tstart = time.time()
def __exit__(self, type, value, traceback):
if self.name:
            print('[%s]' % self.name, end=' ')
print('Elapsed: %s' % (time.time() - self.tstart))
def run_model(model):
"""Run a BCMD Model.
Parameters
----------
model : :obj:`bayescmd.bcmdModel.ModelBCMD`
An initialised instance of a ModelBCMD class.
Returns
-------
output : :obj:`dict`
Dictionary of parsed model output.
"""
model.create_initialised_input()
model.run_from_buffer()
output = model.output_parse()
return output
def get_output(model_name,
p,
times,
input_data,
d0,
targets,
distance='euclidean',
zero_flag=None):
"""Generate model output and distances.
Parameters
----------
model_name : :obj:`str`
Name of model
p : :obj:`dict`
Dict of form {'parameter': value} for which posteriors are being
investigated.
times : :obj:`list` of :obj:`float`
List of times at which the data was collected.
input_data : :obj:`dict`
Dictionary of input data as generated by :obj:`abc.inputParse`.
d0 : :obj:`dict`
Dictionary of real data, as generated by :obj:`abc.import_actual_data`.
targets : :obj:`list` of :obj:`str`
List of model outputs against which the model is being optimised.
distance : :obj:`str`
Distance measure. One of 'euclidean', 'manhattan', 'MAE', 'MSE'.
zero_flag : dict
Dictionary of form target(:obj:`str`): bool, where bool indicates
whether to zero that target.
Note: zero_flag keys should match targets list.
Returns
-------
:obj:`tuple`
A tuple of (p, model output data).
"""
_model = ModelBCMD(
model_name, inputs=input_data, params=p, times=times, outputs=targets, suppress=True)
output = run_model(_model)
dist = get_distance(
d0,
output,
targets,
distance=distance.split("_")[-1],
zero_flag=zero_flag)
try:
for k, v in dist.items():
p[k] = v
except AttributeError as e:
print("Error in finding distance.\n dist is {}:".format(dist))
pprint.pprint(p)
pprint.pprint(output)
raise e
if zero_flag:
for k, boolean in zero_flag.items():
if boolean:
output[k] = [x - output[k][0] for x in output[k]]
return p, output
# In[10]:
def get_repeated_outputs(df,
model_name,
parameters,
input_path,
inputs,
targets,
n_repeats,
zero_flag,
neonate,
tolerance=None,
limit=None,
frac=None,
openopt_path=None,
offset=None,
distance='euclidean'
):
"""Generate model output and distances multiple times.
Parameters
----------
model_name : :obj:`str`
        Name of the model. Should match the modeldef file for the model being
        generated, i.e. model_name of 'model1' should have a modeldef file
'model1.modeldef'.
parameters : :obj:`dict` of :obj:`str`: :obj:`tuple`
Dict of model parameters to compare, with value tuple of the prior max
and min.
input_path : :obj:`str`
Path to the true data file
inputs : :obj:`list` of :obj:`str`
List of model inputs.
targets : :obj:`list` of :obj:`str`
        List of model outputs against which the model is being optimised.
    n_repeats : :obj:`int`
        Number of times to generate output data
    frac : :obj:`float`
        Fraction of results to consider. Should be given as a percentage i.e.
        1=1%, 0.1=0.1%
    zero_flag : dict
        Dictionary of form target(:obj:`str`): bool, where bool indicates
        whether to zero that target.
Note: zero_flag keys should match targets list.
openopt_path : :obj:`str` or :obj:`None`
Path to the openopt data file if it exists. Default is None.
offset : :obj:`dict`
Dictionary of offset parameters if they are needed
distance : :obj:`str`, optional
Distance measure. One of 'euclidean', 'manhattan', 'MAE', 'MSE'.
Returns
-------
fig : :obj:`matplotlib.figure`
Figure containing all axes.
"""
p_names = list(parameters.keys())
sorted_df = df.sort_values(by=distance)
if tolerance:
accepted_limit = sum(df[distance].values < tolerance)
elif limit:
accepted_limit = limit
elif frac:
accepted_limit = frac_calculator(sorted_df, frac)
else:
raise ValueError('No limit or fraction given.')
df_list = []
if n_repeats > accepted_limit:
print(
"Setting number of repeats to quarter of the posterior size\n",
file=sys.stderr)
n_repeats = int(accepted_limit / 4)
d0 = import_actual_data(input_path)
input_data = inputParse(d0, inputs)
true_data = pd.read_csv(input_path)
times = true_data['t'].values
if openopt_path:
openopt_data = pd.read_csv(openopt_path)
if n_repeats > accepted_limit:
raise ValueError(
"Number of requested model runs greater than posterior size:"
"\n\tPosterior Size: {}\n\tNumber of runs: {}".format(
accepted_limit, n_repeats))
rand_selection = list(range(accepted_limit))
random.shuffle(rand_selection)
outputs_list = []
posteriors = sorted_df.iloc[:accepted_limit][p_names].values
select_idx = 0
with Timer("Running repeat outputs"):
while len(outputs_list) < n_repeats:
try:
idx = rand_selection.pop()
p = dict(zip(p_names, posteriors[idx]))
if offset:
p = {**p, **offset}
output = get_output(
model_name,
p,
times,
input_data,
d0,
targets,
distance=distance,
zero_flag=zero_flag)
outputs_list.append(output)
print("Sample {}, idx:{}".format(len(outputs_list), idx))
except (TimeoutError, TimeoutExpired) as e:
print("Timed out for Sample {}, idx:{}".format(
len(outputs_list), idx))
pprint.pprint(p)
rand_selection.insert(0, idx)
except (CalledProcessError) as e:
print("CalledProcessError for Sample {}, idx:{}".format(
len(outputs_list), idx))
pprint.pprint(p)
rand_selection.insert(0, idx)
d = {"Errors": {}, "Outputs": {}}
d['Errors']['Average'] = np.nanmean(
[o[0]['TOTAL'] for o in outputs_list])
for target in targets:
d['Errors'][target] = np.nanmean(
[o[0][target] for o in outputs_list])
d['Outputs'][target] = [o[1][target] for o in outputs_list]
for ii, target in enumerate(targets):
x = [j for j in times for n in range(len(d['Outputs'][target]))]
with Timer('Transposing {}'.format(target)):
y = np.array(d['Outputs'][target]).transpose()
y = y.ravel()
with Timer("Crafting DataFrame for {}".format(target)):
model_name_col = [neonate]*len(x)
target_col = [target]*len(x)
df1 = pd.DataFrame(
{"Time": x, "Posterior": y, "Neonate": model_name_col, "Output": target_col})
with Timer("Appending dataframe for {}".format(target)):
df_list.append(df1.copy())
del df1
return pd.concat(df_list), true_data
# In[ ]:
labels = {"t": "Time (sec)",
"SaO2sup": "SaO2 (%)",
"P_a": "ABP (mmHg)",
"PaCO2": "PaCO$_2$ (mmHg)",
"temp": "Temperature ($^{\circ}$C)",
"TOI": "TOI (%)",
"HbT": "$\Delta$HbT $(\mu M)$",
"Hbdiff": "$\Delta$HbD $(\mu M)$",
"CCO": "$\Delta$CCO $(\mu M)$"
}
LIM = 4000
neonates = ["neo007", "neo021"]
signals = ['CCO', 'HbT', 'Hbdiff']
for SIGNAL in [''] + signals:
print("Working on {} ".format(SIGNAL if SIGNAL != '' else "TOTAL"))
for NEONATE in neonates:
print("Working on {} ".format(NEONATE))
configuration[NEONATE] = {}
config, d0 = load_configuration(NEONATE)
configuration[NEONATE]['bayescmd_config'] = config
configuration[NEONATE]['original_data'] = d0
if SIGNAL != '':
distance = SIGNAL + "_NRMSE"
else:
distance = "NRMSE"
configuration[NEONATE]['histogram_query'] = generate_histogram_query('hypothermia-bayescmd',
NEONATE,
100,
distance)
configuration[NEONATE]['posterior_query'] = generate_posterior_query('hypothermia-bayescmd',
NEONATE,
distance,
list(
configuration[NEONATE]['bayescmd_config']['parameters'].keys()),
limit=LIM)
# Set config and create figure path
figPath = "/home/buck06191/Dropbox/phd/desat_neonate/ABC/Figures/{}_gradient/{}".format(
NEONATE, distance)
dir_util.mkpath(figPath)
# Get posterior
print("\tRunning SQL query")
df_post = client.query(
configuration[NEONATE]['posterior_query']).to_dataframe()
# Plot posterior predictive
config["offset"] = {}
print("\tGetting Posterior Predictive")
with Timer("Getting outputs"):
df_dict = {}
            df_list, true_data = get_repeated_outputs(df_post, n_repeats=LIM // 2, limit=LIM,
distance=distance, neonate=NEONATE, **config)
df_dict[NEONATE] = df_list
ylabel_dict = labels
all_outputs = pd.concat(list(df_dict.values()))
with Timer("Plotting line plot"):
g = sns.FacetGrid(all_outputs, row='Output',
hue='Neonate', height=2.5, aspect=2, sharey=False)
for ii, ax in enumerate(g.axes.flatten()):
                ax.plot(true_data['t'], true_data[signals[ii]], 'k-')
g = (g.map_dataframe(sns.lineplot, x='Time', y='Posterior',
estimator=np.median, ci=95)).add_legend()
plt.setp(g._legend.get_title(), fontsize=12)
plt.setp(g._legend.get_texts(), fontsize=11)
g = g.set_titles('')
g = g.set_xlabels('Time (sec)', fontsize=12)
for ii, ax in enumerate(g.axes.flatten()):
ax.set_ylabel(ylabel_dict[signals[ii]], fontsize=12)
nticks = 5
ax.set_xticklabels(ax.get_xticklabels(), fontsize=11)
ax.autoscale()
y_min, y_max = ax.get_ylim()
ax.set_yticks(np.linspace(y_min, y_max, nticks))
ax.set_yticklabels(["{:.2g}".format(y)
for y in np.linspace(y_min, y_max, nticks)], fontdict={'fontsize': 11})
ax.set_title(
string.ascii_lowercase[ii] + ")", fontsize=10, loc='left')
g.fig.align_ylabels()
g.fig.subplots_adjust(hspace=0.15)
g.savefig(figPath+'/{}_postpred.png'.format(NEONATE),
dpi=250, bbox_inches='tight', transparent=True)
|
import t
class b07(t.Test):
class TestResource(t.Resource):
def forbidden(self, req, rsp):
return req.cookies.get('id') != 'foo'
def to_html(self, req, rsp):
return "nom nom"
def test_ok(self):
self.req.headers['cookie'] = 'id=foo'
self.go()
t.eq(self.rsp.status, '200 OK')
t.eq(self.rsp.body, 'nom nom')
def test_not_ok(self):
self.req.headers['cookie'] = 'bar'
self.go()
t.eq(self.rsp.status, '403 Forbidden')
        t.eq(self.rsp.body, '')
|
import app
from waitress import serve
from paste.translogger import TransLogger
serve(TransLogger(app.app, setup_console_handler=False), port=3005, host="0.0.0.0")
|
# SPDX-License-Identifier: Apache-2.0
#
# Copyright (C) 2021, ARM Limited and contributors.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
r"""
This module provides classes to build kernel modules from source on the fly.
Here is an example of such a module::
import time
from lisa.target import Target
from lisa.trace import DmesgCollector
from lisa._kmod import KmodSrc
from lisa.utils import setup_logging
setup_logging()
target = Target(
kind='linux',
name='my_board',
host='192.158.1.38',
username='root',
password='root',
lazy_platinfo=True,
kernel_src='/path/to/kernel/tree/',
kmod_build_env='alpine',
# kmod_make_vars={'CC': 'clang'},
)
# Example module from: https://tldp.org/LDP/lkmpg/2.6/html/x279.html
code = r'''
/*
* hello-4.c - Demonstrates module documentation.
*/
#include <linux/module.h> /* Needed by all modules */
#include <linux/kernel.h> /* Needed for KERN_INFO */
#include <linux/init.h> /* Needed for the macros */
#define DRIVER_AUTHOR "XXX"
#define DRIVER_DESC "A sample driver"
static int __init init_hello(void)
{
printk(KERN_INFO "Hello, world\n");
return 0;
}
static void __exit cleanup_hello(void)
{
printk(KERN_INFO "Goodbye, worldn");
}
module_init(init_hello);
module_exit(cleanup_hello);
/*
* You can use strings, like this:
*/
/*
* Get rid of taint message by declaring code as GPL.
*/
MODULE_LICENSE("GPL");
/*
* Or with defines, like this
*/
MODULE_AUTHOR(DRIVER_AUTHOR); /* Who wrote this module? */
MODULE_DESCRIPTION(DRIVER_DESC); /* What does this module do */
'''
# This object represents the kernel sources, and needs to be turned into a
# DynamicKmod to be compiled and run.
src = KmodSrc({'hello.c': code})
# Create a DynamicKmod from the target and the module sources.
kmod = target.get_kmod(src=src)
# Collect the dmesg output while running the module
dmesg_coll = DmesgCollector(target, output_path='dmesg.log')
# kmod.run() will compile the module, install it and then uninstall it at the
# end of the "with" statement.
with dmesg_coll, kmod.run():
time.sleep(1)
for entry in dmesg_coll.entries:
print(entry)
"""
import abc
import urllib.request
import urllib.parse
from pathlib import Path, PurePosixPath
import tempfile
import tarfile
import gzip
import bz2
import lzma
import os
import io
import shutil
import contextlib
import subprocess
import copy
import re
import functools
import bisect
import threading
import hashlib
import itertools
import logging
import datetime
import pwd
import glob
import collections
import hashlib
from shlex import quote
from io import BytesIO
from elftools.elf.elffile import ELFFile
from devlib.target import KernelVersion, TypedKernelConfig, KernelConfigTristate
from devlib.host import LocalConnection
from devlib.exception import TargetStableError
from lisa.utils import nullcontext, Loggable, LISA_CACHE_HOME, checksum, DirCache, chain_cm, memoized, LISA_HOST_ABI, subprocess_log, SerializeViaConstructor
from lisa._assets import ASSETS_PATH, HOST_PATH
from lisa._unshare import ensure_root
import lisa._git as git
_ALPINE_ROOTFS_URL = 'https://dl-cdn.alpinelinux.org/alpine/v{minor}/releases/{arch}/alpine-minirootfs-{version}-{arch}.tar.gz'
def _any_abi_to_kernel_arch(abi):
return {
'armeabi': 'arm',
'armv7': 'arm',
'aarch64': 'arm64',
}.get(abi, abi)
def _url_path(url):
return PurePosixPath(
urllib.parse.unquote(
urllib.parse.urlparse(url).path
)
)
@contextlib.contextmanager
def _make_chroot(make_vars, bind_paths=None, alpine_version='3.14.2', overlay_backend=None):
"""
Create a chroot folder ready to be used to build a kernel.
"""
logger = logging.getLogger(f'{__name__}.alpine_chroot')
def mount_binds(chroot, bind_paths, mount=True):
for src, dst in bind_paths.items():
dst = Path(dst).resolve()
dst = (chroot / dst.relative_to('/')).resolve()
# This will be unmounted by the destroy script
if mount:
dst.mkdir(parents=True, exist_ok=True)
cmd = ['mount', '--bind', '--', src, dst]
else:
cmd = ['umount', '-n', '--', dst]
subprocess_log(cmd, logger=logger, level=logging.DEBUG)
def populate(key, path, init_cache=True):
version, arch, packages, use_qemu = key
path = path.resolve()
# Packages have already been installed, so we can speed things up a
# bit
if init_cache:
packages = packages.split(' ')
_version = version.split('.')
minor = '.'.join(_version[:-1])
url = _ALPINE_ROOTFS_URL.format(
minor=minor,
arch=arch,
version=version,
)
with tempfile.NamedTemporaryFile(dir=path) as f:
tar_path = Path(f.name).resolve()
logger.info(f'Setting up Alpine chroot from {url} -> {tar_path}')
with urllib.request.urlopen(url) as url, open(tar_path, 'wb') as f:
shutil.copyfileobj(url, f)
with tarfile.open(tar_path, 'r') as f:
f.extractall(path=path)
else:
packages = []
shutil.copy('/etc/resolv.conf', path / 'etc' / 'resolv.conf')
if packages:
cmd = _make_chroot_cmd(path, ['apk', 'add', *packages])
subprocess_log(cmd, logger=logger, level=logging.DEBUG)
packages = [
'bash',
'binutils',
'coreutils',
'diffutils',
'make',
'file',
'gawk',
'sed',
'musl-dev',
'elfutils-dev',
'gmp-dev',
'libffi-dev',
'openssl-dev',
'linux-headers',
'musl',
'bison',
'flex',
'python3',
# TODO: As of october 2021 for some reason, the kernel still needs GCC
# to build some tools even when compiling with clang
'gcc',
]
make_vars = make_vars or {}
try:
cc = make_vars['CC']
except KeyError:
cc = 'gcc'
if cc == 'clang':
packages.extend([
'lld',
'llvm',
])
packages.append(cc)
devlib_arch = make_vars.get('ARCH', LISA_HOST_ABI)
use_qemu = (
devlib_arch != LISA_HOST_ABI and
# Since clang binaries support cross compilation without issues,
# there is no need to use QEMU that will slow everything down.
make_vars.get('CC') != 'clang'
)
qemu_arch = {
'arm64': 'aarch64',
'armeabi': 'arm',
'armv7': 'arm',
}.get(devlib_arch, devlib_arch)
binfmt_path = Path('/proc/sys/fs/binfmt_misc/', f'qemu-{qemu_arch}')
if use_qemu and not binfmt_path.exists():
raise ValueError(f'Alpine chroot is setup for {qemu_arch} architecture but QEMU userspace emulation is not installed on the host (missing {binfmt_path})')
if use_qemu:
chroot_arch = devlib_arch
else:
chroot_arch = LISA_HOST_ABI
alpine_arch = {
'arm64': 'aarch64',
'armeabi': 'armv7',
}.get(chroot_arch, chroot_arch)
# Add LISA static binaries inside the chroot
bind_paths = {
**dict(bind_paths or {}),
str((Path(ASSETS_PATH) / 'binaries' / devlib_arch).resolve()): '/usr/local/bin/'
}
dir_cache = DirCache(
category='alpine_chroot',
populate=populate,
)
key = (
alpine_version,
alpine_arch,
' '.join(sorted(packages)),
use_qemu,
)
cache_path = dir_cache.get_entry(key)
with _overlay_folders([cache_path], backend=overlay_backend) as path:
# We need to "repopulate" the overlay in order to get a working
# system with /etc/resolv.conf etc
try:
populate(key, path, init_cache=False)
mount_binds(path, bind_paths)
yield path
finally:
mount_binds(path, bind_paths, mount=False)
def _make_chroot_cmd(chroot, cmd):
chroot = Path(chroot).resolve()
return ['chroot', chroot, *cmd]
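# Example (hypothetical path): _make_chroot_cmd('/tmp/alpine', ['apk', 'add', 'gcc'])
# returns ['chroot', PosixPath('/tmp/alpine'), 'apk', 'add', 'gcc'].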
@contextlib.contextmanager
def _overlay_folders(lowers, upper=None, backend=None, copy_filter=None):
"""
Overlay folders on top of each other.
:param lowers: List of read-only lower layers. The end of the list takes
precedence.
:type lowers: list(str)
:param upper: Read-write upper layer taking all the changes made to the
mount point. If left out, a throw-away upper layer will be used.
:type upper: str or None
:param backend: Backend to use, one of:
* ``overlayfs``: Uses Linux overlayfs mounts. This is the fastest and
most space efficient method.
* ``copy``: This uses plain copies to simulate overlayfs. Note that the
simulation is not entirely transparent, as a higher layer is not able
to hide files in lower layers like it can do with overlayfs and
whiteout files.
* ``None``: defaults to ``overlayfs``.
:type backend: str or None
"""
logger = logging.getLogger(f'{__name__}.overlay')
backend = KernelTree._resolve_overlay_backend(backend)
def make_dir(root, name):
path = Path(root) / name
path.mkdir(parents=True)
return path.resolve()
with tempfile.TemporaryDirectory() as temp:
mount_point = make_dir(temp, 'overlaid')
# Work folder has to be in the same filesystem as the upper dir
if upper:
@contextlib.contextmanager
def dirs_cm():
with tempfile.TemporaryDirectory(
# We cannot use a subfolder of "upper" to host "work" so we
# have to use the parent folder.
dir=upper.parent,
prefix='.overlayfs_work_'
) as work:
yield dict(
work=Path(work),
upper=Path(upper),
)
else:
@contextlib.contextmanager
def dirs_cm():
yield dict(
work=make_dir(temp, 'work'),
upper=make_dir(temp, 'upper'),
)
@contextlib.contextmanager
def do_mount(dirs):
dirs['lower'] = ':'.join(map(str, reversed(list(lowers))))
cmd = ['mount', '-t', 'overlay', 'overlay', '-o', 'lowerdir={lower},workdir={work},upperdir={upper}'.format(**dirs), '--', mount_point]
subprocess_log(cmd, logger=logger, level=logging.DEBUG)
try:
yield mount_point
finally:
# Use lazy unmount, so it will not fail if it still in use for
# some reason. That said, all supporting folders are going to
# be removed so an external user working outside of the "with"
# statement will have issues, which is expected (and not
# supported).
subprocess_log(
['umount', '-nl', '--', mount_point],
logger=logger,
level=logging.DEBUG
)
if copy_filter is None:
copy_filter = lambda src, dst: True
@contextlib.contextmanager
def do_copy(dirs):
def _copytree(src, dst):
base_src = Path(src)
base_dst = Path(dst)
def copy_file(src, dst):
if not copy_filter(
src=Path(src).relative_to(base_src),
dst=Path(dst).relative_to(base_dst)
):
return dst
if os.path.islink(src):
if os.path.lexists(dst):
os.unlink(dst)
linkto = os.readlink(src)
os.symlink(linkto, dst)
shutil.copystat(src, dst, follow_symlinks=False)
return dst
else:
try:
dst_mtime = os.path.getmtime(dst)
except OSError:
if os.path.lexists(dst):
os.remove(dst)
return shutil.copy2(src=src, dst=dst)
else:
src_mtime = os.path.getmtime(src)
# Only copy files that have been modified more recently
if src_mtime > dst_mtime:
os.remove(dst)
return shutil.copy2(src=src, dst=dst)
else:
return dst
shutil.copytree(
src=str(src),
dst=str(dst),
# Make symlinks go through copy_function
symlinks=False,
dirs_exist_ok=True,
copy_function=copy_file,
)
logger.debug(f'Copying trees instead of overlayfs for {mount_point}')
for src in lowers:
_copytree(src=src, dst=mount_point)
try:
yield mount_point
finally:
# If the user selected a custom upper layer, sync back the
# result in it
if upper:
shutil.rmtree(upper)
shutil.move(src=mount_point, dst=upper)
if backend == 'overlayfs':
action = do_mount
elif backend == 'copy':
action = do_copy
else:
        raise ValueError(f'Unknown overlay backend "{backend}"')
with dirs_cm() as dirs:
with action(dirs) as mnt:
yield mnt
class OverlayResource(abc.ABC):
"""
Resource to be applied as an overlay in an existing folder.
"""
@abc.abstractmethod
def write_to(self, dst):
"""
Write the resource to the ``dst`` path.
"""
pass
@abc.abstractmethod
def _get_checksum(self):
"""
Return the checksum of the resource.
"""
pass
class _FileOverlayBase(OverlayResource):
"""
:meta public:
Base class for file overlays.
"""
pass
class FileOverlay(_FileOverlayBase):
"""
Overlay representing a file content.
"""
@classmethod
def from_content(cls, content):
"""
Build the file from its ``content``.
"""
return _ContentFileOverlay(content)
@classmethod
def from_path(cls, path, decompress=False):
"""
Build the file from an existing path.
:param decompress: If ``True``, the file will be decompressed according
to its extension. E.g. an ``.gz`` file would be inferred as gzip
and decompressed. If ``False``, the extension is ignored.
:type decompress: bool
"""
if decompress:
return _CompressedPathFileOverlay(path)
else:
return _PathFileOverlay(path)
class _PathOverlayBase(_FileOverlayBase):
"""
:meta public:
Base class for path-based overlays.
"""
# This is racy with write_to(), but we are not trying to make something
# really secure here, we just want to compute a unique token to be used as
# a cache key
def _get_checksum(self):
with open(self.path, 'rb') as f:
check = checksum(f, 'sha256')
return f'{self.__class__.__name__}-{check}'
def __str__(self):
return str(self.path)
class _PathFileOverlay(_PathOverlayBase):
def __init__(self, path):
self.path = path
def write_to(self, dst):
shutil.copy2(self.path, dst)
class _CompressedPathFileOverlay(_PathOverlayBase):
_OPEN_FUNCTIONS = {
'.gz': gzip.open,
'.xz': lzma.open,
'.bz': bz2.open,
}
def __init__(self, path):
path = Path(path)
try:
open_f = self._OPEN_FUNCTIONS[path.suffix]
except KeyError:
            raise ValueError(f'Could not detect compression format of "{path}". Tried {", ".join(self._OPEN_FUNCTIONS.keys())}')
else:
self.open_f = open_f
self.path = path
def write_to(self, dst):
with self.open_f(self.path) as src, open(dst, 'wb') as dst:
shutil.copyfileobj(src, dst)
class _ContentFileOverlay(_FileOverlayBase):
def __init__(self, content):
self.content = content
def write_to(self, dst):
with open(dst, 'wb') as f:
f.write(self.content)
def _get_checksum(self):
check = checksum(io.BytesIO(self.content), 'sha256')
return f'{self.__class__.__name__}-{check}'
class TarOverlay(_PathOverlayBase):
"""
The ``__init__`` constructor is considered private. Use factory classmethod
to create instances.
"""
def __init__(self, path):
self.path = path
@classmethod
def from_path(cls, path):
"""
Build the overlay from the ``path`` to an existing tar archive.
"""
return cls(path)
def write_to(self, dst):
with tarfile.open(self.path) as tar:
tar.extractall(dst)
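# Illustration of how overlay resources are typically mapped to destinations
# relative to the kernel tree root (this mirrors the usage in
# KernelTree.from_target() further below):
#     overlays = {
#         FileOverlay.from_path('config.gz', decompress=True): '.config',
#         TarOverlay.from_path('kheaders.tar.xz'): '.',
#     }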
class KernelTree(Loggable, SerializeViaConstructor):
"""
:param path_cm: Context manager factory expected to return a path to a
prepared kernel tree.
:type path_cm: collections.abc.Callable
:param make_vars: Variables passed on ``make`` command line when preparing
the kernel tree.
:type make_vars: dict(str, object)
:param build_env: Build environment to use. Can be one of:
* ``alpine``: (default) Alpine linux chroot, providing a controlled
environment
* ``host``: No specific env is setup, whatever the host is using will
be picked.
* ``None``: defaults to ``host``.
:type build_env: str or None
:param overlay_backend: Backend used to create folder overlays. One of:
* ``overlayfs``: Use overlayfs Linux filesystem. This is the fastest
          and the recommended option.
* ``copy``: Use plain folder copies. This can be used as an alternative
if overlayfs cannot be used for some reason.
* ``None``: default to ``overlayfs``.
"""
# Preserve checksum attribute when serializing, as it will allow hitting
# the module cache without actually setting up the kernel tree in many
# cases.
_SERIALIZE_PRESERVED_ATTRS = {'checksum'}
_KERNEL_ARCHIVE_URL_TEMPLATE = 'https://cdn.kernel.org/pub/linux/kernel/v{main_number}.x/linux-{version}.tar.xz'
# We are only really interested in clang starting from version 13,
# when the "musttail" return attribute was introduced.
# On top of that, the kernel does not handle clang < 10.0.1
_MIN_CLANG_VERSION = 11
def __init__(self, path_cm, make_vars, build_env=None, overlay_backend=None):
self._make_path_cm = path_cm
self.build_env = self._resolve_build_env(build_env)
self.make_vars = make_vars or {}
self.overlay_backend = self._resolve_overlay_backend(overlay_backend)
self._path_cm = None
self.path = None
self.checksum = None
@staticmethod
def _resolve_build_env(build_env):
return build_env or 'host'
@staticmethod
def _resolve_overlay_backend(overlay_backend):
return overlay_backend or 'overlayfs'
def _to_spec(self):
return dict(
path=self.path,
checksum=self.checksum,
)
def _update_spec(self, spec):
def update(x):
val = spec.get(x)
if val is not None:
setattr(self, x, val)
if spec:
for attr in ('path', 'checksum'):
update(attr)
# It is expected that the same object can be used more than once, so
# __enter__ and __exit__ must not do anything destructive.
def __enter__(self):
cm = self._make_path_cm()
spec = cm.__enter__()
assert 'path' in spec
self._update_spec(spec)
self._path_cm = cm
return self
def __exit__(self, *args, **kwargs):
# Reset the path as it cannot be used outside the with statement but
# not the checksum, since it could still be reused to hit the cache
self.path = None
try:
ret = self._path_cm.__exit__(*args, **kwargs)
finally:
self._path_cm = None
return ret
_URL_CACHE = {}
@classmethod
def _open_url(cls, version):
url, response = cls._get_url_response(version)
if response is None:
response = urllib.request.urlopen(url)
return response
@classmethod
def _get_url(cls, version):
url, response = cls._get_url_response(version)
with (response or nullcontext()):
return url
@classmethod
def _get_url_response(cls, version):
def replace_None(tuple_):
return tuple(
0 if x is None else x
for x in tuple_
)
def make_url(parts):
# Remove trailing 0 as this seems to be the pattern followed by
# cdn.kernel.org URLs
parts = parts if parts[-1] else parts[:-1]
return cls._KERNEL_ARCHIVE_URL_TEMPLATE.format(
main_number=parts[0],
version='.'.join(map(str, parts)),
)
@functools.lru_cache
def get_available_versions():
url = make_url(orig_version.parts)
parsed = urllib.parse.urlparse(url)
index_url = parsed._replace(path=str(_url_path(url).parent)).geturl()
with urllib.request.urlopen(index_url) as url_f:
html = url_f.read()
files = re.findall(rb'href="linux-(.*)\.tar\.xz"', html)
return sorted(
(
replace_None(
KernelVersion(name.decode()).parts
)
for name in files
),
)
def decrement_version(parts):
parts = replace_None(parts)
versions = get_available_versions()
i = bisect.bisect(versions, parts) - 2
try:
return versions[i]
except IndexError:
raise ValueError(f'Could not find any kernel tarball for version {parts}')
orig_version = version
parts = version.parts
logger = cls.get_logger()
try:
url = cls._URL_CACHE[str(version)]
except KeyError:
while True:
url = make_url(parts)
logger.debug(f'Trying to fetch {url} for kernel version {orig_version} ...')
try:
response = urllib.request.urlopen(url)
except urllib.error.HTTPError as e:
# Maybe this is a development kernel and no release has been
# done for that version yet, keep trying with lower versions
if e.code == 404:
try:
parts = decrement_version(parts)
except ValueError:
                            raise ValueError(f'Cannot fetch any tarball matching {orig_version}')
else:
cls._URL_CACHE[str(version)] = response.url
return (url, response)
else:
return (url, None)
@classmethod
def _prepare_tree(cls, path, make_vars, build_env, apply_overlays):
logger = cls.get_logger()
_make_vars = [
f'{name}={val}'
for name, val in sorted((make_vars or {}).items())
if val is not None
]
nr_cpus = int(os.cpu_count() * 1.5)
def make(*targets):
return ['make', f'-j{nr_cpus}', '-C', path, '--', *_make_vars, *targets]
# We need to clean first, as binaries compiled in e.g. scripts/ will
# probably not work inside the Alpine container, since they would be
# linked against shared libraries on the host system
if build_env == 'host':
cmds = [None]
else:
cmds = [
make('mrproper')
]
cmds.append(make('olddefconfig', 'modules_prepare'))
bind_paths = {path: path}
if build_env == 'alpine':
@contextlib.contextmanager
def cmd_cm(cmds):
with _make_chroot(bind_paths=bind_paths, make_vars=make_vars) as chroot:
yield [
_make_chroot_cmd(chroot, cmd) if cmd else None
for cmd in cmds
]
else:
cmd_cm = lambda cmds: nullcontext(cmds)
with cmd_cm(cmds) as _cmds:
pre, post = _cmds
logger.info(f'Preparing kernel tree for modules')
if pre is not None:
subprocess_log(pre, logger=logger, level=logging.DEBUG)
# Apply the overlays before running make, so that it sees the
# correct headers and conf etc
apply_overlays()
subprocess_log(post, logger=logger, level=logging.DEBUG)
# Re-apply the overlays, since we could have overwritten important
# things, such as include/linux/vermagic.h
apply_overlays()
@classmethod
def _process_make_vars(cls, build_env, make_vars, abi=None):
env = {
k: v
for k, v in (
(k, os.getenv(k)) for k in {
'CROSS_COMPILE',
'ARCH',
}
)
if v is not None
}
make_vars = {
**env,
**dict(make_vars or {})
}
if abi is None:
abi = make_vars.get('ARCH', LISA_HOST_ABI)
arch = _any_abi_to_kernel_arch(abi)
make_vars['ARCH'] = arch
make_vars, cc = cls._resolve_toolchain(abi, make_vars, build_env)
if build_env == 'alpine':
if cc == 'clang':
make_vars['LLVM'] = '1'
else:
# Disable CROSS_COMPILE as we are going to build in a "native"
# Alpine chroot, so there is no need for a cross compiler
make_vars.pop('CROSS_COMPILE', None)
return (make_vars, cc)
@classmethod
def _check_cc_version(cls, cc):
if cc == 'clang':
version = subprocess.check_output([cc, '--version'])
m = re.match(rb'.*clang version ([0-9]+)\.', version)
if m:
major = int(m.group(1))
if major >= cls._MIN_CLANG_VERSION:
return True
else:
return True
return False
@classmethod
def _resolve_toolchain(cls, abi, make_vars, build_env):
logger = cls.get_logger()
build_env = KernelTree._resolve_build_env(build_env)
if abi == LISA_HOST_ABI:
toolchain = None
else:
try:
toolchain = make_vars['CROSS_COMPILE']
except KeyError:
try:
toolchain = os.environ['CROSS_COMPILE']
except KeyError:
if abi in ('arm64', 'aarch64'):
toolchain = 'aarch64-linux-gnu-'
elif 'arm' in abi:
toolchain = 'arm-linux-gnueabi-'
else:
raise KeyError('CROSS_COMPILE env var needs to be set')
logger.debug(f'CROSS_COMPILE env var not set, assuming "{toolchain}"')
commands = {
# Try clang first so we can use the "musttail" return attribute
# when possible
**{
cc: [cc, *([f'--target={toolchain}'] if toolchain else []), '-x' 'c', '-c', '-', '-o', '/dev/null']
# Try the default "clang" name first in case it's good enough
for cc in ['clang'] + [
f'clang-{i}'
# Try the most recent ones first
for i in reversed(
# Cover for the next 10 years starting from 2021
range(cls._MIN_CLANG_VERSION, cls._MIN_CLANG_VERSION + 10 * 2)
)
]
},
'gcc': [f'{toolchain or ""}gcc', '-x' 'c', '-c', '-', '-o', '/dev/null']
}
cc = None
if 'CC' in make_vars:
cc = make_vars['CC'] or 'gcc'
try:
commands = {cc: commands[cc]}
except KeyError:
commands = {}
# Default to clang on alpine, as it will be in a high-enough version
# and since Alpine does not ship any cross-toolchain for GCC, this will
# avoid having to use QEMU userspace emulation which is really slow.
elif build_env == 'alpine':
cc = 'clang'
# Only run the check on host build env, as other build envs are
# expected to be correctly configured.
if build_env == 'host' and commands:
for cc, cmd in commands.items():
pretty_cmd = ' '.join(cmd)
try:
subprocess.check_output(
cmd,
# Most basic compiler input that will not do anything.
input=b';',
stderr=subprocess.STDOUT
)
except subprocess.CalledProcessError as e:
logger.debug(f'Checking {cc} compiler: {pretty_cmd} failed with:\n{e.output.decode()}')
continue
except FileNotFoundError as e:
logger.debug(f'Checking {cc} compiler: {e}')
continue
else:
if cls._check_cc_version(cc):
break
else:
raise ValueError(f'Could not find a working toolchain for CROSS_COMPILE={toolchain}')
if cc is None:
raise ValueError(f'Could not detect which compiler to use')
logger.info(f'CROSS_COMPILE not set by user, detected CROSS_COMPILE={toolchain} and CC={cc}')
detected = {}
if toolchain:
detected['CROSS_COMPILE'] = toolchain
# For some reason Kbuild does not appreciate CC=gcc, even though
# it's happy with CC=clang
if cc != 'gcc':
detected['CC'] = cc
make_vars = {
**detected,
**make_vars,
}
return (make_vars, cc)
@classmethod
@SerializeViaConstructor.constructor
def from_target(cls, target, tree_path=None, make_vars=None, cache=True, build_env=None, overlay_backend=None):
"""
Build the tree from the given :class:`lisa.target.Target`.
This will try multiple strategies in order to get the best kernel tree
possible given the input:
* Using ``/lib/modules/$(uname -r)/build`` path. This is limited by
          a number of factors since that tree is usually incomplete and can
have symlinks pointing outside of it, making it unusable for bind
mounts.
* Using either a source tree or a kernel.org tarball matching
kernel version (downloaded automatically).
        * The source tree as above, plus ``/sys/kernel/kheaders.tar.xz`` and
``/proc/config.gz``. This is the method that is the most likely
to lead to a working kernel module as it allows precisely
matching the vermagic. It will require the following configs:
.. code-block:: sh
# For /sys/kheaders.tar.xz
CONFIG_IKHEADERS=y
# For /proc/config.gz
CONFIG_IKCONFIG=y
:param target: Target to use.
:type target: lisa.target.Target
:param tree_path: If provided, a path to a kernel tree. If not given,
other places will be tried (like /lib/modules if possible, or
downloading a tarball from kernel.org for the matching version.)
:type tree_path: str or None
:param make_vars: Variables passed on ``make`` command line.
:type make_vars: dict(str, object)
:param cache: If ``True``, will attempt to cache intermediate steps.
:type cache: bool
:param build_env: See :class:`lisa._kmod.KernelTree`.
:type build_env: str or None
:param overlay_backend: See :class:`lisa._kmod.KernelTree`.
:type overlay_backend: str or None
"""
make_vars, cc = cls._process_make_vars(
make_vars=make_vars,
abi=target.plat_info['abi'],
build_env=build_env,
)
kernel_info = target.plat_info['kernel']
@contextlib.contextmanager
def from_installed_headers():
"""
Get the kernel tree from /lib/modules
"""
if build_env == 'alpine':
raise ValueError(f'Building from /lib/modules is not supported with the Alpine build environment as /lib/modules might not be self contained (i.e. symlinks pointing outside)')
else:
if isinstance(target.conn, LocalConnection):
# We could use this instead, except that Ubuntu does not have
# /proc/config.gz, so that is a bit more "reliable"
# target.plat_info['kernel']['config']['CONFIG_CC_VERSION_TEXT']
with open('/proc/version', 'r') as f:
proc_version = f.read()
# If the compiler used to build the kernel is different from the
# one we selected, we unfortunately cannot use the installed
# headers under /lib/modules, since we won't be able to re-run
# modules_prepare (unless we make a copy, resolving all
# symlinks in passing).
if cc in proc_version:
uname_r = target.execute('uname -r').strip()
target_path = Path('/lib', 'modules', uname_r, 'build')
# On a local connection, we can just directly yield the path
# directly rather than make a copy, as it will not be written to.
if target_path.is_dir():
# Unfortunately, we cannot use cls.from_overlays() and
# re-run modules_prepare, as some distro such as Ubuntu
# create build folders full of relative symlinks
# pointing outside of it, which renders it unusable in
# an overlay. Without the overlay, we cannot modify
# anything since the folder is owned by an apt package.
yield dict(
path=target_path,
# Since we basically know it's the distro kernel,
# we can cache the result
checksum=hashlib.sha256(
f'distro-kernel-{uname_r}'.encode()
).hexdigest()
)
else:
raise ValueError(f'{target_path} is not a folder')
else:
raise ValueError(f'The chosen compiler ({cc}) is different from the one used to build the kernel ({proc_version}), /lib/modules/ tree will not be used')
else:
raise ValueError(f'Building from /lib/modules/.../build/ is only supported for local targets')
@contextlib.contextmanager
def from_sysfs_headers():
"""
From /sys/kheaders.tar.xz
"""
version = kernel_info['version']
config = kernel_info['config']
if not (
config.get('CONFIG_IKHEADERS') == KernelConfigTristate.YES and
config.get('CONFIG_IKCONFIG_PROC') == KernelConfigTristate.YES
):
raise ValueError('Needs CONFIG_IKHEADERS=y and CONFIG_IKCONFIG_PROC=y')
else:
with tempfile.TemporaryDirectory() as temp:
temp = Path(temp)
target.cached_pull('/proc/config.gz', str(temp))
target.cached_pull('/sys/kernel/kheaders.tar.xz', str(temp), via_temp=True)
with cls.from_overlays(
version=version,
overlays={
FileOverlay.from_path(temp / 'config.gz', decompress=True): '.config',
TarOverlay.from_path(temp / 'kheaders.tar.xz'): '.',
},
make_vars=make_vars,
cache=cache,
tree_path=tree_path,
build_env=build_env,
overlay_backend=overlay_backend,
) as tree:
yield tree._to_spec()
@contextlib.contextmanager
def from_user_tree():
"""
Purely from the tree passed by the user.
"""
if tree_path is None:
raise ValueError('Use tree_path != None to build from a user-provided tree')
else:
# We still need to run make modules_prepare on the provided
# tree
with cls.from_overlays(
tree_path=tree_path,
version=kernel_info['version'],
cache=cache,
make_vars=make_vars,
build_env=build_env,
overlay_backend=overlay_backend,
) as tree:
yield tree._to_spec()
@contextlib.contextmanager
def try_loaders(loaders):
logger = cls.get_logger()
exceps = []
inner_excep = None
for loader in loaders:
logger.debug(f'Trying to load kernel tree using loader {loader.__name__} ...')
try:
with loader() as spec:
try:
yield spec
break
except BaseException as e:
inner_excep = e
raise
except Exception as e:
if inner_excep is e:
raise
else:
logger.debug(f'Failed to load kernel tree using loader {loader.__name__}: {e.__class__.__name__}: {e}')
exceps.append((loader, e))
else:
logger.debug(f'Loaded kernel tree using loader {loader.__name__}')
else:
excep_str = "\n".join(
f"{loader.__name__}: {e.__class__.__name__}: {e}"
for loader, e in exceps
)
raise ValueError(f'Could not load kernel trees:\n{excep_str}')
# Try these loaders in the given order, until one succeeds
loaders = [from_installed_headers, from_sysfs_headers, from_user_tree]
return cls(
path_cm=functools.partial(try_loaders, loaders),
make_vars=make_vars,
build_env=build_env,
overlay_backend=overlay_backend,
)
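    # Hypothetical usage sketch: the returned tree is a re-usable context
    # manager whose path is only valid inside the "with" block (the make_vars
    # value shown is an assumption, not a requirement):
    #     with KernelTree.from_target(target, make_vars={'CC': 'clang'}) as tree:
    #         print(tree.path, tree.checksum)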
@classmethod
@SerializeViaConstructor.constructor
def from_path(cls, path, make_vars=None, cache=True, build_env=None):
"""
Build a tree from the given ``path`` to sources.
"""
return cls.from_overlays(
tree_path=path,
make_vars=make_vars,
cache=cache,
build_env=build_env,
)
@classmethod
@SerializeViaConstructor.constructor
def from_overlays(cls, version=None, tree_path=None, overlays=None, make_vars=None, cache=True, build_env=None, overlay_backend=None):
"""
Build a tree from the given overlays, to be applied on a source tree.
:param version: Version of the kernel to be used.
:type version: devlib.target.KernelVersion or str
:param overlays: List of overlays to apply on the tree.
:type overlays: list(OverlayResource)
"""
logger = cls.get_logger()
overlays = overlays or {}
make_vars, cc = cls._process_make_vars(
make_vars=make_vars,
build_env=build_env,
)
build_env = KernelTree._resolve_build_env(build_env)
overlay_backend = KernelTree._resolve_overlay_backend(overlay_backend)
if tree_path:
try:
config = Path(tree_path, '.config').read_bytes()
except FileNotFoundError:
restore_config = lambda _: None
else:
restore_config = lambda path: path.write_bytes(config)
else:
restore_config = lambda _: None
def copy_filter(src, dst, remove_obj=False):
return not (
(remove_obj and (src.suffix == '.o')) or
any(
# Skip some folders that are useless to build a kernel
# module.
path.name == '.git'
for path in src.parents
)
)
def apply_overlays(path):
# Ensure .config is present if it was at the beginning, so that it
# survives make mrproper in _prepare_tree()
restore_config(path / '.config')
for overlay, dst in overlays.items():
logger.debug(f'Unpacking overlay {overlay} -> {dst}')
overlay.write_to(os.path.join(path, dst))
def prepare_overlay(path):
cls._prepare_tree(
path,
make_vars=make_vars,
build_env=build_env,
apply_overlays=functools.partial(apply_overlays, path),
)
@contextlib.contextmanager
def overlay_cm(args):
base_path, tree_key = args
base_path = Path(base_path).resolve()
if cache and tree_key is not None:
# Compute a unique token for the overlay. It includes:
# * The hash of all overlays resources. It should be
# relatively inexpensive to compute, as most overlays are
# pretty small.
# * The key that comes with the tree passed as base_path, if it
# comes from a reliably read-only source.
# * The build environment
# * All the variables passed to "make". This is very important
# as things such as a toolchain change can make a kernel tree
# unsuitable for compiling a module.
key = (
sorted(
overlay._get_checksum()
for overlay, dst in overlays.items()
) + [
str(tree_key),
str(build_env),
] + [
                        # We also need to include the make variables in the
                        # checksum, as they can influence the kernel tree a
                        # great deal (e.g. changing toolchain)
f'{k}={v}'
for k, v in sorted((make_vars or {}).items())
]
)
def populate(key, path):
# Prepare the overlay separately from the final
# overlayfs mount point, so that we never end up
# sharing the same upper between 2 mounts, which is not
# allowed by the kernel.
with _overlay_folders(
lowers=[base_path],
upper=path,
backend=overlay_backend,
copy_filter=functools.partial(copy_filter, remove_obj=True)
) as path:
prepare_overlay(path)
dir_cache = DirCache(
category='kernels_overlays',
populate=populate,
)
cache_path = dir_cache.get_entry(key)
with _overlay_folders([base_path, cache_path], backend=overlay_backend, copy_filter=copy_filter) as path:
yield dict(
path=path,
checksum=dir_cache.get_key_token(key),
)
else:
with _overlay_folders([base_path], backend=overlay_backend, copy_filter=copy_filter) as path:
prepare_overlay(path)
yield dict(
path=path,
checksum=None,
)
if not (version is None or isinstance(version, KernelVersion)):
version = KernelVersion(version)
@contextlib.contextmanager
def tree_cm():
if tree_path:
logger.debug(f'Using provided kernel tree at: {tree_path}')
try:
repo_root = git.find_root(tree_path)
sha1 = git.get_sha1(tree_path)
patch = git.get_uncommited_patch(tree_path)
except (FileNotFoundError, subprocess.CalledProcessError):
key = None
else:
if repo_root.resolve() == Path(tree_path).resolve():
patch_sha1 = hashlib.sha1(patch.encode()).hexdigest()
key = f'{sha1}-{patch_sha1}'
else:
key = None
yield (tree_path, key)
elif version is None:
raise ValueError('Kernel version is required in order to download the kernel sources')
elif cache:
dir_cache = DirCache(
category='kernels',
populate=lambda url, path: cls._make_tree(version, path),
)
url = cls._get_url(version)
# Assume that the URL will always provide the same tarball
yield (
dir_cache.get_entry([url]),
url,
)
else:
with tempfile.TemporaryDirectory() as path:
yield (
cls._make_tree(version, path),
version,
)
cm = chain_cm(overlay_cm, tree_cm)
return cls(
path_cm=cm,
make_vars=make_vars,
build_env=build_env,
)
@classmethod
def _make_tree(cls, version, path):
response = cls._open_url(version)
filename = _url_path(response.url).name
tar_path = os.path.join(path, filename)
extract_folder = os.path.join(path, 'extract')
# Unfortunately we can't feed url_f directly to tarfile as it needs to
# seek()
try:
with response as url_f, open(tar_path, 'wb') as tar_f:
shutil.copyfileobj(url_f, tar_f)
with tarfile.open(tar_path) as tar:
# Account for a top-level folder in the archive
prefix = os.path.commonpath(
member.path
for member in tar.getmembers()
)
tar.extractall(extract_folder)
except BaseException:
with contextlib.suppress(FileNotFoundError):
shutil.rmtree(extract_folder, ignore_errors=True)
raise
finally:
with contextlib.suppress(FileNotFoundError):
os.remove(tar_path)
return os.path.join(extract_folder, prefix)
class KmodSrc(Loggable):
"""
Sources of a kernel module.
:param src: Mapping of source path to file content.
:type src: dict(str, str)
:param name: If provided, sets the name of the module. If ``None``, a name
will be created using a checksum of the sources.
:type name: str or None
"""
def __init__(self, src, name=None):
def encode(x):
if isinstance(x, str):
return x.encode('utf-8')
else:
return x
self.src = {
name: encode(content)
for name, content in src.items()
}
self._mod_name = name
@property
def c_files(self):
return {
name: content
for name, content in self.src.items()
if name.endswith('.c')
}
@property
@memoized
def checksum(self):
"""
Checksum of the module's sources.
"""
m = hashlib.sha256()
for name, content in sorted(self.src.items()):
m.update(name.encode('utf-8'))
m.update(content)
return m.hexdigest()
@property
@memoized
def mod_name(self):
"""
Name of the module.
"""
if self._mod_name:
return self._mod_name
else:
# Kernel macro MODULE_NAME_LEN
max_size = 64 - 8 - 1
return f'lisa-{self.checksum}'[:max_size]
@property
@memoized
def makefile(self):
try:
return self.src['Kbuild']
except KeyError:
try:
return self.src['Makefile']
except KeyError:
name = self.mod_name
return '\n'.join((
f'obj-m := {name}.o',
f'{name}-y := ' + ' '.join(
f'{Path(filename).stem}.o'
for filename in sorted(self.c_files.keys())
)
)).encode('utf-8')
def compile(self, kernel_tree, make_vars=None):
"""
Compile the module and returns the ``bytestring`` content of the
``.ko`` file.
:param kernel_tree: Kernel tree to build the module against.
:type kernel_tree: KernelTree
:param make_vars: Variables passed on ``make`` command line. This can
be used for variables only impacting the module, otherwise it's
better to set them when creating the ``kernel_tree``.
:type make_vars: dict(str, object) or None
"""
make_vars = dict(make_vars or {})
tree_path = Path(kernel_tree.path)
# "inherit" the build env from the KernelTree as we must use the same
# environment as what was used for "make modules_prepare"
build_env = kernel_tree.build_env
bind_paths = {tree_path: tree_path}
logger = self.logger
def populate_mod(path):
mod_path = Path(path)
src = {
**self.src,
'Kbuild': self.makefile,
}
for name, content in src.items():
with open(mod_path / name, 'wb') as f:
f.write(content)
def make_cmd(tree_path, mod_path, make_vars):
make_vars = make_vars.copy()
make_vars.update(
M=mod_path,
LISA_KMOD_NAME=self.mod_name,
)
make_vars = [
f'{name}={value}'
for name, value in sorted(make_vars.items())
if value is not None
]
cmd = ['make', '-C', tree_path, *make_vars, 'modules']
return cmd
def find_mod_file(path):
filenames = glob.glob(str(path.resolve() / '*.ko'))
if not filenames:
raise FileNotFoundError(f'Could not find .ko file in {path}')
elif len(filenames) > 1:
raise ValueError(f'Found more than one .ko file in {path}: {filenames}')
else:
return filenames[0]
if build_env == 'alpine':
@contextlib.contextmanager
def cmd_cm():
with _make_chroot(bind_paths=bind_paths, make_vars=make_vars) as chroot:
# Do not use a CM here to avoid choking on permission
# issues. Since the chroot itself will be entirely
# removed it's not a problem.
mod_path = Path(tempfile.mkdtemp(dir=chroot / 'tmp'))
cmd = make_cmd(
tree_path=tree_path,
mod_path=f'/{mod_path.relative_to(chroot)}',
make_vars=make_vars,
)
yield (mod_path, _make_chroot_cmd(chroot, cmd), {})
else:
@contextlib.contextmanager
def cmd_cm():
with tempfile.TemporaryDirectory() as mod_path:
cmd = make_cmd(
tree_path=tree_path,
mod_path=mod_path,
make_vars=make_vars,
)
yield (mod_path, cmd, {'PATH': HOST_PATH})
with cmd_cm() as (mod_path, cmd, env):
mod_path = Path(mod_path)
populate_mod(mod_path)
logger.info(f'Compiling kernel module {self.mod_name}')
env = {**os.environ, **env}
subprocess_log(cmd, logger=logger, level=logging.DEBUG, env=env)
mod_file = find_mod_file(mod_path)
with open(mod_file, 'rb') as f:
return f.read()
@classmethod
def from_path(cls, path, extra=None):
"""
Build an instance from the path to the sources.
:param extra: Extra sources to use, same as ``src`` parameter
:class:`lisa._kmod.KmodSrc`.
:type extra: dict(str, str)
"""
def get_files(root, dirs, files):
for f in files:
yield (Path(root) / f).resolve()
path = Path(path).resolve()
src = {
str(f.relative_to(path)): f.read_bytes()
for files in itertools.starmap(get_files, os.walk(path))
for f in files
}
src.update(extra or {})
return cls(src=src)
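# A minimal sketch (not part of the original module) of how KmodSrc can be
# built from in-memory sources: with no Kbuild/Makefile provided, a Kbuild is
# generated from the .c files and the module name is derived from a checksum.
def _example_kmod_src():
    src = KmodSrc({
        'hello.c': (
            '#include <linux/module.h>\n'
            'MODULE_LICENSE("GPL");\n'
        ),
    })
    print(src.mod_name)                   # e.g. "lisa-<sha256 digest prefix>"
    print(src.makefile.decode('utf-8'))   # generated Kbuild content
    return src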
class DynamicKmod(Loggable):
"""
Dynamic kernel module that can be compiled on the go by LISA.
:param target: Target that will be used to load the module.
:type target: lisa.target.Target
:param src: Sources of the module.
:type src: lisa._kmod.KmodSrc
:param kernel_tree: Kernel source tree to use to build the module against.
:type kernel_tree: lisa._kmod.KernelTree
"""
def __init__(self, target, src, kernel_tree=None):
self.src = src
self.target = target
self._user_kernel_tree = kernel_tree
@classmethod
def from_target(cls, target, **kwargs):
"""
Build a module from the given target. Use this constructor on
subclasses rather than making assumptions on the signature of the
class.
:Variable keyword arguments: Forwarded to ``__init__``.
"""
return cls(target=target, **kwargs)
@staticmethod
def _resolve_tree_spec(tree):
if isinstance(tree, KernelTree):
spec = {
'build_env': tree.build_env,
'overlay_backend': tree.overlay_backend,
}
else:
spec = {
'tree_path': tree,
'build_env': KernelTree._resolve_build_env(
spec.get('build_env')
),
'overlay_backend': KernelTree._resolve_overlay_backend(
spec.get('overlay_backend')
)
}
return spec
@property
@memoized
def kernel_tree(self):
tree = self._user_kernel_tree
if isinstance(tree, KernelTree):
pass
else:
spec = self._resolve_tree_spec(tree)
tree = KernelTree.from_target(
target=self.target,
**self._resolve_tree_spec(tree),
)
arch = _any_abi_to_kernel_arch(
self.target.plat_info['abi']
)
tree_arch = tree.make_vars['ARCH']
if tree_arch != arch:
raise ValueError(f'The kernel tree ({tree_arch}) was not prepared for the same architecture as the target ({arch}). Please set ARCH={arch} make variable.')
else:
return tree
@property
def _compile_needs_root(self):
spec = self._resolve_tree_spec(self._user_kernel_tree)
return (
spec['build_env'] != 'host' or
spec['overlay_backend'] == 'overlayfs'
)
# Dummy memoized wrapper. The only reason we need one is that _do_compile()
# needs to be pickleable to be sent to a multiprocessing Process, so it
    # cannot be overridden by a wrapper
@memoized
def _compile(self):
compile_ = self._do_compile.__func__
if self._compile_needs_root:
compile_ = ensure_root(compile_, inline=True)
bin_, spec = compile_(self)
# Get back KernelTree._to_spec() and update the KernelTree we have in
# this process with it to remember the checksum, in case ensure_root()
# spawned a new process. This is then used by Target.get_kmod() that
# will reinject the known spec when creating new modules from the
# default KernelTree
self.kernel_tree._update_spec(spec)
return bin_
def _do_compile(self):
kernel_tree = self.kernel_tree
src = self.src
def get_key(kernel_tree):
kernel_checksum = kernel_tree.checksum
if kernel_checksum is None:
raise ValueError('Kernel tree has no checksum')
else:
var_tokens = [
f'{k}={v}'
for k, v in sorted(kernel_tree.make_vars.items())
]
# Cache the compilation based on:
# * the kernel tree
# * the make variables
# * the module name
return (kernel_checksum, kernel_tree.build_env, src.checksum, *var_tokens)
def get_bin(kernel_tree):
return src.compile(
kernel_tree=kernel_tree,
make_vars=kernel_tree.make_vars,
)
def lookup_cache(kernel_tree, key, enter_cm=False):
cm = kernel_tree if enter_cm else nullcontext(kernel_tree)
def populate(key, path):
with cm as kernel_tree:
with open(path / 'mod.ko', 'wb') as f:
f.write(get_bin(kernel_tree))
dir_cache = DirCache(
category='kernel_modules',
populate=populate,
)
cache_path = dir_cache.get_entry(key)
with open(cache_path / 'mod.ko', 'rb') as f:
return f.read()
# First try on the "bare" kernel tree, i.e. before calling __enter__().
# If this happens to have enough information to hit the cache, we just
# avoided a possibly costly setup of compilation environment
try:
key = get_key(kernel_tree)
except ValueError:
with kernel_tree as kernel_tree:
if kernel_tree.checksum is None:
# Only cache the module if the kernel tree has a defined
# checksum, which is not always the case when it's not
# coming from a controlled source that is guaranteed to be
# immutable.
return get_bin(kernel_tree)
else:
key = get_key(kernel_tree)
bin_ = lookup_cache(kernel_tree, key)
else:
bin_ = lookup_cache(kernel_tree, key, enter_cm=True)
return (bin_, kernel_tree._to_spec())
def install(self):
"""
Install and load the module on the target.
"""
def target_mktemp():
return target.execute(
f'mktemp -p {quote(target.working_directory)}'
).strip()
target = self.target
content = self._compile()
with tempfile.NamedTemporaryFile('wb', suffix='.ko') as f:
f.write(content)
f.flush()
temp_ko = target_mktemp()
try:
target.push(f.name, temp_ko)
target.execute(f'insmod {quote(temp_ko)}', as_root=True)
finally:
target.remove(temp_ko)
def uninstall(self):
"""
Unload the module from the target.
"""
self.target.execute(f'rmmod {quote(self.src.mod_name)}')
@contextlib.contextmanager
def run(self):
"""
Context manager used to run the module by loading it then unloading it.
"""
try:
self.uninstall()
except Exception:
pass
x = self.install()
try:
yield x
finally:
self.uninstall()
class FtraceDynamicKmod(DynamicKmod):
"""
Dynamic module providing some custom ftrace events.
"""
def _get_symbols(self, section=None):
content = self._compile()
elf = ELFFile(BytesIO(content))
if section:
section_idx = {
s.name: idx
for idx, s in enumerate(elf.iter_sections())
}
idx = section_idx[section]
predicate = lambda s: s.entry['st_shndx'] == idx
else:
predicate = lambda s: True
symtab = elf.get_section_by_name('.symtab')
return sorted(
s.name
for s in symtab.iter_symbols()
if predicate(s)
)
@property
@memoized
def defined_events(self):
"""
Ftrace events defined in that module.
"""
def parse(name):
return re.match(r'__event_(.*)', name)
return sorted(set(
m.group(1)
for m in map(
parse,
self._get_symbols('_ftrace_events')
)
if m
))
class LISAFtraceDynamicKmod(FtraceDynamicKmod):
"""
Module providing ftrace events used in various places by :mod:`lisa`.
The kernel must be compiled with the following options in order for the
module to be created successfully:
.. code-block:: sh
CONFIG_DEBUG_INFO=y
CONFIG_DEBUG_INFO_BTF=y
CONFIG_DEBUG_INFO_REDUCED=n
"""
@classmethod
def from_target(cls, target, **kwargs):
path = Path(ASSETS_PATH) / 'kmodules' / 'sched_tp'
btf_path = '/sys/kernel/btf/vmlinux'
with tempfile.NamedTemporaryFile() as f:
try:
target.cached_pull(btf_path, f.name, via_temp=True)
except FileNotFoundError:
raise FileNotFoundError(f'Could not find {btf_path} on the target. Ensure you compiled your kernel using CONFIG_DEBUG_INFO=y CONFIG_DEBUG_INFO_BTF=y CONFIG_DEBUG_INFO_REDUCED=n')
with open(f.name, 'rb') as f:
btf = f.read()
extra = {
'vmlinux': btf
}
src = KmodSrc.from_path(path, extra=extra)
return cls(
target=target,
src=src,
**kwargs,
)
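# Hedged usage sketch (not part of the original module); `target` stands for an
# already-connected lisa.target.Target instance, and the module sources ship
# with LISA as shown in LISAFtraceDynamicKmod.from_target() above.
def _example_load_lisa_kmod(target):
    kmod = LISAFtraceDynamicKmod.from_target(target)
    print(kmod.defined_events)   # ftrace events defined by the module
    with kmod.run():
        pass                     # e.g. collect an ftrace trace here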
# vim :set tabstop=4 shiftwidth=4 expandtab textwidth=80
|
import pytest
from _mock_data.xpath.method_2 import exception_handling_for_methods_with_3_arguments_or_more
from browserist import Browser
from browserist.browser.click.button import click_button
from browserist.browser.click.button_if_contains_text import click_button_if_contains_text
from browserist.constant import timeout
from browserist.model.type.callable import BrowserMethodWith3ArgumentsCallable
@pytest.mark.parametrize("method", [
click_button,
])
def test_xpath_exception_handling_for_click_methods_1(
browser_default_headless: Browser,
method: BrowserMethodWith3ArgumentsCallable
) -> None:
exception_handling_for_methods_with_3_arguments_or_more(browser_default_headless, method, timeout.VERY_SHORT)
@pytest.mark.parametrize("method, text", [
(click_button_if_contains_text, "More information..."),
])
def test_xpath_exception_handling_for_click_methods_2(
browser_default_headless: Browser,
method: BrowserMethodWith3ArgumentsCallable,
text: str
) -> None:
exception_handling_for_methods_with_3_arguments_or_more(browser_default_headless, method, text, timeout.VERY_SHORT)
|
# Model Parameters: 206,607
# Peak GPU memory usage: 1.57 GB
# RevGNN with 7 layers and 160 channels reaches around 0.8200 test accuracy.
# Final Train: 0.9373, Highest Val: 0.9230, Final Test: 0.8200.
# Training longer should produce better results.
import os.path as osp
import torch
import torch.nn.functional as F
from torch.nn import LayerNorm, Linear
from torch_sparse import SparseTensor
from tqdm import tqdm
import torch_geometric.transforms as T
from torch_geometric.loader import RandomNodeSampler
from torch_geometric.nn import GroupAddRev, SAGEConv
from torch_geometric.utils import index_to_mask
class GNNBlock(torch.nn.Module):
def __init__(self, in_channels, out_channels):
        super().__init__()
self.norm = LayerNorm(in_channels, elementwise_affine=True)
self.conv = SAGEConv(in_channels, out_channels)
def reset_parameters(self):
self.norm.reset_parameters()
self.conv.reset_parameters()
def forward(self, x, edge_index, dropout_mask=None):
x = self.norm(x).relu()
if self.training and dropout_mask is not None:
x = x * dropout_mask
return self.conv(x, edge_index)
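# Hedged illustration (not part of the original script): GroupAddRev makes the
# wrapped block reversible, so intermediate activations do not need to be kept
# for the backward pass. With hidden_channels=160 and num_groups=2 as used
# below, each GNNBlock operates on 160 // 2 = 80 channels.
def _example_reversible_block():
    return GroupAddRev(GNNBlock(80, 80), num_groups=2)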
class RevGNN(torch.nn.Module):
def __init__(self, in_channels, hidden_channels, out_channels, num_layers,
dropout, num_groups=2):
super().__init__()
self.dropout = dropout
self.lin1 = Linear(in_channels, hidden_channels)
self.lin2 = Linear(hidden_channels, out_channels)
self.norm = LayerNorm(hidden_channels, elementwise_affine=True)
assert hidden_channels % num_groups == 0
self.convs = torch.nn.ModuleList()
        for _ in range(num_layers):
conv = GNNBlock(
hidden_channels // num_groups,
hidden_channels // num_groups,
)
self.convs.append(GroupAddRev(conv, num_groups=num_groups))
def reset_parameters(self):
self.lin1.reset_parameters()
self.lin2.reset_parameters()
self.norm.reset_parameters()
for conv in self.convs:
conv.reset_parameters()
def forward(self, x, edge_index):
# Generate a dropout mask which will be shared across GNN blocks:
mask = None
if self.training and self.dropout > 0:
mask = torch.zeros_like(x).bernoulli_(1 - self.dropout)
mask = mask.requires_grad_(False)
mask = mask / (1 - self.dropout)
x = self.lin1(x)
for conv in self.convs:
x = conv(x, edge_index, mask)
x = self.norm(x).relu()
x = F.dropout(x, p=self.dropout, training=self.training)
return self.lin2(x)
from ogb.nodeproppred import Evaluator, PygNodePropPredDataset # noqa
transform = T.AddSelfLoops()
root = osp.join(osp.dirname(osp.realpath(__file__)), '..', 'data', 'products')
dataset = PygNodePropPredDataset('ogbn-products', root, transform=transform)
evaluator = Evaluator(name='ogbn-products')
data = dataset[0]
split_idx = dataset.get_idx_split()
for split in ['train', 'valid', 'test']:
data[f'{split}_mask'] = index_to_mask(split_idx[split], data.y.shape[0])
train_loader = RandomNodeSampler(data, num_parts=10, shuffle=True,
num_workers=5)
# Increase num_parts of the test loader if you cannot fit
# the full-batch graph into your GPU memory:
test_loader = RandomNodeSampler(data, num_parts=1, num_workers=5)
device = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
model = RevGNN(
in_channels=dataset.num_features,
hidden_channels=160,
out_channels=dataset.num_classes,
num_layers=7, # You can try 1000 layers for fun
dropout=0.5,
num_groups=2,
).to(device)
optimizer = torch.optim.Adam(model.parameters(), lr=0.003)
def train(epoch):
model.train()
pbar = tqdm(total=len(train_loader))
pbar.set_description(f'Training epoch: {epoch:03d}')
total_loss = total_examples = 0
for data in train_loader:
data = data.to(device)
optimizer.zero_grad()
# Memory-efficient aggregations:
adj_t = SparseTensor.from_edge_index(data.edge_index).t()
out = model(data.x, adj_t)[data.train_mask]
loss = F.cross_entropy(out, data.y[data.train_mask].view(-1))
loss.backward()
optimizer.step()
total_loss += float(loss) * int(data.train_mask.sum())
total_examples += int(data.train_mask.sum())
pbar.update(1)
pbar.close()
return total_loss / total_examples
@torch.no_grad()
def test(epoch):
model.eval()
y_true = {"train": [], "valid": [], "test": []}
y_pred = {"train": [], "valid": [], "test": []}
pbar = tqdm(total=len(test_loader))
pbar.set_description(f'Evaluating epoch: {epoch:03d}')
for data in test_loader:
data = data.to(device)
# Memory-efficient aggregations
adj_t = SparseTensor.from_edge_index(data.edge_index).t()
out = model(data.x, adj_t).argmax(dim=-1, keepdim=True)
for split in ['train', 'valid', 'test']:
mask = data[f'{split}_mask']
y_true[split].append(data.y[mask].cpu())
y_pred[split].append(out[mask].cpu())
pbar.update(1)
pbar.close()
train_acc = evaluator.eval({
'y_true': torch.cat(y_true['train'], dim=0),
'y_pred': torch.cat(y_pred['train'], dim=0),
})['acc']
valid_acc = evaluator.eval({
'y_true': torch.cat(y_true['valid'], dim=0),
'y_pred': torch.cat(y_pred['valid'], dim=0),
})['acc']
test_acc = evaluator.eval({
'y_true': torch.cat(y_true['test'], dim=0),
'y_pred': torch.cat(y_pred['test'], dim=0),
})['acc']
return train_acc, valid_acc, test_acc
for epoch in range(1, 501):
loss = train(epoch)
train_acc, val_acc, test_acc = test(epoch)
print(f'Loss: {loss:.4f}, Train: {train_acc:.4f}, Val: {val_acc:.4f}, '
f'Test: {test_acc:.4f}')
|
### ANSI escape sequence: \033[<style>;<text color>;<background>m
### DOES NOT WORK ON WINDOWS (IT IS POSSIBLE IN PYCHARM)
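# Additional hedged examples (not in the original snippet) of the same
# \033[<style>;<text color>;<background>m pattern; the original example follows.
print('\033[1;32mbold green text\033[m')          # 1 = bold, 32 = green foreground
print('\033[7;30;46minverted cyan block\033[m')   # 7 = inverse, 30 = black text, 46 = cyan background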
print ('\033[4;31;43mOla mundo\033[m') |
"""
75. Sort Colors
https://leetcode.com/problems/sort-colors/
Time complexity: O(n)
Space complexity: O(1)
"""
from typing import List
class Solution:
def sortColors(self, nums: List[int]) -> None:
"""
Do not return anything, modify nums in-place instead.
"""
p0 = curr = 0
p2 = len(nums) - 1
while p2 >= curr:
if nums[curr] == 0:
nums[p0], nums[curr] = nums[curr], nums[p0]
p0 += 1
curr += 1
elif nums[curr] == 1:
curr += 1
else:
nums[p2], nums[curr] = nums[curr], nums[p2]
p2 -= 1
# nums.sort()
ans = [
[2,0,2,1,1,0] # [0,0,1,1,2,2]
]
for trial in ans:
    Solution().sortColors(trial)
    print(trial)  # sorted in place, e.g. [0, 0, 1, 1, 2, 2]
|
# -*- coding: utf-8 -*-
"""
Created on Wed Sep 2 16:49:23 2020
@author: truthless
"""
from LAUG.nlu.gpt.utils import seq2dict
from LAUG.nlu.milu_new.dai_f1_measure import DialogActItemF1Measure
def normalize(data):
string = str(data)
digit2word = {
'0': 'zero', '1': 'one', '2': 'two', '3': 'three', '4': 'four', '5': 'five',
'6': 'six', '7': 'seven', '8': 'eight', '9': 'nine', '10': 'ten', '11': 'eleven',
'12': 'twelve'
}
for key, value in digit2word.items():
string = string.replace(' ' + key + ' ', ' ' + value + ' ')
return eval(string)
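# Hedged example (not part of the original script) of what normalize() does:
# digits that stand alone between spaces are rewritten as words before the
# string is eval'ed back into a Python object.
def _example_normalize():
    return normalize([['book 3 rooms for 2 nights']])
    # -> [['book three rooms for two nights']]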
def calculateF1gpt(data):
data = normalize(data)
dai_f1_metric = DialogActItemF1Measure()
for item in data:
predict = seq2dict(item[1].replace('\'','').replace('=?','= ?').lower())
target = seq2dict(item[0].replace(' \'','').split('&')[1])
dai_f1_metric([predict], [target])
metric = dai_f1_metric.get_metric(True)
print(metric)
def calculateF1copy(data):
data = normalize(data)
dai_f1_metric = DialogActItemF1Measure()
for item in data:
predict = seq2dict(item[2].replace('i d','id').lower())
target = seq2dict(item[1])
dai_f1_metric([predict], [target])
metric = dai_f1_metric.get_metric(True)
print(metric)
if __name__ == '__main__':
from sys import argv
import json
# files are JSON outputs from run.py file
# if usage is: python evaluate_g.py file1 file2
if len(argv) >3:
if argv[3]=='gpt':
diffs =[]
with open(argv[1], 'r', encoding='utf-8') as f:
data_orig=json.load(f)
data_orig = normalize(data_orig)
with open(argv[2], 'r', encoding='utf-8') as f:
data_aug=json.load(f)
data_aug = normalize(data_aug)
val_pred_count=0
for item1, item2 in zip(data_orig, data_aug):
# if any of the lines have an invalid prediction, skip them
try:
predict1 = seq2dict(item1[1].replace('=?','= ?').lower())
predict2 = seq2dict(item2[1].replace('=?','= ?').lower())
target1 = item1[0].replace(' \'','').split('&')[1]
target2 = item2[0].replace(' \'','').split('&')[1]
input1 = item1[0].replace(' \'','').split('&')[0]
input2 = item2[0].replace(' \'','').split('&')[0]
assert target1 == target2, f"Target output is different: {(target1, target2)}"
target = seq2dict(target1)
# keep track of only those where the prediction on the paraphrased test set is wrong and those for the original test set is correct
if predict1 != predict2 and predict2 != target and predict1 == target:
# print("add sample")
diffs.append({
"original_input": input1,
"paraphrased_input": input2,
"original_prediction": item1[1].replace('=?','= ?').lower(),
"paraphrased_prediction": item2[1].replace('=?','= ?').lower(),
"target": target1
})
val_pred_count += 1
except:
continue
with open("diffs.json", "w") as f:
json.dump(diffs, f, indent=4)
print(f"Number of valid predictions from both files: {val_pred_count}\n\tTotal number of predictions: {len(diffs)}\n\tNumber of invalid predictions from either file: {len(diffs)- val_pred_count}")
# if only one file is provided: i.e. python evaluate_g.py <file>.json, return the F1 score
else:
data=[]
if argv[2]=='gpt':
with open(argv[1], 'r', encoding='utf-8') as f:
data=json.load(f)
calculateF1gpt(data)
if argv[2]=='copy':
with open(argv[1], 'r', encoding='utf-8') as f:
data=json.load(f)
calculateF1copy(data) |
from specusticc.configs_init.config_loader import ConfigLoader
from specusticc.configs_init.model.agent_config import AgentConfig
from specusticc.configs_init.model.configs_wrapper import ConfigsWrapper
from specusticc.configs_init.model.loader_config import LoaderConfig
from specusticc.configs_init.model.market_config import MarketConfig
from specusticc.configs_init.model.preprocessor_config import (
PreprocessorConfig,
DateRange,
)
class Configer:
def __init__(self, save_path: str):
self.__save_path = save_path
self.__dict_config = {}
self.__configs: ConfigsWrapper = ConfigsWrapper()
def get_configs_wrapper(self) -> ConfigsWrapper:
return self.__configs
def create_all_configs(self):
self.__fetch_dict_config()
self.__create_loader_config()
self.__create_preprocessor_config()
self.__create_agent_config()
self.__create_market_config()
def __fetch_dict_config(self):
config_path = f"{self.__save_path}/config.json"
loader = ConfigLoader()
loader.load_and_preprocess_config(config_path)
self.__dict_config = loader.get_config()
def __create_loader_config(self):
loader_config = LoaderConfig()
import_dict_config = self.__dict_config.get("import") or {}
input_dict_config = import_dict_config.get("input") or {}
target_dict_config = import_dict_config.get("target") or {}
context_dict_config = import_dict_config.get("context") or {}
loader_config.datasource = import_dict_config.get("datasource", "csv")
loader_config.database_path = import_dict_config.get("database_path", "")
loader_config.input_tickers = input_dict_config.get("tickers", [])
loader_config.output_tickers = target_dict_config.get("tickers", [])
loader_config.context_tickers = context_dict_config.get("tickers", [])
self.__configs.loader = loader_config
def __create_preprocessor_config(self):
preprocessor_config = PreprocessorConfig()
import_dict_config = self.__dict_config.get("import") or {}
input_dict_config = import_dict_config.get("input") or {}
target_dict_config = import_dict_config.get("target") or {}
context_dict_config = import_dict_config.get("context") or {}
train_date_dict = import_dict_config.get("train_date") or {}
test_dates = import_dict_config.get("test_date", [])
preprocessing_dict_config = self.__dict_config.get("preprocessing") or {}
input_columns = input_dict_config.get("columns", [])
if "date" not in input_columns:
input_columns.append("date")
preprocessor_config.input_columns = input_columns
target_columns = target_dict_config.get("columns", [])
if "date" not in target_columns:
target_columns.append("date")
preprocessor_config.output_columns = target_columns
context_columns = context_dict_config.get("columns", [])
if "date" not in context_columns:
context_columns.append("date")
preprocessor_config.context_columns = context_columns
preprocessor_config.context_features = len(context_columns) - 1 # minus date
train_date_range = DateRange(
train_date_dict.get("from"), train_date_dict.get("to")
)
preprocessor_config.train_date = train_date_range
test_date_ranges = []
for test_date in test_dates:
date_range = DateRange(test_date.get("from"), test_date.get("to"))
test_date_ranges.append(date_range)
preprocessor_config.test_dates = test_date_ranges
preprocessor_config.window_length = preprocessing_dict_config.get(
"window_length", 1
)
preprocessor_config.horizon = preprocessing_dict_config.get("horizon", 1)
preprocessor_config.rolling = preprocessing_dict_config.get("rolling", 1)
preprocessor_config.features = len(input_columns) - 1 # minus date
self.__configs.preprocessor = preprocessor_config
def __create_agent_config(self):
agent_config = AgentConfig()
import_dict_config = self.__dict_config.get("import") or {}
input_dict_config = import_dict_config.get("input") or {}
context_dict_config = import_dict_config.get("context") or {}
preprocessing_dict_config = self.__dict_config.get("preprocessing") or {}
agent_dict_config = self.__dict_config.get("agent") or {}
agent_config.input_timesteps = preprocessing_dict_config.get("window_length", 1)
        # Default to empty lists so len() does not fail when keys are missing
        features = (len(input_dict_config.get("columns", [])) - 1) * len(
            input_dict_config.get("tickers", [])
        )
agent_config.input_features = features
agent_config.output_timesteps = preprocessing_dict_config.get("horizon", 1)
agent_config.context_timesteps = preprocessing_dict_config.get(
"window_length", 1
)
        # Default to empty lists so len() does not fail when keys are missing
        features = (len(context_dict_config.get("columns", [])) - 1) * len(
            context_dict_config.get("tickers", [])
        )
agent_config.context_features = features
agent_config.hyperparam_optimization_method = agent_dict_config.get(
"hyperparam_optimization_method", "none"
)
agent_config.save_path = self.__save_path
self.__configs.agent = agent_config
def __create_market_config(self):
market_config = MarketConfig()
agent_dict_config = self.__dict_config.get("agent") or {}
market_config.n_folds = agent_dict_config.get("folds", 1)
self.__configs.market = market_config
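# Hedged sketch (not part of the original module) of the config.json shape that
# Configer reads; every key below is inferred from the .get() calls above and
# all values are purely illustrative.
_EXAMPLE_DICT_CONFIG = {
    "import": {
        "datasource": "csv",
        "database_path": "data/database",
        "input": {"tickers": ["AAPL"], "columns": ["date", "close"]},
        "target": {"tickers": ["MSFT"], "columns": ["date", "close"]},
        "context": {"tickers": ["^GSPC"], "columns": ["date", "close"]},
        "train_date": {"from": "2010-01-01", "to": "2018-12-31"},
        "test_date": [{"from": "2019-01-01", "to": "2019-12-31"}],
    },
    "preprocessing": {"window_length": 30, "horizon": 5, "rolling": 1},
    "agent": {"folds": 1, "hyperparam_optimization_method": "none"},
}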
|
def create_server(spec_dir, port=8080, debug=False, sync=True, request_context_name=None):
import connexion
import os
import yaml
from parrot_api.core.common import get_subpackage_paths
if sync:
app = connexion.FlaskApp(
__name__, port=port,
specification_dir=spec_dir, debug=debug
)
@app.route('/')
def health_check():
return home()
else:
app = connexion.AioHttpApp(__name__, port=8080, specification_dir=spec_dir, debug=debug)
async def health_check(request):
from aiohttp.web import json_response
return json_response(home())
app.app.router.add_get('/', health_check)
for spec in os.listdir(spec_dir):
app.add_api(specification=spec, validate_responses=debug, pass_context_arg_name=request_context_name)
for path in get_subpackage_paths():
schema_directory = os.path.join(path, 'schemas/')
if os.path.isdir(schema_directory):
for spec_file in [i for i in os.listdir(schema_directory) if i.endswith('yaml') or i.endswith("yml")]:
with open(os.path.join(schema_directory, spec_file), 'rt') as f:
spec = yaml.safe_load(f)
app.add_api(specification=spec, validate_responses=debug)
return app
def home():
return dict(status='ok')
def say_hello():
return dict(response='hello')
async def async_hello(request):
return dict(response='hello {token}'.format(token=request.headers['Authorization'].split(' ')[-1]))
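# Hedged usage sketch (not part of the original module); 'openapi/' is a
# hypothetical directory containing the OpenAPI specification files.
if __name__ == '__main__':
    server = create_server(spec_dir='openapi/', port=8080, debug=True)
    server.run()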
|
# -*- coding: utf-8 -*-
"""
MIT License
Copyright (c) 2019-2020 Terbau
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
from typing import Optional
class Playlist:
__slots__ = ('_image', '_internal_name', '_special_border', '_type',
'_violator', '_display_subname', '_description')
def __init__(self, data: dict) -> None:
self._image = data['image']
self._internal_name = data['playlist_name']
self._special_border = data.get('special_border')
self._type = data.get('_type')
self._violator = data.get('violator')
self._display_subname = data.get('display_subname')
self._description = data.get('description')
def __str__(self) -> str:
return self.internal_name
def __repr__(self) -> str:
return ('<Playlist internal_name={0.internal_name!r} '
                'image_url={0.image_url!r} type={0.type!r}>'.format(self))
@property
def image_url(self) -> str:
""":class:`str`: Image url for the playlist."""
return self._image
@property
def internal_name(self) -> str:
""":class:`str`: The internal name of the playlist."""
return self._internal_name
@property
def type(self) -> str:
""":class:`str`: The type of this playlist object."""
return self._type
@property
def special_border(self) -> Optional[str]:
"""Optional[:class:`str`]: Special border of the playlist.
Will be ``None`` if no special border is found for this playlist.
"""
if self._special_border == 'None':
return None
return self._special_border
@property
    def violator(self) -> Optional[str]:
        """Optional[:class:`str`]: The violator displayed for this playlist. This is
the little red tag displaying short text on some of the playlists
in-game.
Will be ``None`` if no violator is found for this playlist.
"""
if self._violator == '':
return None
return self._violator
@property
def display_subname(self) -> Optional[str]:
"""Optional[:class:`str`]: The display subname of this playlist.
Will be ``None`` if no display subname is found for this playlist.
"""
return self._display_subname
@property
def description(self) -> Optional[str]:
"""Optional[:class:`str`]: The description of this playlist.
Will be ``None`` if no description is found for this playlist.
"""
return self._description
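# Hedged example (not part of the original module) of constructing a Playlist
# from the kind of payload the constructor expects; all field values are
# illustrative only.
if __name__ == '__main__':
    playlist = Playlist({
        'image': 'https://example.com/playlist.png',
        'playlist_name': 'Playlist_DefaultSolo',
        'special_border': 'None',
        '_type': 'FortPlaylistInfo',
        'violator': '',
        'display_subname': 'Solo',
        'description': 'Go it alone in a battle to be the last one standing.',
    })
    print(repr(playlist))   # special_border and violator resolve to None here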
|
"""
Good morning! Here's your coding interview problem for today.
This problem was asked by Facebook.
Given an N by N matrix, rotate it by 90 degrees clockwise.
For example, given the following matrix:
[[1, 2, 3],
[4, 5, 6],
[7, 8, 9]]
you should return:
[[7, 4, 1],
[8, 5, 2],
[9, 6, 3]]
Follow-up: What if you couldn't use any extra space?
"""
from copy import deepcopy
# naive solution runs in O(n^2) and requires O(n^2) space
# where n is the length of the matrix
def rotate_90(arr_2d:list):
result = deepcopy(arr_2d)
    # turn the rows of the input array into columns of the result
for r in range(len(arr_2d)):
row = arr_2d[r]
for i in range(len(row)):
result[i][len(result) -1 - r] = row[i]
return result
def rotate_90_redux(matrix):
n = len(matrix)
for i in range(n //2):
for j in range(i, n - i -1):
p1 = matrix[i][j]
p2 = matrix[j][ n - 1 - i]
p3 = matrix[n - i - 1][n - j - 1]
p4 = matrix[n - j - 1][i]
matrix[j][n-i-1] = p1
matrix[n - i - 1][n - j -1] = p2
matrix[n - j - 1][i] = p3
matrix[i][j] = p4
if __name__ == '__main__':
    matrix = [[1, 2, 3],
              [4, 5, 6],
              [7, 8, 9]]
    print(rotate_90(matrix))
    rotate_90_redux(matrix)  # rotates the matrix in place
    print(matrix) |
#!/usr/bin/env python
'''
Setup script.
To build the qutest installation packages:
[sudo] python setup.py sdist bdist_wheel
'''
from setuptools import setup
setup(
name="qutest",
version="6.9.3",
author="Quantum Leaps",
author_email="[email protected]",
description="QUTest Python scripting support",
long_description=open("README.md").read(),
long_description_content_type="text/markdown",
url="https://www.state-machine.com/qtools/qutest.html",
license="GPL/commercial",
platforms="any",
py_modules=["qutest"],
entry_points={"console_scripts": ["qutest = qutest:main"]},
classifiers=["Development Status :: 5 - Production/Stable",
"Intended Audience :: Developers",
"Topic :: Software Development :: Embedded Systems",
"Topic :: Software Development :: Testing",
"Topic :: Software Development :: Testing :: Unit",
"License :: OSI Approved :: GNU General Public License v3 or later (GPLv3+)",
"License :: Other/Proprietary License",
"Operating System :: Microsoft :: Windows",
"Operating System :: POSIX :: Linux",
"Operating System :: MacOS :: MacOS X",
"Programming Language :: Python",
"Programming Language :: Python :: 3",
"Programming Language :: C",
"Programming Language :: C++"],
)
|
import logging
from onegov.translator_directory.app import TranslatorDirectoryApp
log = logging.getLogger('onegov.translator_directory') # noqa
log.addHandler(logging.NullHandler()) # noqa
from translationstring import TranslationStringFactory # noqa
_ = TranslationStringFactory('onegov.translator_directory') # noqa
__all__ = ('TranslatorDirectoryApp', 'log', '_')
|
from flask import render_template, request, redirect, url_for, jsonify
from flask_login import login_required
from app.api.classes.observation.models import Observation
from app.api.classes.observationperiod.models import Observationperiod
from app.api.classes.observation.services import parseCountString, addObservationToDb, getDaySummary
from app.api import bp
from app.db import db
from sqlalchemy.sql import text
@bp.route('/api/addObservation', methods=['POST'])
@login_required
def addObservation():
req = request.get_json()
addObservationToDb(req)
return jsonify(req)
@bp.route('/api/getObservations', methods=["GET"])
@login_required
def getObservations():
observations = Observation.query.all()
ret = []
for obs in observations:
ret.append({ 'species': obs.species, 'adultUnknownCount': obs.adultUnknownCount, 'adultFemaleCount': obs.adultFemaleCount, 'adultMaleCount': obs.adultMaleCount,
'juvenileUnknownCount': obs.juvenileUnknownCount, 'juvenileFemaleCount': obs.juvenileFemaleCount, 'juvenileMaleCount': obs.juvenileMaleCount,
'subadultUnknownCount': obs.subadultUnknownCount, 'subadultFemaleCount': obs.subadultFemaleCount, 'subadultMaleCount': obs.subadultMaleCount,
'unknownUnknownCount': obs.unknownUnknownCount, 'unknownFemaleCount': obs.unknownFemaleCount, 'unknownMaleCount': obs.unknownMaleCount, 'total_count' :obs.total_count,
'direction': obs.direction, 'bypassSide': obs.bypassSide, 'notes': obs.notes,
'observationperiod_id': obs.observationperiod_id, 'shorthand_id': obs.shorthand_id})
return jsonify(ret)
@bp.route('/api/getObservations/<observationperiod_id>', methods=["GET"])
@login_required
def getObservationsByObservationPeriod(observationperiod_id):
observations = Observation.query.filter_by(observationperiod_id = observationperiod_id)
ret = []
for observation in observations:
countString = parseCountString(observation)
ret.append({ 'species': observation.species, 'count': countString, 'direction': observation.direction, 'bypassSide': observation.bypassSide})
return jsonify(ret)
@bp.route("/api/deleteObservations", methods=["DELETE"])
@login_required
def observations_delete():
req = request.get_json()
shorthand_id = req['shorthand_id']
Observation.query.filter_by(shorthand_id=shorthand_id).delete()
db.session.commit()
return jsonify(req)
@bp.route('/api/getObservationSummary/<day_id>', methods=["GET"])
@login_required
def getSummary(day_id):
response = getDaySummary(day_id)
return response
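# Hedged client-side sketch (not part of the original module) of how the delete
# endpoint above could be called; base_url and shorthand_id are illustrative and
# the session must already be authenticated because of @login_required.
def _example_delete_observations(session, base_url, shorthand_id):
    return session.delete(f"{base_url}/api/deleteObservations",
                          json={"shorthand_id": shorthand_id})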
|
from .SnipeIT import SnipeIT
|
import requests, threading
from discord.ext import commands
client = commands.Bot(command_prefix=".", self_bot= True)
token = "token.YXvmcw.yuh-qvd6bsDfyb4gY"
users = ['811042929040687177','903621585053835275','791835116980666418','903244322181361755'] #users aka the victims
gcs = ['904174831707250750','904174832642568273','904174835285000262','904174878138204240','904174879862042624','904174881200041985','903624652549672980','903624649777233961','904120310272491530']
# gc ids ^^^^^ for inviting and kicking out
#t = input("threads: ")
#gc = int(input("gc you wanna fuck them in: "))
def login(): #making it automated i'll finish it up in the future
data = {}
@client.event
async def on_ready():
data['friendsID'] = [freind.id for freind in client.user.friends]
data['channelsID'] = [channel.id for channel in client.private_channels]
await client.close()
try:
client.run(token)
except Exception as error:
print(f"Incorrect Token", error)
return None
return data
def add(i2):
for i in users:
headers = {"Authorization": token}
r = requests.put(f'https://discordapp.com/api/v6/channels/{i2}/recipients/{i}', headers=headers)
if r.status_code == 200 or r.status_code == 201 or r.status_code == 204:
print(f"added {i} to gc {i2}")
elif r.status_code == 429:
print(f"ratelimited")
def remove(i2):
for i in users:
headers = {"Authorization": token}
r = requests.delete(f'https://discordapp.com/api/v6/channels/{i2}/recipients/{i}', headers=headers)
if r.status_code == 200 or r.status_code == 201 or r.status_code == 204:
print(f"removed {i} from gc {i2}")
elif r.status_code == 429:
print(f"ratelimited")
def creategc(): #gc create for ur victims
for i in users:
headers = {"Authorization": token}
json = {"recipients":['811042929040687177','903621585053835275','791835116980666418','903244322181361755']}
r = requests.post('https://discordapp.com/api/v6/users/@me/channels', headers=headers, json=json)
if r.status_code == 200 or r.status_code == 201 or r.status_code == 204:
print(f"created gc")
elif r.status_code == 429:
print(f"ratelimited")
while True:
try:
for i2 in gcs:
threading.Thread(target=remove, args=(i2,)).start()
threading.Thread(target=add, args=(i2,)).start()
except:
print("process couldn't start")
|
# -*- coding: utf-8 -*-
# Copyright (c) 2016-2017 by University of Kassel and Fraunhofer Institute for Wind Energy and
# Energy System Technology (IWES), Kassel. All rights reserved. Use of this source code is governed
# by a BSD-style license that can be found in the LICENSE file.
import numpy as np
import pytest
import pandapower as pp
try:
import pplog as logging
except ImportError:
import logging
logger = logging.getLogger(__name__)
logger.setLevel("DEBUG")
def test_cost_piecewise_linear_gen_q():
""" Testing a very simple network for the resulting cost value
constraints with OPF """
# boundaries:
vm_max = 1.05
vm_min = 0.95
# create net
net = pp.create_empty_network()
pp.create_bus(net, max_vm_pu=vm_max, min_vm_pu=vm_min, vn_kv=10.)
pp.create_bus(net, max_vm_pu=vm_max, min_vm_pu=vm_min, vn_kv=.4)
pp.create_gen(net, 1, p_kw=-100, controllable=True, max_p_kw=-5, min_p_kw=-150, max_q_kvar=50,
min_q_kvar=-50)
pp.create_ext_grid(net, 0)
pp.create_load(net, 1, p_kw=20, controllable=False)
pp.create_line_from_parameters(net, 0, 1, 50, name="line2", r_ohm_per_km=0.876,
c_nf_per_km=260.0, max_i_ka=0.123, x_ohm_per_km=0.1159876,
max_loading_percent=100 * 690)
with pytest.raises(ValueError):
pp.create_piecewise_linear_cost(net, 0, "gen", np.array(
[[0, 0], [1, 50], [2, 100]]), type="q")
with pytest.raises(ValueError):
pp.create_piecewise_linear_cost(net, 0, "gen", np.array(
[[0, 0], [-1, 50], [-2, 100]]), type="q")
with pytest.raises(ValueError):
pp.create_piecewise_linear_cost(net, 0, "gen", np.array(
[[-10, 0], [-200, 50], [-50, 100]]), type="q")
pp.create_piecewise_linear_cost(net, 0, "gen", np.array(
[[-50, 50], [0, 0], [50, -50]]), type="q")
# run OPF
pp.runopp(net, verbose=False)
assert net["OPF_converged"]
assert net.res_cost - net.res_gen.q_kvar.values < 1e-3
def test_cost_piecewise_linear_sgen_q():
""" Testing a very simple network for the resulting cost value
constraints with OPF """
# boundaries:
vm_max = 1.05
vm_min = 0.95
# create net
net = pp.create_empty_network()
pp.create_bus(net, max_vm_pu=vm_max, min_vm_pu=vm_min, vn_kv=10.)
pp.create_bus(net, max_vm_pu=vm_max, min_vm_pu=vm_min, vn_kv=.4)
pp.create_sgen(net, 1, p_kw=-100, controllable=True, max_p_kw=-5, min_p_kw=-150, max_q_kvar=50,
min_q_kvar=-50)
pp.create_ext_grid(net, 0)
pp.create_load(net, 1, p_kw=20, controllable=False)
pp.create_line_from_parameters(net, 0, 1, 50, name="line2", r_ohm_per_km=0.876,
c_nf_per_km=260.0, max_i_ka=0.123, x_ohm_per_km=0.1159876,
max_loading_percent=100 * 690)
pp.create_piecewise_linear_cost(net, 0, "sgen", np.array(
[[-50, 50], [50, -50]]), type="q")
# run OPF
pp.runopp(net, verbose=False)
assert net["OPF_converged"]
assert net.res_cost - net.res_sgen.q_kvar.values < 1e-3
def test_cost_piecewise_linear_load_q():
""" Testing a very simple network for the resulting cost value
constraints with OPF """
# boundaries:
vm_max = 1.05
vm_min = 0.95
# create net
net = pp.create_empty_network()
pp.create_bus(net, max_vm_pu=vm_max, min_vm_pu=vm_min, vn_kv=10.)
pp.create_bus(net, max_vm_pu=vm_max, min_vm_pu=vm_min, vn_kv=.4)
pp.create_load(net, 1, p_kw=-100, controllable=True, max_p_kw=50, min_p_kw=-0, max_q_kvar=50,
min_q_kvar=-50)
pp.create_ext_grid(net, 0)
pp.create_line_from_parameters(net, 0, 1, 50, name="line2", r_ohm_per_km=0.876,
c_nf_per_km=260.0, max_i_ka=0.123, x_ohm_per_km=0.1159876,
max_loading_percent=100 * 690)
pp.create_piecewise_linear_cost(net, 0, "load", np.array(
[[-50, 50], [50, -50]]), type="q")
# run OPF
pp.runopp(net, verbose=False)
assert net["OPF_converged"]
assert net.res_cost - net.res_load.q_kvar.values < 1e-3
def test_cost_piecewise_linear_eg_q():
""" Testing a very simple network for the resulting cost value
constraints with OPF """
# boundaries:
vm_max = 1.05
vm_min = 0.95
# create net
net = pp.create_empty_network()
pp.create_bus(net, max_vm_pu=vm_max, min_vm_pu=vm_min, vn_kv=10.)
pp.create_bus(net, max_vm_pu=vm_max, min_vm_pu=vm_min, vn_kv=10)
pp.create_ext_grid(net, 0, max_p_kw=0, min_p_kw=-50, min_q_kvar=-50, max_q_kvar=50)
pp.create_gen(net, 1, p_kw=-10, max_p_kw=0, min_p_kw=-50, controllable=True)
pp.create_load(net, 1, p_kw=20, controllable=False)
pp.create_line_from_parameters(net, 0, 1, 50, name="line2", r_ohm_per_km=0.876,
c_nf_per_km=260.0, max_i_ka=0.123, x_ohm_per_km=0.1159876,
max_loading_percent=100 * 690)
pp.create_piecewise_linear_cost(net, 0, "ext_grid", np.array(
[[-50, 50], [0, 0], [50, 50]]), type="q")
# run OPF
pp.runopp(net, verbose=False)
assert net["OPF_converged"]
assert net.res_cost - net.res_ext_grid.q_kvar.values * 1 < 1e-3
# check and assert result
def test_cost_pwl_q_3point():
# We have a problem with the cost value after optimization of 3 point q cost functions! It returns the amount of q at the EG, but not the costs!
# Also, the q result is not the optimum!
vm_max = 1.05
vm_min = 0.95
# create net
net = pp.create_empty_network()
pp.create_bus(net, max_vm_pu=vm_max, min_vm_pu=vm_min, vn_kv=10.)
pp.create_bus(net, max_vm_pu=vm_max, min_vm_pu=vm_min, vn_kv=.4)
pp.create_sgen(net, 1, p_kw=-100, controllable=True, max_p_kw=-5, min_p_kw=-150, max_q_kvar=50,
min_q_kvar=-50)
pp.create_ext_grid(net, 0)
pp.create_load(net, 1, p_kw=20, controllable=False)
pp.create_line_from_parameters(net, 0, 1, 50, name="line2", r_ohm_per_km=0.876,
c_nf_per_km=260.0, max_i_ka=0.123, x_ohm_per_km=0.1159876,
max_loading_percent=100 * 690)
pp.create_piecewise_linear_cost(net, 0, "sgen", np.array(
[[-50, 50], [0,0], [50, 50]]), type="q")
# run OPF
pp.runopp(net, verbose=False)
assert net["OPF_converged"]
# test failing because somehow, the res_cost is the eg reactive power and the result is not the optimum!
# assert net.res_cost - net.res_sgen.q_kvar.values < 1e-3
if __name__ == "__main__":
pytest.main(["test_costs_pwl_q.py", "-xs"])
# test_cost_piecewise_linear_eg_q()
# test_cost_piecewise_linear_sgen_q()
# test_cost_piecewise_linear_gen_q()
# test_get_costs()
|
from binascii import hexlify
from pkg_resources import resource_stream
from usb.core import find
from usb.util import find_descriptor, claim_interface
from nitrolib.util import partition
from nitrolib.device import NitroDevice, DeviceNotFound
from nitrolib.emulator.enums import WriteCommandType, MemoryRegion, ReadCommandType
class NitroEmulator(NitroDevice):
is_little_endian = True
def __init__(self):
super().__init__("IS-NITRO-Emulator")
self.device = find(idVendor=0x0F6e, idProduct=0x0404)
if self.device is None:
raise DeviceNotFound(self)
if self.debug:
self.print_configurations()
self.device.set_configuration()
# self.device.reset() # Prevent weird timeouts when used twice
config = self.device.get_active_configuration()
interface = config[(0, 0)]
# Claim interface
if self.device.is_kernel_driver_active(interface.iInterface):
self.device.detach_kernel_driver(interface.iInterface)
claim_interface(self.device, interface.iInterface)
self.endpoint_out = find_descriptor(interface, bEndpointAddress=0x01) # Bulk Out
self.endpoint_in = find_descriptor(interface, bEndpointAddress=0x82) # Bulk in
# self.endpoint_debug = find_descriptor(interface, bEndpointAddress=0x83) # Bulk in 2?
assert self.endpoint_out is not None
assert self.endpoint_in is not None
self.isid = resource_stream(__name__, "../resources/isid.bin").read()
self.debugger_code = resource_stream(__name__, "../resources/debugger_code.bin").read()
# Device utils
def _usb_write(self, data: bytes):
max_size = self.endpoint_out.wMaxPacketSize
packets = partition(data, max_size)
for packet in packets:
written = self.endpoint_out.write(packet)
assert written == len(packet)
def _usb_read(self, size: int) -> bytes:
data = b''
while len(data) < size:
data += bytes(self.endpoint_in.read(size - len(data)))
return data
def read(self, command: ReadCommandType, region: MemoryRegion, address: int, length: int) -> bytes:
packed = self.encode("HBBIII",
command.value,
0x11, # Read
region.value, # TODO: Determine based on CommandType
address,
length,
0) # Padding?
self._usb_write(packed)
data = self._usb_read(length)
if self.debug:
print(f"Read {hex(command.value)} {hex(region.value)}\nAt {hex(address)} Size {hex(length)}")
print(hexlify(data[:0x100]).decode())
print("-" * 10)
return data
def write(self, command: WriteCommandType, region: MemoryRegion, address: int, data: bytes):
packed = self.encode("HBBIII",
command.value,
0x10, # Write
region.value,
address,
len(data),
0) # Padding?
self._usb_write(packed)
self._usb_write(data)
if self.debug:
print(f"Write {hex(command.value)} {hex(region.value)}\nAt {hex(address)} Size {hex(len(data))}")
print(hexlify(data[:0x100]).decode())
print("-"*10)
# Public methods
def full_reset(self):
self.write(WriteCommandType.FULL_RESET, MemoryRegion.CONTROL, 0, b'\x81\xF2')
def processor_stop(self):
self.write(WriteCommandType.CURRENT_PROCESSOR, MemoryRegion.CONTROL, 0, b'\x81\x00\x01\x00')
def processor_start(self):
self.write(WriteCommandType.CURRENT_PROCESSOR, MemoryRegion.CONTROL, 0, b'\x81\x00\x00\x00')
def select_arm9(self):
self.write(WriteCommandType.SELECT_PROCESSOR, MemoryRegion.CONTROL, 0, b'\x8b\x00\x00\x00')
def select_arm7(self):
self.write(WriteCommandType.SELECT_PROCESSOR, MemoryRegion.CONTROL, 0, b'\x8b\x00\x01\x00')
def _slot1_toggle(self, on: bool):
self.write(WriteCommandType.UNKNOWN_AD, MemoryRegion.CONTROL, 0,
b'\xAD\x00\x00\x00'
b'\x0A\x00\x00\x00' +
self.encode('I', int(on)) +
b'\x00\x00\x00\x00'
b'\x00\x00\x00\x00'
b'\x00\x00\x00\x00')
def slot1_on(self):
self._slot1_toggle(True)
def slot1_off(self):
self._slot1_toggle(False)
def slot2_on(self):
self.write(WriteCommandType.UNKNOWN_AD, MemoryRegion.CONTROL, 0,
b'\xAD\x00\x00\x00'
b'\x02\x00\x00\x00'
b'\x01\x00\x00\x00'
b'\x00\x00\x00\x00'
b'\x00\x00\x00\x00'
b'\x00\x00\x00\x00')
self.write(WriteCommandType.UNKNOWN_AD, MemoryRegion.CONTROL, 0,
b'\xAD\x00\x00\x00'
b'\x04\x00\x00\x00'
b'\x00\x00\x00\x00'
b'\x00\x00\x00\x00'
b'\x00\x00\x00\x00'
b'\x00\x00\x00\x00')
def slot2_off(self):
self.write(WriteCommandType.UNKNOWN_AD, MemoryRegion.CONTROL, 0,
b'\xAD\x00\x00\x00'
b'\x02\x00\x00\x00'
b'\x00\x00\x00\x00'
b'\x00\x00\x00\x00'
b'\x00\x00\x00\x00'
b'\x00\x00\x00\x00')
def write_slot1(self, address, data):
self.write(WriteCommandType.WRITE_MAIN_MEMORY, MemoryRegion.NDS, address, data)
def read_slot1(self, address, length):
        return self.read(ReadCommandType.READ_MAIN_MEMORY, MemoryRegion.NDS, address, length)
def write_slot2(self, address, data):
self.write(WriteCommandType.WRITE_MAIN_MEMORY, MemoryRegion.GBA, address, data)
def read_slot2(self, address, length):
        return self.read(ReadCommandType.READ_MAIN_MEMORY, MemoryRegion.GBA, address, length)
def read_nec(self, address, unit, count):
header = self.encode("BBHI",
0x25,
unit,
count,
address)
self.write(WriteCommandType.READ_NEC_CONTROL, MemoryRegion.CONTROL, 0, header)
return self.read(ReadCommandType.READ_NEC, MemoryRegion.CONTROL, 0, 8)
def write_nec(self, address, unit, count, data):
header = self.encode("BBHI",
0x26,
unit,
count,
address)
self.write(WriteCommandType.WRITE_NEC, MemoryRegion.CONTROL, 0, header + data)
def _write_video_register(self, register, data):
self.write_nec(0x8000030, 2, 1, self.encode('BB', register, 0))
self.write_nec(0x8000034, 2, 1, self.encode('BB', data & 0xFF, 0))
self.write_nec(0x8000036, 2, 1, self.encode('BB', data >> 8, 0))
def trigger_fiq(self):
self.write(WriteCommandType.FIQ, MemoryRegion.CONTROL, 0, b'\xaa\x00\x01\x00')
self.write(WriteCommandType.FIQ, MemoryRegion.CONTROL, 0, b'\xaa\x00\x00\x00')
def load_nds_rom(self, rom: bytes, to_firmware: bool = False, enable_gba: bool = False, debug_rom: bytes = None):
debug_rom = debug_rom or self.debugger_code
self.full_reset()
self.processor_stop()
self.slot1_off()
self.slot2_off()
# Gericom/ISNitroController uses RESET_STATE for this, but that doesn't seem to do much on my end.
# Maybe related to being a Lite model?
# Something with the slots based on what I can find?
# Write rom chunked
rom_chunk_size = 1 << 16 # 65536 bytes at a time
for i, rom_chunk in enumerate(partition(rom, rom_chunk_size)):
print(hex(i * rom_chunk_size), "/", hex(len(rom)))
self.write_slot1(i * rom_chunk_size, rom_chunk)
# TODO: Look into whether this is needed
# self.write(MemoryRegion.EMULATION_MEMORY, InteractionType.NDS, 0, rom[:0x160])
self.write_slot1(0x0FF80000, debug_rom)
self.write(WriteCommandType.WRITE_MAIN_MEMORY, MemoryRegion.GBA, 0, self.isid)
if not to_firmware:
self.write_slot1(
0x160,
self.encode("IIII",
0x8FF80000, # Debug rom offset
len(debug_rom), # Debug rom size
0x02700000, # ARM9 Entry
0x02700004)) # ARM7 Entry
if enable_gba:
self.slot2_on()
self.slot1_on()
self.processor_start()
def load_gba_rom(self):
self.full_reset()
self.slot2_off()
self.processor_stop()
self.slot2_on()
self.processor_start()
def enable_video(self):
pass
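# Hedged usage sketch (not part of the original module); 'game.nds' is a
# hypothetical ROM path and an IS-NITRO emulator is assumed to be attached
# over USB.
if __name__ == '__main__':
    emulator = NitroEmulator()
    with open('game.nds', 'rb') as rom_file:
        emulator.load_nds_rom(rom_file.read(), to_firmware=False, enable_gba=False)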
|
from unittest.mock import MagicMock, call
import pytest
from injectable import InjectionContainer, Injectable
from injectable.constants import DEFAULT_NAMESPACE
from injectable.container.namespace import Namespace
from injectable.testing import register_injectables
class TestRegisterInjectables:
def test__register_injectables__with_no_class_or_qualifier(self):
with pytest.raises(ValueError):
register_injectables([MagicMock(spec=Injectable)()])
def test__register_injectables__with_no_class_and_propagate(self):
with pytest.raises(ValueError):
register_injectables(
[MagicMock(spec=Injectable)()], qualifier="TEST", propagate=True
)
def test__register_injectables__with_class_and_default_values(self):
# given
default_namespace_key = DEFAULT_NAMESPACE
namespace = MagicMock(spec=Namespace)()
InjectionContainer.NAMESPACES[default_namespace_key] = namespace
injectables = [MagicMock(spec=Injectable)(), MagicMock(spec=Injectable)()]
klass = MagicMock
# when
register_injectables(injectables, klass)
# then
assert all(
call.register_injectable(inj, klass, None, False) in namespace.mock_calls
for inj in injectables
)
def test__register_injectables__with_qualifier_and_default_values(self):
# given
default_namespace_key = DEFAULT_NAMESPACE
namespace = MagicMock(spec=Namespace)()
InjectionContainer.NAMESPACES[default_namespace_key] = namespace
injectables = [MagicMock(spec=Injectable)(), MagicMock(spec=Injectable)()]
qualifier = "TEST"
# when
register_injectables(injectables, qualifier=qualifier)
# then
assert all(
call.register_injectable(inj, None, qualifier, False)
in namespace.mock_calls
for inj in injectables
)
def test__register_injectables__with_explicit_values(self):
# given
namespace_key = "TEST_NAMESPACE"
namespace = MagicMock(spec=Namespace)()
InjectionContainer.NAMESPACES[namespace_key] = namespace
injectables = [MagicMock(spec=Injectable)(), MagicMock(spec=Injectable)()]
klass = MagicMock
qualifier = "TEST"
# when
register_injectables(
injectables, klass, qualifier, namespace_key, propagate=True
)
# then
assert all(
call.register_injectable(inj, klass, qualifier, True)
in namespace.mock_calls
for inj in injectables
)
def test__register_injectables__with_empty_injection_container(self):
# given
InjectionContainer.NAMESPACES = {}
namespace = MagicMock(spec=Namespace)()
InjectionContainer._get_namespace_entry = MagicMock(return_value=namespace)
injectables = [MagicMock(spec=Injectable)(), MagicMock(spec=Injectable)()]
klass = MagicMock
qualifier = "TEST"
# when
register_injectables(injectables, klass, qualifier)
# then
assert all(
call.register_injectable(inj, klass, qualifier, False)
in namespace.mock_calls
for inj in injectables
)
|
from autogoal.kb import (
Document,
Sentence,
Seq,
Stem,
Word,
build_pipeline_graph,
algorithm,
AlgorithmBase,
)
from autogoal.search import RandomSearch
class TextAlgorithm(AlgorithmBase):
def run(self, input: Sentence) -> Document:
pass
class StemWithDependanceAlgorithm(AlgorithmBase):
def __init__(self, ub: algorithm(Sentence, Document)):
pass
def run(self, input: Word) -> Stem:
pass
class StemAlgorithm(AlgorithmBase):
def run(self, input: Word) -> Stem:
print("inside StemAlgorithm")
class HigherStemAlgorithm(AlgorithmBase):
def __init__(self, stem: algorithm(Word, Stem)):
pass
def run(self, input: Seq[Word]) -> Seq[Stem]:
pass
def test_recursive_list_pipeline_graph():
pipelineBuilder = build_pipeline_graph(
input_types=Seq[Word],
output_type=Seq[Stem],
registry=[StemAlgorithm, HigherStemAlgorithm],
)
def _make_mock_fitness_fn(X, y):
def mock_fitness_fn(pipeline):
return 1
return mock_fitness_fn
|
################### Imports ######################
# General imports
import numpy as np ; np.random.seed(1) # for reproducibility
import pandas as pd
import pathlib
pd.options.mode.chained_assignment = None
import os
import joblib
# TensorFlow
import tensorflow as tf
from tensorflow import keras
from tensorflow.keras.preprocessing.sequence import pad_sequences
# Dash imports
import dash
import dash_core_components as dcc
import dash_html_components as html
import dash_bootstrap_components as dbc
from dash.dependencies import Input, Output, State # ClientsideFunction
# Indicate the version of Tensorflow and whether it uses the CPU or the GPU
print("TensorFlow version:", tf.__version__)
if len(tf.config.list_physical_devices('GPU')) > 0:
print("The GPU will be used for calculations.")
else:
print("The CPU will be used for calculations.")
################### Dash set up ######################
# Set up
app = dash.Dash(
__name__,
meta_tags=[{"name": "viewport", "content": "width=device-width, initial-scale=1"}]
)
server = app.server
app.config.suppress_callback_exceptions = True
# Define paths
BASE_PATH = pathlib.Path(__file__).parent.resolve()
DATA_PATH = BASE_PATH.joinpath("data").resolve()
################## Functions #####################
def predict_sentiment(text):
# Set values
max_length = 240
trunc_type = 'post'
# Tokenize
tokenizer = joblib.load(os.path.join(path_model, 'tokenizer'))
# # Use absolute path when running in the server
# tokenizer = joblib.load('/home/ubuntu/Sentiment-analysis/app/tokenizer')
# Sequence
sequences = tokenizer.texts_to_sequences([text])
# Add padding
padded = pad_sequences(sequences, maxlen=max_length, truncating=trunc_type)
# Predict
predictions = model.predict(padded)
# Get response
if predictions[0] < 0.5:
response = "BAD"
else:
response = "GOOD"
return response
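# Usage sketch (assumes the tokenizer and model loaded below from path_model;
# the exact labels depend on the trained model, so the outputs are illustrative):
#   predict_sentiment("A wonderful, heartfelt movie")   # -> "GOOD"
#   predict_sentiment("A dull and predictable plot")    # -> "BAD"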
# Create a brief description of the tool
def description_card():
"""
return: A Div containing dashboard title & descriptions.
"""
return html.Div(
id="title-card",
children=[
dbc.Row([
html.Br(),
html.H1(
"Sentiment analysis: IMDB reviews",
style={
'text-align': 'center',
'font-family': 'verdana, sans-serif',
'color': '#f3ce13'
}
),
]),
]
)
################### Paths ######################
# Define paths
path_data = '../data'
path_model = '../model'
path_output = '../output'
#################### Loads #####################
# Load model and history
model = keras.models.load_model(os.path.join(path_model, 'imdb_model.h5'))
model.load_weights(os.path.join(path_model, 'imdb_weights.h5'))
history_dict = joblib.load(os.path.join(path_model, 'imdb_history'))
# # Use absolute path when running in the server
# model = keras.models.load_model('/home/ubuntu/Sentiment-analysis/app/imdb_model.h5')
# model.load_weights('/home/ubuntu/Sentiment-analysis/app/imdb_weights.h5')
# history_dict = joblib.load('/home/ubuntu/Sentiment-analysis/app/imdb_history')
################### User Interface ######################
# Layout
app.layout = html.Div(
id="app-container",
children=[
# Banner
html.Div(
id="banner",
children=[
html.Img(
src=app.get_asset_url("imdb_logo.jpeg"),
style={'height': '5%', 'width': '5%'}
)
],
),
# Title body
dbc.Row([
description_card(),
html.Hr(),
]),
# Description body
dbc.Row([
dbc.Col([
html.Div(
# Left column
html.Div(
id="left-column",
children=[
html.H5("About this App"),
html.Div(
children="This app allows you to classify movie reviews extracted from IMBD. "
"By means of embeddings, it also allows you to visualize how the different words cluster with each other."
),
html.Div([
html.A("GitHub repo",
href='https://github.com/guillermo-lahuerta/Sentiment_analysis',
target="_blank")
]),
html.Br(),
html.H5("Data"),
html.Div(
children="The dataset used to train this model, correpsonds to the 'IMDB reviews' dataset. "
"It is composed by a training set of 25,000 examples, and a test set of 25,000 examples. "
),
html.Div([
html.A("IMDB dataset",
href='https://www.tensorflow.org/datasets/catalog/imdb_reviews/',
target="_blank")
]),
html.Br(),
html.Div(
id="center-column",
children=[
html.H5("Sentiment Analysis"),
html.Div(
children="Sentiment analysis is a Natural Language Processing technique used to determine the "
"'sentiment' of a corpus of text (e.g., whether the opinion expressed is either positive or "
"negative). The model presented in this app, provides the following accuracies a train "
"accuracy of 95.23% and a test accuracy of 83.88%."
),
],
)
],
), style={'display': 'inline-block', 'width': '50%', 'justify': "center",
'vertical-align': 'top', 'margin-left': '0.5em', 'margin-right': '0.5em',
'textAlign': 'center'}
)
]),
# Accuracy body
html.Br(),
html.Hr(),
html.Div(
id="accuracy",
children=[
html.H5("Model evaluation"),
html.Br(),
html.Img(
src=app.get_asset_url("acc.png"),
style={'height': '75%', 'width': '75%', 'justify': "center",
'vertical-align': 'middle', 'textAlign': 'center'}
)], style={'width': '100%', 'justify': "center", 'vertical-align': 'middle', 'textAlign': 'center'}
),
html.Hr(),
], style={'width': '100%', 'justify': "center", 'vertical-align': 'middle', 'textAlign': 'center'}
),
# Embeddings body
html.Div(
id="embeds",
children=[
html.H5("Embeddings"),
html.Br(),
html.Br(),
html.B('Please, click on "Sphereize data" to normalise the data and see the proper clusters (the '
'checkbox option is on the left hand side).'),
html.Br(),
html.Br(),
html.Iframe(
src="https://projector.tensorflow.org/?config=https://gist.githubusercontent.com/guillermo-lahuerta/6185a0ed9d82bf371a984cf7c2ec8547/raw/688afac9a363f872036640cf6e8ddf2fa036c576/config.json",
width="1500",
height="600"
)],
style={'display': 'inline-block', 'justify': "center", 'width': '100%', 'textAlign': 'center'}
),
html.Hr(),
# Word cloud body
html.Div(
id="wordcloud",
children=[
html.H5("Word cloud"),
html.Br(),
html.Img(
src=app.get_asset_url("wordcloud.png"),
style={'height': '35%', 'width': '35%', 'justify': "center",
'vertical-align': 'middle', 'textAlign': 'center'}
)
],
style={'width': '100%', 'justify': "center",
'vertical-align': 'middle', 'textAlign': 'center'}
),
html.Br(),
# Write canvas
html.Hr(),
html.Div(
id="canvas",
children=[
html.H5("Try it yourself!"),
html.Br(),
dcc.Textarea(
id='textarea-state',
value='Game of Thrones is awesome',
style={'width': '60%', 'height': 50},
),
html.Br(),
html.Br(),
html.Button('Predict sentiment', id='textarea-state-button', n_clicks=0,
style={'background-color': '#4CAF50', 'color': 'white'}),
html.Div(id='textarea-state-output', style={'whiteSpace': 'pre-line'})
],
style={'width': '100%', 'justify': "center",
'vertical-align': 'middle', 'textAlign': 'center'}
),
html.Br(),
html.Br(),
],
)
################### Callbacks ######################
@app.callback(
Output('textarea-state-output', 'children'),
Input('textarea-state-button', 'n_clicks'),
State('textarea-state', 'value')
)
def update_output(n_clicks, value):
if n_clicks > 0:
resp = predict_sentiment(value)
return 'The expected sentiment is: \n{}'.format(resp)
################### Run the App ######################
# Run the server
if __name__ == "__main__":
# app.run_server(debug=True, port=80) # Comment this line when launching from the AWS server
app.run_server(debug=False, host='0.0.0.0', port=8082) # Uncomment this line when launching from the AWS server
|
"""Overrides the Conductor Manager for additional PowerVC DB Access:
1. Query HMC Information for a given Host
    2. Add/Update/Delete/Query VIOS Information
3. Add/Update/Delete/Query Adapter Information (SEA/VEA/HBA)
4. Add/Update/Delete/Query Host Metric/Status Information
5. Add/Update/Delete/Query LPAR Allocation Information
"""
import sys
import copy
import nova.context
from oslo import messaging
from nova import rpc as rpc_api
from nova.conductor import manager
from nova.conductor.tasks import live_migrate
from nova.openstack.common import jsonutils
from nova.openstack.common import log as logging
from nova.objects import service as service_obj
from paxes_nova.compute import api as compute_api
from paxes_nova.db import api as db_api
from paxes_nova.objects.compute import dom as compute_dom
from paxes_nova.objects.network import dom as network_dom
from paxes_nova.objects.storage import dom as storage_dom
from paxes_nova import _
LOG = logging.getLogger(__name__)
HOST_REG = 'powervc_discovery.registration.compute.host_registrar'
class PowerVCConductorManager(manager.ConductorManager):
"""Extends the base Conductor Manager class with PowerVC capabilities"""
def __init__(self, *args, **kwargs):
"""Constructor for the PowerVC extension to the Conductor Manager"""
super(PowerVCConductorManager, self).__init__(*args, **kwargs)
self.additional_endpoints.append(_PowerVCConductorManagerProxy(self))
# Construct a Default Factory from each Module to allow initialization
self.hmcfac = compute_dom.ManagementConsoleFactory.get_factory()
self.seafac = network_dom.SharedEthernetAdapterFactory.get_factory()
self.scgfac = storage_dom.StorageConnectivityGroupFactory.get_factory()
# We need to create the default SCG the first time the Conductor starts
self.scgfac.create_default_fc_scg(nova.context.get_admin_context())
#######################################################
########## Override Initialization Methods ##########
#######################################################
def pre_start_hook(self, **kwargs):
"""Override the Pre-Start Hook to perform some initialization"""
# Call the Parent preStart hook if it ever implements anything
super(PowerVCConductorManager, self).pre_start_hook(**kwargs)
try:
# Force the DB API to initialize by doing a random query for HMC's
self.hmcfac.find_all_hmcs(nova.context.get_admin_context())
except:
pass # We don't care if it fails, since it is just initialization
###################################################
###### PowerSpec Conductor API Implementation #####
###################################################
def instance_update(self, context, instance_uuid,
updates, service=None):
""" Update the attributes for the Instance given in the Database """
updates = copy.deepcopy(updates)
pwr_specs = updates.pop('power_specs', None)
#Call the Parent's method to actually update the Instance in the DB
instance_ref = super(PowerVCConductorManager, self).\
instance_update(context, instance_uuid, updates, service)
#If there were any Power Specs given, update those in the database
if pwr_specs is not None:
pwr_specs = db_api.\
instance_power_specs_update(context, instance_uuid, pwr_specs)
instance_ref['power_specs'] = self._convert_power_specs(pwr_specs)
return instance_ref
####################################################
######### Network Adapter Implementation #########
####################################################
def host_reconcile_network(self, context, host_networking_dict):
""" Sends the collected topology of a host's networking resources """
LOG.info('pvc_nova.conductor.manager.PowerVCConductorManager '
'host_reconcile_network: context, '
'host_networking_dict len()= '
+ str(len(host_networking_dict)))
db_api.host_reconcile_network(context, host_networking_dict)
####################################################
########## UnManage Host Implementation ##########
####################################################
def notify_unmanage(self, context, host, mgmt_ip):
"""Notifies this Management System to remove Host Management"""
info = dict(hostname=host, ip=mgmt_ip)
try:
            #Log a message for debug purposes that we are removing the Host
LOG.info(_("Removing Host %(hostname)s, switching "
"to Management System %(ip)s...") % info)
#Generate a Context with a Token to use for the Requests
context = self._generate_admin_context()
#If the Compute Node doesn't exist, we don't need to notify
comp_node = self._get_compute_node(context, host)
if comp_node is None:
return
#Notify the old Management System is no longer managing the host
text = _("The PowerVC management system at %(ip)s is taking over "
"management of host %(hostname)s. The host will be "
"removed from this management system.") % info
anotifier = rpc_api.get_notifier('compute', host)
anotifier.info(context, 'compute.instance.log', {'msg': text})
try:
__import__(HOST_REG)
#Call the Host Registrar to do the full clean-up of the Host
get_registrar = getattr(sys.modules[HOST_REG], 'get_registrar')
registrar = get_registrar(context, host_name=host)
registrar.skip_remote_commands = True
registrar.deregister(force=True)
except Exception as ex:
LOG.warn(_("Exception trying to fully remove the Host."))
LOG.exception(ex)
#Send a notification that we are removing the Compute Node
anotifier.info(context, 'compute.node.delete.start', comp_node)
#Fall-back to just cleaning the DB, if the main flow failed
hostfact = compute_dom.ManagedHostFactory.get_factory()
hostfact.delete_host(context, host)
#Send a notification that we removed the Compute Node
anotifier.info(context, 'compute.node.delete.end', comp_node)
#Log a message for debug purposes that we removed the Host
LOG.info(_("Removed Host %(hostname)s, switching "
"to Management System %(ip)s.") % info)
except Exception as exc:
#Log the Exception that occurred while trying to Remove the Host
LOG.warn(_("Failed to remove Host %(hostname)s while "
"switching to Management System %(ip)s") % info)
LOG.exception(exc)
####################################################
######## Internal Conductor Helper Methods #######
####################################################
@staticmethod
def _get_compute_node(context, host):
"""Helper method to query the Compute Node from the DB"""
service = service_obj.Service.get_by_compute_host(context, host)
#If we weren't able to find the Server or Compute Node, just return
if service is None or service['compute_node'] is None:
return None
#Return the key info from the Compute Node as a dictionary
compute_node = service['compute_node']
return dict(compute_node_id=compute_node.id,
host=service.host, service_id=compute_node.service_id,
hypervisor_hostname=compute_node.hypervisor_hostname)
@staticmethod
def _generate_admin_context():
"""Helper method to create a Context with a Token/ServiceCatalog"""
from nova.network import neutronv2
__import__('nova.network.neutronv2.api')
context = nova.context.get_admin_context()
        #We are using the Neutron Client since it has caching logic
nclient = neutronv2.get_client(context).httpclient
#Since the Neutron Client is cached, the token may already be
#populated, so only need to authenticate if it isn't set yet
if nclient.auth_token is None:
nclient.authenticate()
context.auth_token = nclient.auth_token
context.service_catalog = \
nclient.service_catalog.catalog['access']['serviceCatalog']
return context
@staticmethod
def _convert_power_specs(power_specs):
"""Internal Helper Method to Convert PowerSpecs to a Dictionary"""
lst = ['created_at', 'updated_at', 'deleted_at', 'deleted', 'instance']
power_specs = jsonutils.to_primitive(power_specs)
#There are certain properties on the PowerSpecs we don't want returned
for attr in lst:
if attr in power_specs:
power_specs.pop(attr, None)
return power_specs
###################################################
#### Conductor Manager Adapter Extension Class ####
###################################################
class _PowerVCConductorManagerProxy(object):
"""Adapter Class to extend Remote Methods for the Conductor"""
target = messaging.Target(version='2.0')
def __init__(self, manager):
"""Construct a Class to extend Remote Methods for the Conductor"""
self.manager = manager
def notify_unmanage(self, context, host, mgmt_ip):
"""Notifies this Management System to remove Host Management"""
return self.manager.notify_unmanage(context, host, mgmt_ip)
###################################################
######### Override Live Migration Task ########
###################################################
#Since the default RPC timeout in OpenStack is way too low in many cases for
#migrations, Paxes is extending a few of the RPC API call methods to set a
#larger timeout (based on a configuration property). This works in most places
#in the Compute Manager since we specify the Paxes RPC API there, but the
#live_migration module constructs its own RPC API directly for the
#check_can_live_migrate_destination call, so it doesn't pick up the override.
#
#We need to have a better solution for 2Q14 (larger default, OSCE increasing
#timeout, etc), but temporarily for 1.2 FP1 we are putting in a change to inject
#our own extended LiveMigrationTask class that only overrides the RPC API
#used. This will inject our own execute method that just constructs this Task.
class _Extended_LiveMigrationTask(live_migrate.LiveMigrationTask):
"""Extends the default Live Migration Task to override the RPC API used"""
def __init__(self, context, instance, destination,
block_migration, disk_over_commit):
"""Constructor for the PowerVC extension to Live Migration Task"""
super(_Extended_LiveMigrationTask, self).\
__init__(context, instance, destination,
block_migration, disk_over_commit)
#Use our own extended RPC API instead of the default
self.compute_rpcapi = compute_api.PowerVCComputeRPCAPI()
def _extended_live_migrate_execute(context, instance, destination,
block_migration, disk_over_commit):
"""Overrides the default live_migrate execute to use our extended Task"""
task = _Extended_LiveMigrationTask(context, instance, destination,
block_migration, disk_over_commit)
return task.execute()
#Inject our own overwritten live migrate execute function instead
setattr(live_migrate, 'execute', _extended_live_migrate_execute)
|
# -*- coding: utf-8 -*-
"""
Created on Tue Oct 30 13:50:41 2018
@author: Caio Hamamura
It generates the radar-boxplot
"""
import matplotlib.pyplot as plt
import math
import numpy as np
def radarboxplot(x, y, colNames, plotMedian=False, **kwargs):
nrows = kwargs.get("nrows")
ncols = kwargs.get("ncols")
# Standardize between [0.1, 1]
N = x.shape[1]
minVal = np.min(x, 0)
maxVal = np.max(x, 0)
standardized = 0.1+0.9*(x-minVal)/(maxVal-minVal)
# Calculate angles for each variable and
# repeat to close polygons
angles = [n / float(N) * 2 * math.pi for n in range(N)]
angles += angles[:1]
classes = np.unique(y)
if not(nrows and ncols):
nClasses = len(classes)
nrows = ncols = math.ceil(math.sqrt(nClasses))
axs = []
fig = plt.figure()
for i in range(len(classes)):
class_i = classes[i]
values = standardized[y == class_i]
quantiles = np.quantile(values, axis=0, q=[0.25, 0.5, 0.75])
q25 = quantiles[0]
q50 = quantiles[1]
q75 = quantiles[2]
qDiff = q75-q25
outTop = values > (q75+qDiff*1.5)
outBot = values < (q25-qDiff*1.5)
tops = np.ma.array(values, mask=outTop)
bots = np.ma.array(values, mask=outBot)
q100 = np.max(tops, 0)
q0 = np.min(bots, 0)
q100 = np.append(q100, q100[0])
q0 = np.append(q0, q0[0])
q25 = np.append(q25, q25[0])
q50 = np.append(q50, q50[0])
q75 = np.append(q75, q75[0])
bots = values[outBot]
botsAlpha = (np.array(angles[:-1])*outBot)[outBot]
tops = values[outTop]
topsAlpha = (np.array(angles[:-1])*outTop)[outTop]
axs.append(__generate_plot__(angles, q25, q50, q75, q0, q100, topsAlpha,
tops, botsAlpha, bots, i+1, class_i, colNames,
plotMedian, fig, nrows, ncols, kwargs))
return (fig, axs)
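# Usage sketch (hypothetical data; x is an (n_samples, n_variables) array,
# y holds one class label per sample, colNames names the variables):
#   import numpy as np
#   x = np.random.rand(90, 5)
#   y = np.repeat(['a', 'b', 'c'], 30)
#   cols = ['v1', 'v2', 'v3', 'v4', 'v5']
#   fig, axs = radarboxplot(x, y, cols, plotMedian=True, nrows=1, ncols=3)
#   plt.show()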
def __generate_plot__(angles, q25, q50, q75, q0, q100, topsAlpha, tops,
botsAlpha, bots, row, title, categories, plotMedian, fig,
nrows, ncols, kwargs):
# Check if there is color kwargs:
color = kwargs.get("color")
if not color:
color = ['r', 'b', 'black']
col1 = color[0]
col2 = color[1]
col3 = "black"
if len(color) > 2:
col3 = color[2]
# Initialize polar subplot
ax = plt.subplot(nrows, ncols, row, polar=True)
plt.title(title, y=1.16)
ax.set_theta_offset(math.pi / 2)
ax.set_theta_direction(-1)
    # Draw one axis per variable and add the category labels
plt.xticks(angles[:-1], categories, color='grey', size=7)
plt.subplots_adjust(hspace=0.6, wspace=0.6)
# Draw ylabels
ax.set_rlabel_position(0)
ax.set_theta_offset(math.pi / 2)
ax.set_theta_direction(-1)
plt.yticks([0.25, 0.5, 0.75], ["", "", ""], color="grey", size=7)
plt.ylim(0, 1)
# Fill area
plt.fill_between(angles, q25, q75, color=col1, lw=0, alpha=0.7)
plt.fill_between(angles, q75, q100, color=col2, lw=0, alpha=0.4)
plt.fill_between(angles, q25, q0, color=col2, lw=0, alpha=0.4)
if plotMedian:
plt.polar(angles, q50, lw=.5, color=col3)
plt.polar(angles, q25, lw=1, color=col1)
plt.polar(angles, q75, lw=1, color=col1)
plt.polar(topsAlpha, tops, markeredgecolor="black", marker="o",
markerfacecolor="none", linewidth=0, markersize=3)
plt.polar(botsAlpha, bots, markeredgecolor="black", marker="o",
markerfacecolor="none", linewidth=0, markersize=3)
return ax
|
from flask import request
from app import api
from app.main.service.log_service import LogService
from app.main.service.user_group_service import UserGroupService
from app.main.util.auth_utils import Auth
from app.main.util.constants import Constants
from app.main.util.response_utils import ResponseUtils
_logger = LogService.get_instance()
_user_group_service_instance = UserGroupService.get_instance()
@api.route('/hubs/<product_key>/user-groups', methods=['POST'])
def create_user_group(product_key: str):
auth_header = request.headers.get('Authorization')
error_message, user_info = Auth.get_user_info_from_auth_header(auth_header)
response_message, status, request_dict = ResponseUtils.get_request_data(
request=request,
data_keys=['groupName', 'password']
)
if error_message is None:
if status is None:
group_name = request_dict['groupName']
password = request_dict['password']
if not user_info['is_admin']:
result = _user_group_service_instance.create_user_group_in_device_group(
product_key,
group_name,
password,
user_info['user_id'])
else:
result = Constants.RESPONSE_MESSAGE_USER_DOES_NOT_HAVE_PRIVILEGES
else:
result = response_message
else:
result = error_message
return ResponseUtils.create_response(
result=result,
success_message=Constants.RESPONSE_MESSAGE_CREATED,
product_key=product_key,
is_logged=True,
payload=request_dict
)
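# Request sketch for the route above (hypothetical values; the JSON keys match
# the data_keys expected by ResponseUtils.get_request_data):
#   POST /hubs/<product_key>/user-groups
#   Authorization: <token>
#   {"groupName": "living-room", "password": "secret"}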
@api.route('/hubs/<product_key>/user-groups', methods=['GET'])
def get_list_of_user_groups(product_key: str):
auth_header = request.headers.get('Authorization')
error_message, user_info = Auth.get_user_info_from_auth_header(auth_header)
result_values = None
if error_message is None:
result, result_values = _user_group_service_instance.get_list_of_user_groups(
product_key,
user_info['user_id'],
user_info['is_admin']
)
else:
result = error_message
return ResponseUtils.create_response(
result=result,
result_values=result_values,
product_key=product_key,
is_logged=True
)
@api.route('/hubs/<product_key>/user-groups/<user_group_name>', methods=['DELETE'])
def delete_user_group(product_key: str, user_group_name: str):
auth_header = request.headers.get('Authorization')
error_message, user_info = Auth.get_user_info_from_auth_header(auth_header)
if error_message is None:
result = _user_group_service_instance.delete_user_group(
user_group_name,
product_key,
user_info['user_id'],
user_info['is_admin']
)
else:
result = error_message
return ResponseUtils.create_response(
result=result,
product_key=product_key,
is_logged=True
)
@api.route('/hubs/<product_key>/user-groups/<user_group_name>/executive-devices', methods=['GET'])
def get_list_of_executive_devices(product_key: str, user_group_name: str):
auth_header = request.headers.get('Authorization')
error_message, user_info = Auth.get_user_info_from_auth_header(auth_header)
result_values = None
if error_message is None:
result, result_values = _user_group_service_instance.get_list_of_executive_devices(
product_key,
user_group_name,
user_info['user_id']
)
else:
result = error_message
return ResponseUtils.create_response(
result=result,
result_values=result_values,
product_key=product_key,
is_logged=True
)
@api.route('/hubs/<product_key>/user-groups/<user_group_name>/sensors', methods=['GET'])
def get_list_of_sensors(product_key: str, user_group_name: str):
auth_header = request.headers.get('Authorization')
error_message, user_info = Auth.get_user_info_from_auth_header(auth_header)
result_values = None
if error_message is None:
result, result_values = _user_group_service_instance.get_list_of_sensors(
product_key,
user_group_name,
user_info['user_id']
)
else:
result = error_message
return ResponseUtils.create_response(
result=result,
result_values=result_values,
product_key=product_key,
is_logged=True
)
|
import despymisc.miscutils as miscutils
def get_config_vals(archive_info, config, keylist):
"""Search given dicts for specific values.
"""
info = {}
for k, stat in list(keylist.items()):
if archive_info is not None and k in archive_info:
info[k] = archive_info[k]
elif config is not None and k in config:
info[k] = config[k]
elif stat.lower() == 'req':
miscutils.fwdebug(0, 'FMUTILS_DEBUG', '******************************')
miscutils.fwdebug(0, 'FMUTILS_DEBUG', 'keylist = %s' % keylist)
miscutils.fwdebug(0, 'FMUTILS_DEBUG', 'archive_info = %s' % archive_info)
miscutils.fwdebug(0, 'FMUTILS_DEBUG', 'config = %s' % config)
miscutils.fwdie('Error: Could not find required key (%s)' % k, 1, 2)
return info
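# Usage sketch (hypothetical dicts; archive_info takes precedence over config,
# and a missing 'req' key aborts via miscutils.fwdie):
#   archive = {'root': '/archive/site'}
#   config = {'root': '/other', 'project': 'TEST'}
#   get_config_vals(archive, config, {'root': 'REQ', 'project': 'OPT'})
#   # -> {'root': '/archive/site', 'project': 'TEST'}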
|
# search vulnerabilities by dork
import sys
from urllib2 import HTTPError, URLError
from lib import bing
from lib import google
from lib import yahoo
bingsearch = bing.Bing()
yahoosearch = yahoo.Yahoo()
class Search:
"""basic search class that can be inherited by other search agents like Google, Yandex"""
pass
class Google(Search):
def search(self, query, pages=10):
"""search and return an array of urls"""
urls = []
try:
for url in google.search(query, start=0, stop=pages):
urls.append(url)
except HTTPError:
exit("[503] Service Unreachable")
except URLError:
exit("[504] Gateway Timeout")
except:
exit("Unknown error occurred")
else:
return urls
class Bing(Search):
def search(self, query, pages=10):
try:
return bingsearch.search(query, stop=pages)
except HTTPError:
exit("[503] Service Unreachable")
except URLError:
exit("[504] Gateway Timeout")
except:
exit("Unknown error occurred")
class Yahoo(Search):
def search(self, query, pages=1):
try:
return yahoosearch.search(query, pages)
except HTTPError:
exit("[503] Service Unreachable")
except URLError:
exit("[504] Gateway Timeout")
except:
exit("Unknown error occurred")
|
#!/usr/bin/python3
"""This module define the conection of DB mysql"""
from os import getenv
from sqlalchemy import create_engine
from sqlalchemy.orm import scoped_session
from sqlalchemy.orm import sessionmaker
from models.base_model import Base
from models.user import User
from models.place import Place
from models.state import State
from models.city import City
from models.amenity import Amenity
from models.review import Review
class DBStorage:
"""DB configuration
Attrs:
__engine, __session
"""
__engine = None
__session = None
def __init__(self):
"""Initialization of the DB engine"""
self.__engine = create_engine('mysql+mysqldb://{}:{}@{}:3306/{}'
.format(getenv("HBNB_MYSQL_USER"),
getenv("HBNB_MYSQL_PWD"),
getenv("HBNB_MYSQL_HOST"),
getenv("HBNB_MYSQL_DB")),
pool_pre_ping=True)
if getenv("HBNB_ENV") == "test":
Base.metadata.drop_all(self.__engine)
def all(self, cls=None):
"""query on the current database session
all objects depending of the class name
"""
objs = []
if not cls:
objs += self.__session.query(State).all()
objs += self.__session.query(City).all()
objs += self.__session.query(User).all()
objs += self.__session.query(Place).all()
objs += self.__session.query(Amenity).all()
objs += self.__session.query(Review).all()
else:
objs += self.__session.query(cls).all()
return {obj.__class__.__name__ + "." + obj.id: obj for obj in objs}
def new(self, obj):
"""add the object to the current database session"""
self.__session.add(obj)
def save(self):
"""commit all changes of the current database session"""
self.__session.commit()
    def delete(self, obj=None):
        """delete obj from the current database session if it is not None"""
        if obj is not None:
            self.__session.delete(obj)
def reload(self):
"""create all tables in the database"""
Base.metadata.create_all(self.__engine)
session_fact = sessionmaker(expire_on_commit=False, bind=self.__engine)
self.__session = scoped_session(session_fact)()
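    # Usage sketch (assumes the HBNB_MYSQL_* environment variables are set):
    #   storage = DBStorage()
    #   storage.reload()                      # create tables and open a session
    #   storage.new(State(name="California"))
    #   storage.save()
    #   storage.all(State)                    # {'State.<id>': <State ...>, ...}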
|
"""This is a module for extracting data from simtelarray files and
calculate image parameters of the events: Hillas parameters, timing
parameters. They can be stored in HDF5 file. The option of saving the
full camera image is also available.
Usage:
"import calib_dl0_to_dl1"
"""
import os
import logging
import numpy as np
from ctapipe.image import (
hillas_parameters,
tailcuts_clean,
HillasParameterizationError,
)
from ctapipe.utils import get_dataset_path
from ctapipe.io import event_source
from ctapipe.io import HDF5TableWriter
from eventio.simtel.simtelfile import SimTelFile
import math
from . import utils
from .volume_reducer import check_and_apply_volume_reduction
from ..io.lstcontainers import ExtraImageInfo
from ..calib.camera import lst_calibration, load_calibrator_from_config
from ..io import DL1ParametersContainer, standard_config, replace_config
from ..image.muon import analyze_muon_event, tag_pix_thr
from ..image.muon import create_muon_table, fill_muon_event
from ..visualization import plot_calib
from ctapipe.image.cleaning import number_of_islands
import tables
from functools import partial
from ..io import write_simtel_energy_histogram, write_mcheader, write_array_info, global_metadata
from ..io import add_global_metadata, write_metadata, write_subarray_tables
from ..io.io import add_column_table
import pandas as pd
from . import disp
import astropy.units as u
from astropy.table import Table
from astropy.time import Time
from .utils import sky_to_camera
from .utils import unix_tai_to_utc
from ctapipe.instrument import OpticsDescription
from traitlets.config.loader import Config
from ..calib.camera.calibrator import LSTCameraCalibrator
from ..calib.camera.r0 import LSTR0Corrections
from ..calib.camera.calib import combine_channels
from ..pointing import PointingPosition
__all__ = [
'get_dl1',
'r0_to_dl1',
]
cleaning_method = tailcuts_clean
filters = tables.Filters(
complevel=5, # enable compression, with level 0=disabled, 9=max
complib='blosc:zstd', # compression using blosc
fletcher32=True, # attach a checksum to each chunk for error correction
bitshuffle=False, # for BLOSC, shuffle bits for better compression
)
def get_dl1(calibrated_event, telescope_id, dl1_container = None,
custom_config = {}, use_main_island = True):
"""
Return a DL1ParametersContainer of extracted features from a calibrated event.
The DL1ParametersContainer can be passed to be filled if created outside the function
(faster for multiple event processing)
Parameters
----------
calibrated_event: ctapipe event container
telescope_id: `int`
dl1_container: DL1ParametersContainer
custom_config: path to a configuration file
configuration used for tailcut cleaning
        supersedes the standard configuration
use_main_island: `bool` Use only the main island
to calculate DL1 parameters
Returns
-------
DL1ParametersContainer
"""
config = replace_config(standard_config, custom_config)
cleaning_parameters = config["tailcut"]
dl1_container = DL1ParametersContainer() if dl1_container is None else dl1_container
tel = calibrated_event.inst.subarray.tels[telescope_id]
dl1 = calibrated_event.dl1.tel[telescope_id]
camera = tel.camera
image = dl1.image
pulse_time = dl1.pulse_time
signal_pixels = cleaning_method(camera, image, **cleaning_parameters)
if image[signal_pixels].sum() > 0:
# check the number of islands
num_islands, island_labels = number_of_islands(camera, signal_pixels)
if use_main_island:
n_pixels_on_island = np.zeros(num_islands + 1)
for iisland in range(1, num_islands + 1):
n_pixels_on_island[iisland] = np.sum(island_labels == iisland)
max_island_label = np.argmax(n_pixels_on_island)
signal_pixels[island_labels != max_island_label] = False
hillas = hillas_parameters(camera[signal_pixels], image[signal_pixels])
# Fill container
dl1_container.fill_hillas(hillas)
dl1_container.fill_event_info(calibrated_event)
dl1_container.set_mc_core_distance(calibrated_event, telescope_id)
dl1_container.set_mc_type(calibrated_event)
dl1_container.set_timing_features(camera[signal_pixels],
image[signal_pixels],
pulse_time[signal_pixels],
hillas)
dl1_container.set_leakage(camera, image, signal_pixels)
dl1_container.n_islands = num_islands
dl1_container.set_telescope_info(calibrated_event, telescope_id)
return dl1_container
else:
return None
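# Usage sketch (assumes `event` is a calibrated ctapipe event, e.g. from the loop
# in r0_to_dl1 below, and `tel_id` is one of event.r0.tels_with_data):
#   params = get_dl1(event, tel_id, custom_config=config)
#   if params is not None:
#       print(params.intensity, params.width, params.length)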
def r0_to_dl1(input_filename = get_dataset_path('gamma_test_large.simtel.gz'),
output_filename = None,
custom_config = {},
pedestal_path = None,
calibration_path = None,
time_calibration_path = None,
pointing_file_path = None,
ucts_t0_dragon = math.nan,
dragon_counter0 = math.nan,
ucts_t0_tib = math.nan,
tib_counter0 = math.nan
):
"""
Chain r0 to dl1
Save the extracted dl1 parameters in output_filename
Parameters
----------
input_filename: str
path to input file, default: `gamma_test_large.simtel.gz`
output_filename: str
path to output file, default: `./` + basename(input_filename)
custom_config: path to a configuration file
pedestal_path: Path to the DRS4 pedestal file
calibration_path: Path to the file with calibration constants and
pedestals
time_calibration_path: Path to the DRS4 time correction file
pointing_file_path: path to the Drive log with the pointing information
    Arguments below are just temporary and will be removed once UCTS+EvB
    is proven to provide timestamps stably and reliably.
ucts_t0_dragon: first valid ucts_time
dragon_counter0: Dragon counter corresponding to ucts_t0_dragon
ucts_t0_tib: first valid ucts_time for the first valid TIB counter
tib_counter0: first valid TIB counter
Returns
-------
"""
if output_filename is None:
output_filename = (
'dl1_' + os.path.basename(input_filename).rsplit('.',1)[0] + '.h5'
)
if os.path.exists(output_filename):
raise AttributeError(output_filename + ' exists, exiting.')
config = replace_config(standard_config, custom_config)
custom_calibration = config["custom_calibration"]
try:
source = event_source(input_filename, back_seekable=True)
except:
        # back_seekable might not be available for other sources than eventio
# TODO for real data: source with calibration file and pointing file
source = event_source(input_filename)
is_simu = source.metadata['is_simulation']
source.allowed_tels = config["allowed_tels"]
if config["max_events"] is not None:
source.max_events = config["max_events"]+1
metadata = global_metadata(source)
write_metadata(metadata, output_filename)
cal_mc = load_calibrator_from_config(config)
# minimum number of pe in a pixel to include it in calculation of muon ring time (peak sample):
min_pe_for_muon_t_calc = 10.
# Dictionary to store muon ring parameters
muon_parameters = create_muon_table()
if not is_simu:
# TODO : add DRS4 calibration config in config file, read it and pass it here
r0_r1_calibrator = LSTR0Corrections(pedestal_path = pedestal_path,
tel_id = 1)
# all this will be cleaned up in a next PR related to the configuration files
r1_dl1_calibrator = LSTCameraCalibrator(calibration_path = calibration_path,
time_calibration_path = time_calibration_path,
extractor_product = config['image_extractor'],
gain_threshold = Config(config).gain_selector_config['threshold'],
config = Config(config),
allowed_tels = [1],
)
# Pulse extractor for muon ring analysis. Same parameters (window_width and _shift) as the one for showers, but
# using GlobalPeakWindowSum, since the signal for the rings is expected to be very isochronous
r1_dl1_calibrator_for_muon_rings = LSTCameraCalibrator(calibration_path = calibration_path,
time_calibration_path = time_calibration_path,
extractor_product = config['image_extractor_for_muons'],
gain_threshold = Config(config).gain_selector_config['threshold'],
config = Config(config),
allowed_tels = [1],)
dl1_container = DL1ParametersContainer()
if pointing_file_path:
# Open drive report
pointings = PointingPosition()
pointings.drive_path = pointing_file_path
drive_data = pointings._read_drive_report()
extra_im = ExtraImageInfo()
extra_im.prefix = '' # get rid of the prefix
event = next(iter(source))
write_array_info(event, output_filename)
### Write extra information to the DL1 file
if is_simu:
write_mcheader(event.mcheader, output_filename, obs_id = event.r0.obs_id,
filters = filters, metadata = metadata)
subarray = event.inst.subarray
with HDF5TableWriter(filename = output_filename,
group_name = 'dl1/event',
mode = 'a',
filters = filters,
add_prefix = True,
# overwrite = True,
) as writer:
print("USING FILTERS: ", writer._h5file.filters)
if is_simu:
# build a mapping of tel_id back to tel_index:
# (note this should be part of SubarrayDescription)
idx = np.zeros(max(subarray.tel_indices) + 1)
for key, val in subarray.tel_indices.items():
idx[key] = val
# the final transform then needs the mapping and the number of telescopes
tel_list_transform = partial(utils.expand_tel_list,
max_tels = len(event.inst.subarray.tel) + 1,
)
writer.add_column_transform(
table_name = 'subarray/trigger',
col_name = 'tels_with_trigger',
transform = tel_list_transform
)
### EVENT LOOP ###
for i, event in enumerate(source):
if i % 100 == 0:
print(i)
event.dl0.prefix = ''
event.mc.prefix = 'mc'
event.trig.prefix = ''
# write sub tables
if is_simu:
write_subarray_tables(writer, event, metadata)
if not custom_calibration:
cal_mc(event)
else:
r0_r1_calibrator.calibrate(event)
r1_dl1_calibrator(event)
        # Temporary volume reducer for lstchain - dl1 level must be filled and dl0 will be overwritten.
        # When the final version of the method is implemented, volume reduction will be done at dl0
check_and_apply_volume_reduction(event, config)
# FIXME? This should be eventually done after we evaluate whether the image is
# a candidate muon ring. In that case the full image could be kept, or reduced
# only after the ring analysis is complete.
for ii, telescope_id in enumerate(event.r0.tels_with_data):
tel = event.dl1.tel[telescope_id]
tel.prefix = '' # don't really need one
# remove the first part of the tel_name which is the type 'LST', 'MST' or 'SST'
tel_name = str(event.inst.subarray.tel[telescope_id])[4:]
tel_name = tel_name.replace('-003', '')
if custom_calibration:
lst_calibration(event, telescope_id)
try:
dl1_filled = get_dl1(event, telescope_id,
dl1_container = dl1_container,
custom_config = config,
use_main_island = True)
except HillasParameterizationError:
logging.exception(
'HillasParameterizationError in get_dl1()'
)
continue
if dl1_filled is not None:
# Some custom def
dl1_container.wl = dl1_container.width / dl1_container.length
# Log10(Energy) in GeV
if is_simu:
dl1_container.mc_energy = event.mc.energy.value
dl1_container.log_mc_energy = np.log10(event.mc.energy.value * 1e3)
dl1_container.fill_mc(event)
dl1_container.log_intensity = np.log10(dl1_container.intensity)
dl1_container.gps_time = event.trig.gps_time.value
if not is_simu:
# GPS + WRS + UCTS is now working in its nominal configuration.
# These TS are stored into ucts_time container.
# TS can be alternatively calculated from the TIB and
# Dragon modules counters based on the first valid UCTS TS
# as the reference point. For the time being, the three TS
# are stored in the DL1 files for checking purposes.
ucts_time = event.lst.tel[telescope_id].evt.ucts_timestamp * 1e-9 # secs
module_id = 82 # Get counters from the central Dragon module
if math.isnan(ucts_t0_dragon) and math.isnan(dragon_counter0) \
and math.isnan(ucts_t0_tib) and math.isnan(tib_counter0):
# Dragon/TIB timestamps not based on a valid absolute reference timestamp
dragon_time = (event.lst.tel[telescope_id].svc.date +
event.lst.tel[telescope_id].evt.pps_counter[module_id] +
event.lst.tel[telescope_id].evt.tenMHz_counter[module_id] * 10**(-7))
tib_time = (event.lst.tel[telescope_id].svc.date +
event.lst.tel[telescope_id].evt.tib_pps_counter +
event.lst.tel[telescope_id].evt.tib_tenMHz_counter * 10**(-7))
else:
# Dragon/TIB timestamps based on a valid absolute reference timestamp
dragon_time = ((ucts_t0_dragon - dragon_counter0) * 1e-9 + # secs
event.lst.tel[telescope_id].evt.pps_counter[module_id] +
event.lst.tel[telescope_id].evt.tenMHz_counter[module_id] * 10**(-7))
tib_time = ((ucts_t0_tib - tib_counter0) * 1e-9 + # secs
event.lst.tel[telescope_id].evt.tib_pps_counter +
event.lst.tel[telescope_id].evt.tib_tenMHz_counter * 10**(-7))
# FIXME: directly use unix_tai format whenever astropy v4.1 is out
ucts_time_utc = unix_tai_to_utc(ucts_time)
dragon_time_utc = unix_tai_to_utc(dragon_time)
tib_time_utc = unix_tai_to_utc(tib_time)
dl1_container.ucts_time = ucts_time_utc.unix
dl1_container.dragon_time = dragon_time_utc.unix
dl1_container.tib_time = tib_time_utc.unix
# Select the timestamps to be used for pointing interpolation
if config['timestamps_pointing'] == "ucts":
event_timestamps = ucts_time_utc.unix
elif config['timestamps_pointing'] == "dragon":
event_timestamps = dragon_time_utc.unix
elif config['timestamps_pointing'] == "tib":
event_timestamps = tib_time_utc.unix
else:
raise ValueError("The timestamps_pointing option is not a valid one. \
Try ucts (default), dragon or tib.")
if pointing_file_path and event_timestamps > 0:
azimuth, altitude = pointings.cal_pointingposition(event_timestamps, drive_data)
event.pointing[telescope_id].azimuth = azimuth
event.pointing[telescope_id].altitude = altitude
dl1_container.az_tel = azimuth
dl1_container.alt_tel = altitude
else:
dl1_container.az_tel = u.Quantity(np.nan, u.rad)
dl1_container.alt_tel = u.Quantity(np.nan, u.rad)
# Until the TIB trigger_type is fully reliable, we also add
# the ucts_trigger_type to the data
extra_im.ucts_trigger_type = event.lst.tel[telescope_id].evt.ucts_trigger_type
# FIXME: no need to read telescope characteristics like foclen for every event!
foclen = event.inst.subarray.tel[telescope_id].optics.equivalent_focal_length
mirror_area = u.Quantity(event.inst.subarray.tel[telescope_id].optics.mirror_area, u.m**2)
width = np.rad2deg(np.arctan2(dl1_container.width, foclen))
length = np.rad2deg(np.arctan2(dl1_container.length, foclen))
dl1_container.width = width.value
dl1_container.length = length.value
dl1_container.prefix = tel.prefix
extra_im.tel_id = telescope_id
extra_im.num_trig_pix = event.r0.tel[telescope_id].num_trig_pix
extra_im.trigger_time = event.r0.tel[telescope_id].trigger_time
extra_im.trigger_type = event.r0.tel[telescope_id].trigger_type
extra_im.trig_pix_id = event.r0.tel[telescope_id].trig_pix_id
for container in [extra_im, dl1_container, event.r0, tel]:
add_global_metadata(container, metadata)
event.r0.prefix = ''
writer.write(table_name = f'telescope/image/{tel_name}',
containers = [event.r0, tel, extra_im])
writer.write(table_name = f'telescope/parameters/{tel_name}',
containers = [dl1_container, extra_im])
# Muon ring analysis, for real data only (MC is done starting from DL1 files)
if not is_simu:
bad_pixels = event.mon.tel[telescope_id].calibration.unusable_pixels[0]
# Set to 0 unreliable pixels:
image = tel.image*(~bad_pixels)
# process only promising events, in terms of # of pixels with large signals:
if tag_pix_thr(image):
# re-calibrate r1 to obtain new dl1, using a more adequate pulse integrator for muon rings
numsamples = event.r1.tel[telescope_id].waveform.shape[2] # not necessarily the same as in r0!
bad_pixels_hg = event.mon.tel[telescope_id].calibration.unusable_pixels[0]
bad_pixels_lg = event.mon.tel[telescope_id].calibration.unusable_pixels[1]
# Now set to 0 all samples in unreliable pixels. Important for global peak
# integrator in case of crazy pixels! TBD: can this be done in a simpler
# way?
bad_waveform = np.array(([np.transpose(np.array(numsamples*[bad_pixels_hg])),
np.transpose(np.array(numsamples*[bad_pixels_lg]))]))
# print('hg bad pixels:',np.where(bad_pixels_hg))
# print('lg bad pixels:',np.where(bad_pixels_lg))
event.r1.tel[telescope_id].waveform *= ~bad_waveform
r1_dl1_calibrator_for_muon_rings(event)
tel = event.dl1.tel[telescope_id]
image = tel.image*(~bad_pixels)
# Check again: with the extractor for muon rings (most likely GlobalPeakWindowSum)
# perhaps the event is no longer promising (e.g. if it has a large time evolution)
if not tag_pix_thr(image):
good_ring = False
else:
# read geometry from event.inst. But not needed for every event. FIXME?
geom = event.inst.subarray.tel[telescope_id].camera
muonintensityparam, size_outside_ring, muonringparam, good_ring, \
radial_distribution, mean_pixel_charge_around_ring = \
analyze_muon_event(event.r0.event_id, image, geom, foclen,
mirror_area, False, '')
# mirror_area, True, './') # (test) plot muon rings as png files
# Now we want to obtain the waveform sample (in HG and LG) at which the ring light peaks:
bright_pixels_waveforms = event.r1.tel[telescope_id].waveform[:,image>min_pe_for_muon_t_calc,:]
stacked_waveforms = np.sum(bright_pixels_waveforms, axis=-2)
# stacked waveforms from all bright pixels; shape (ngains, nsamples)
hg_peak_sample = np.argmax(stacked_waveforms, axis=-1)[0]
lg_peak_sample = np.argmax(stacked_waveforms, axis=-1)[1]
if good_ring:
fill_muon_event(muon_parameters, good_ring, event.r0.event_id, dragon_time,
muonintensityparam, muonringparam, radial_distribution,
size_outside_ring, mean_pixel_charge_around_ring,
hg_peak_sample, lg_peak_sample)
# writes mc information per telescope, including photo electron image
if is_simu \
and (event.mc.tel[telescope_id].photo_electron_image > 0).any() \
and config['write_pe_image']:
event.mc.tel[telescope_id].prefix = ''
writer.write(table_name = f'simulation/{tel_name}',
containers = [event.mc.tel[telescope_id], extra_im]
)
if is_simu:
### Reconstruct source position from disp for all events and write the result in the output file
for tel_name in ['LST_LSTCam']:
focal = OpticsDescription.from_name(tel_name.split('_')[0]).equivalent_focal_length
dl1_params_key = f'dl1/event/telescope/parameters/{tel_name}'
add_disp_to_parameters_table(output_filename, dl1_params_key, focal)
# Write energy histogram from simtel file and extra metadata
        # ONLY if the simtel file has been read until the end, otherwise it seems to hang here forever
if source.max_events is None:
write_simtel_energy_histogram(source, output_filename, obs_id = event.dl0.obs_id,
metadata = metadata)
else:
dir = os.path.dirname(output_filename)
name = os.path.basename(output_filename)
k = name.find('Run')
muon_output_filename = name[0:name.find('LST-')+5] + '.' + \
name[k:k+13] + '.fits'
muon_output_filename = dir+'/'+muon_output_filename.replace("dl1", "muons")
table = Table(muon_parameters)
table.write(muon_output_filename, format='fits', overwrite=True)
def add_disp_to_parameters_table(dl1_file, table_path, focal):
"""
Reconstruct the disp parameters and source position from a DL1 parameters table and write the result in the file
Parameters
----------
dl1_file: HDF5 DL1 file containing the required field in `table_path`:
- mc_alt
- mc_az
- mc_alt_tel
- mc_az_tel
table_path: path to the parameters table in the file
focal: focal of the telescope
"""
df = pd.read_hdf(dl1_file, key = table_path)
source_pos_in_camera = sky_to_camera(df.mc_alt.values * u.rad,
df.mc_az.values * u.rad,
focal,
df.mc_alt_tel.values * u.rad,
df.mc_az_tel.values * u.rad,
)
disp_parameters = disp.disp(df.x.values * u.m,
df.y.values * u.m,
source_pos_in_camera.x,
source_pos_in_camera.y)
with tables.open_file(dl1_file, mode = "a") as file:
tab = file.root[table_path]
add_column_table(tab, tables.Float32Col, 'disp_dx', disp_parameters[0].value)
tab = file.root[table_path]
add_column_table(tab, tables.Float32Col, 'disp_dy', disp_parameters[1].value)
tab = file.root[table_path]
add_column_table(tab, tables.Float32Col, 'disp_norm', disp_parameters[2].value)
tab = file.root[table_path]
add_column_table(tab, tables.Float32Col, 'disp_angle', disp_parameters[3].value)
tab = file.root[table_path]
add_column_table(tab, tables.Float32Col, 'disp_sign', disp_parameters[4])
tab = file.root[table_path]
add_column_table(tab, tables.Float32Col, 'src_x', source_pos_in_camera.x.value)
tab = file.root[table_path]
add_column_table(tab, tables.Float32Col, 'src_y', source_pos_in_camera.y.value)
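# Usage sketch (hypothetical file name; mirrors the call made in r0_to_dl1 above,
# and the focal length must carry astropy units):
#   from ctapipe.instrument import OpticsDescription
#   focal = OpticsDescription.from_name('LST').equivalent_focal_length
#   add_disp_to_parameters_table('dl1_gamma_test_large.h5',
#                                'dl1/event/telescope/parameters/LST_LSTCam',
#                                focal)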
|
import sys
import time
from PyQt5.QtCore import *
from PyQt5.QtGui import *
from PyQt5.QtWebEngineWidgets import *
from PyQt5.QtWidgets import *
import likeyoubot_license
import likeyoubot_win
def mouseOverOnLink(web, url):
print('mouse over: ', url)
if (
('adclick' in url) or
('googleads' in url) or
('clickmon' in url) or
('doubleclick' in url)
):
web.ads_url = url
else:
web.ads_url = ''
# if web != None and len(url) > 0:
# web.page().acceptNavigationRequest(url, 1000, False)
class WebEnginePage(QWebEnginePage):
def __init__(self, parent=None):
super().__init__(parent)
def acceptNavigationRequest(self, url, _type, isMainFrame):
print('acceptNavigationRequest: ', url, _type, isMainFrame)
        if _type == QWebEnginePage.NavigationTypeLinkClicked:
            # Allow ad-serving URLs to load inside the embedded view;
            # anything else opens in the system browser instead.
            if "https://googleads.g.doubleclick.net/pagead" in str(url):
                return True
            elif "https://pagead2.googlesyndication.com/pagead/s/cookie_push.html" in str(url):
                return True
            else:
                QDesktopServices.openUrl(QUrl(url))
                return False
        elif _type == 1000:
            QDesktopServices.openUrl(QUrl(url))
            return False
        else:
            return True
class AdsView(QWebEngineView):
def __init__(self, *args, **kwargs):
QWebEngineView.__init__(self, *args, **kwargs)
self.ads_url = ''
self.setPage(WebEnginePage(self))
self.monitorTimer = QTimer()
self.monitorTimer.timeout.connect(self.updateAdsWindow)
self.monitorTimer2 = QTimer()
self.monitorTimer2.timeout.connect(self.updateAdsWindow2)
self.adsHwnd = None
self.beforeWindowList = None
self.afterWindowList = None
        # Korean title: "Click to hide this for the rest of today. Please do not
        # close the window for about 10 seconds after clicking the ad."
        self.myTitle = "클릭하면 오늘 하루 동안 보이지 않습니다. 광고 클릭 후 약 10초 동안 창을 닫지 말아주세요. "
self.elapsedTime = 0
self.adsHwndList = None
self.timeClicked = None
self.elapsedTimeTotal = 0
self.timePopup = None
def event(self, e):
# print('EVENT: ', e.type(), len(self.ads_url), QEvent.ChildAdded)
if e.type() == QEvent.ChildAdded and len(self.ads_url) > 0:
win = likeyoubot_win.LYBWin("ads")
self.beforeWindowList = win.getCurrentWindowList()
self.page().acceptNavigationRequest(self.ads_url, 1000, False)
self.timeClicked = time.time()
self.adsHwndList = None
self.monitorTimer.start(500)
return super().event(e)
def updateAdsWindow(self):
print("updateAdsWindow")
        # Window title counts down (Korean): "disappears automatically in N seconds"
        self.setWindowTitle(str(15 - int(self.elapsedTime)) + " 초 후에 자동으로 사라집니다." )
win = likeyoubot_win.LYBWin("ads")
self.afterWindowList = win.getCurrentWindowList()
adsHwndList = []
for each_hwnd in self.afterWindowList:
if not each_hwnd in self.beforeWindowList:
adsHwndList.append(each_hwnd)
if len(adsHwndList) > 0:
if self.adsHwnd == None:
self.adsHwnd = adsHwndList[0]
else:
print('OK -- [', win.get_title(adsHwndList[0]), ']')
else:
if self.adsHwnd != None:
print('Not OK. Closed')
self.close()
self.elapsedTime = time.time() - self.timeClicked
if self.elapsedTime > 15:
print('Ads successfully completed')
likeyoubot_license.LYBLicense().update_ads_info()
self.close()
def updateAdsWindow2(self):
self.elapsedTimeTotal = time.time() - self.timePopup
if self.elapsedTimeTotal > 1200:
self.close()
if __name__ == '__main__':
if len(sys.argv) < 2:
sys.exit()
if sys.argv[1] != 'dogfooter':
sys.exit()
app = QApplication(sys.argv)
screen_resolution = app.desktop().screenGeometry()
width, height = screen_resolution.width(), screen_resolution.height()
web = AdsView()
# web.settings().setAttribute(QWebEngineSettings.FullScreenSupportEnabled, True)
web.settings().setAttribute(QWebEngineSettings.ShowScrollBars, False)
# web.settings().setAttribute(QWebEngineSettings.FocusOnNavigationEnabled, True)
# web.setPage(WebEnginePage(web))
web.page().fullScreenRequested.connect(lambda request: request.accept())
web.page().linkHovered.connect(lambda url: mouseOverOnLink(web, url=url))
# web.page().loadFinished.connect(lambda ok: debug_action(ok=ok, web=web))
# web.urlChanged.connect(lambda: debug_change())
baseUrl = "https://www.dogfooter.com"
web.page().load(QUrl(baseUrl))
web.setWindowTitle(web.myTitle)
web.setFixedSize(1020, 260)
web.setGeometry(width - 1020, height - 260 - 30, 1020, 260)
web.show()
web.timePopup = time.time()
web.monitorTimer2.start(2000)
sys.exit(app.exec_()) |
import numpy as np
import matplotlib.pyplot as plt
import utils.utils_filt as utils_filt
import pandas as pd
from tqdm import tqdm
# plt.rc('text', usetex=True)
plt.rc('font', size=11)
plt.rc('font', family='serif')
plt.rc('lines', linewidth=1)
plt.rc('axes', linewidth=1, labelsize=10)
plt.rc('legend', fontsize=10)
###########
# x20
###########
measurement = '13/Ice_x20scan'
signalRAW = np.load(f'{measurement}/full_data.npy')
Df_data = pd.read_pickle(f'{measurement}/log_file.pkl')
xx, yy = np.cumsum(np.array(Df_data['dx'])), np.cumsum(np.array(Df_data['dy']))
Tmin, Tmax, Freqmin, Freqmax, nfft, Freqmax_fft= 0, 3, 20, 100, 15, 100
nfft = int(2**nfft)
t = signalRAW[0,0,:]
index = t>-0.2
signal = signalRAW[:,:,index]
t = signal[0,0,:]
signal[:,1,:] = signal[:,1,:] - np.mean(signalRAW[:,1,np.logical_and(signalRAW[0,0,:]>-0.2, signalRAW[0,0,:]<-0.1)], axis = -1)[:,None]
# signal[:,1,:] = signal[:,1,:]/ np.max(signal[:,1,:], axis=1)[:, None]
dt = abs(t[10]-t[11])
freq =np.arange(nfft)/dt/nfft
FREQMAX_IDX = np.logical_and(freq < Freqmax, freq > Freqmin)
signal_filt = np.asarray([utils_filt.filt(signal[idx, 0, np.logical_and(t>Tmin, t<Tmax)], signal[idx, 1, np.logical_and(t>Tmin, t<Tmax)], Type ='BPF', Freqmin=Freqmin, Freqmax = Freqmax) for idx in tqdm(range(len(Df_data)))])
# print(signal_filt.shape)
PSD = np.array([np.abs(np.fft.fft(signal_filt[idx,1], n = nfft)) for idx in range(len(Df_data))])[:,FREQMAX_IDX]
print(PSD.shape)
print('eeee')
freq2 = freq[FREQMAX_IDX]
PSDmaxIDX = np.argmax(PSD, axis=1)
PSDmax = freq2[PSDmaxIDX]
print(PSDmax.shape)
plt.figure(figsize=(4,3))
plt.imshow(PSDmax.reshape(50,50), extent=[xx.min(), xx.max(), yy.min(), yy.max()], cmap='jet', vmin=20,vmax=50)
plt.xlabel('x (mm)')
plt.ylabel('y (mm)')
plt.colorbar(label='Max Frequency (GHz)')
plt.tight_layout()
plt.savefig('x20scan.png', transparent = True, dpi = 800)
Tmin, Tmax, Freqmin, Freqmax, nfft, Freqmax_fft= -0., 2, 2, 100, 14, 100
nfft = int(2**nfft)
t = signalRAW[0,0,:]
index = t>-0.2
signal = signalRAW[:,:,index]
t = signal[0,0,:]
signal[:,1,:] = signal[:,1,:] - np.mean(signalRAW[:,1,np.logical_and(signalRAW[0,0,:]>-0.2, signalRAW[0,0,:]<-0.1)], axis = -1)[:,None]
# signal[:,1,:] = signal[:,1,:]/ np.max(signal[:,1,:], axis=1)[:, None]
dt = abs(t[10]-t[11])
freq =np.arange(nfft)/dt/nfft
FREQMAX_IDX = np.logical_and(freq < Freqmax, freq > Freqmin)
signal_filt = np.asarray([utils_filt.filt(signal[idx, 0, np.logical_and(t>Tmin, t<Tmax)], signal[idx, 1, np.logical_and(t>Tmin, t<Tmax)], Type ='BPF', Freqmin=Freqmin, Freqmax = Freqmax) for idx in tqdm(range(len(Df_data)))])
# print(signal_filt.shape)
PSD = np.array([np.abs(np.fft.fft(signal_filt[idx,1], n = nfft)) for idx in range(len(Df_data))])[:,FREQMAX_IDX]
print(PSD.shape)
print('eeee')
freq2 = freq[FREQMAX_IDX]
PSDmaxIDX = np.argmax(PSD, axis=1)
PSDmax = freq2[PSDmaxIDX]
print(PSDmax.shape)
plt.figure(figsize=(4,3))
plt.imshow(PSDmax.reshape(50,50), extent=[xx.min(), xx.max(), yy.min(), yy.max()], cmap='jet')
plt.xlabel('x (mm)')
plt.ylabel('y (mm)')
plt.colorbar(label='Max Frequency (GHz)')
plt.tight_layout()
plt.savefig('x20scanlow.png', transparent = True, dpi = 800)
measurement = '12/Scan_ice_4'#'16/scan_icex50'
aa = 46
signalRAW = np.load(f'{measurement}/full_data.npy')
Df_data = pd.read_pickle(f'{measurement}/log_file.pkl')
xx, yy = np.cumsum(np.array(Df_data['dx'])), np.cumsum(np.array(Df_data['dy']))
Tmin, Tmax, Freqmin, Freqmax, nfft, Freqmax_fft = 0, 3, 20, 100, 15, 100
nfft = int(2**nfft)
t = signalRAW[0,0,:]
index = t>-0.2
signal = signalRAW[:,:,index]
t = signal[0,0,:]
signal[:,1,:] = signal[:,1,:] - np.mean(signalRAW[:,1,np.logical_and(signalRAW[0,0,:]>-0.2, signalRAW[0,0,:]<-0.1)], axis = -1)[:,None]
# signal[:,1,:] = signal[:,1,:]/ np.max(signal[:,1,:], axis=1)[:, None]
dt = abs(t[10]-t[11])
freq = np.arange(nfft) / dt / nfft
FREQMAX_IDX = np.logical_and(freq < Freqmax, freq > Freqmin)
signal_filt = np.asarray([utils_filt.filt(signal[idx, 0, np.logical_and(t>Tmin, t<Tmax)], signal[idx, 1, np.logical_and(t>Tmin, t<Tmax)], Type ='BPF', Freqmin=Freqmin, Freqmax = Freqmax) for idx in tqdm(range(len(Df_data)))])
# print(signal_filt.shape)
PSD = np.array([np.abs(np.fft.fft(signal_filt[idx,1], n = nfft)) for idx in range(len(Df_data))])[:,FREQMAX_IDX]
print(PSD.shape)
print('eeee')
freq2 = freq[FREQMAX_IDX]
PSDmaxIDX = np.argmax(PSD, axis=1)
PSDmax = freq2[PSDmaxIDX]
print(PSDmax.shape)
plt.figure(figsize=(4,3))
plt.imshow(PSDmax.reshape(aa,aa), extent=[xx.min(), xx.max(), yy.min(), yy.max()], cmap='jet', vmin=20,vmax=50)
plt.xlabel('x (mm)')
plt.ylabel('y (mm)')
plt.colorbar(label='Max Frequency (GHz)')
plt.tight_layout()
plt.savefig('x50scan.png', transparent = True, dpi = 800)
Tmin, Tmax, Freqmin, Freqmax, nfft, Freqmax_fft = 0.0, 2, 2, 100, 14, 100
nfft = int(2**nfft)
t = signalRAW[0,0,:]
index = t>-0.2
signal = signalRAW[:,:,index]
t = signal[0,0,:]
signal[:,1,:] = signal[:,1,:] - np.mean(signalRAW[:,1,np.logical_and(signalRAW[0,0,:]>-0.2, signalRAW[0,0,:]<-0.1)], axis = -1)[:,None]
# signal[:,1,:] = signal[:,1,:]/ np.max(signal[:,1,:], axis=1)[:, None]
dt = abs(t[10]-t[11])
freq = np.arange(nfft) / dt / nfft
FREQMAX_IDX = np.logical_and(freq < Freqmax, freq > Freqmin)
signal_filt = np.asarray([utils_filt.filt(signal[idx, 0, np.logical_and(t>Tmin, t<Tmax)], signal[idx, 1, np.logical_and(t>Tmin, t<Tmax)], Type ='BPF', Freqmin=Freqmin, Freqmax = Freqmax) for idx in tqdm(range(len(Df_data)))])
# print(signal_filt.shape)
PSD = np.array([np.abs(np.fft.fft(signal_filt[idx,1], n = nfft)) for idx in range(len(Df_data))])[:,FREQMAX_IDX]
print(PSD.shape)
print('eeee')
freq2 = freq[FREQMAX_IDX]
PSDmaxIDX = np.argmax(PSD, axis=1)
PSDmax = freq2[PSDmaxIDX]
print(PSDmax.shape)
plt.figure(figsize=(4,3))
plt.imshow(PSDmax.reshape(aa,aa), extent=[xx.min(), xx.max(), yy.min(), yy.max()], cmap='jet')
plt.xlabel('x (mm)')
plt.ylabel('y (mm)')
plt.colorbar(label='Max Frequency (GHz)')
plt.tight_layout()
plt.savefig('x50scanlow.png', transparent = True, dpi = 800)
plt.show()
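# The four analysis blocks above repeat the same pipeline: baseline subtraction,
# band-pass filtering, FFT, and a peak-frequency map. A minimal sketch of a helper
# that factors out that pipeline is given below (an illustrative assumption, not part
# of the original analysis; it reuses utils_filt.filt exactly as called above).
def psd_peak_map(signal, t, df_len, Tmin, Tmax, Freqmin, Freqmax, nfft):
    """Return the peak frequency of the band-passed PSD for every scan point."""
    dt = abs(t[10] - t[11])
    freq = np.arange(nfft) / dt / nfft
    band = np.logical_and(freq < Freqmax, freq > Freqmin)
    window = np.logical_and(t > Tmin, t < Tmax)
    # Band-pass filter every trace inside the selected time window
    filt = np.asarray([
        utils_filt.filt(signal[idx, 0, window], signal[idx, 1, window],
                        Type='BPF', Freqmin=Freqmin, Freqmax=Freqmax)
        for idx in range(df_len)])
    # Power spectrum of the filtered traces, restricted to the frequency band
    psd = np.array([np.abs(np.fft.fft(filt[idx, 1], n=nfft))
                    for idx in range(df_len)])[:, band]
    return freq[band][np.argmax(psd, axis=1)]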
|
import random
import pytest
import cattle
import common
from common import dev # NOQA
from common import SIZE, read_dev, write_dev
def test_basic_rw(dev): # NOQA
for i in range(0, 10):
offset = random.randint(0, SIZE - 256)
length = random.randint(0, 256)
data = common.random_string(length)
common.verify_data(dev, offset, data)
# See also BUG: https://github.com/rancher/longhorn/issues/131
def test_beyond_boundary(dev): # NOQA
# check write at the boundary
data = common.random_string(128)
common.verify_data(dev, SIZE - 128, data)
# out of bounds
with pytest.raises(cattle.ApiError) as err:
write_dev(dev, SIZE, "1")
assert 'EOF' in str(err.value)
with pytest.raises(cattle.ApiError) as err:
read_dev(dev, SIZE, 1)
assert 'EOF' in str(err.value)
# normal writes to verify controller/replica survival
for i in range(0, 10):
offset = random.randint(0, SIZE - 256)
length = random.randint(0, 256)
data = common.random_string(length)
common.verify_data(dev, offset, data)
|
import json
import logging
log = logging.getLogger(__name__)
class Translator:
def __init__(self, bot, langs):
self.bot = bot
self._langs = dict()
self._lang_cache = dict()
for l in langs:
with open(f"src/i18n/{l}.json", "r", encoding="utf8", errors="ignore") as f:
self._langs[l] = json.load(f)
log.info(f"Loaded strings for language {l}")
def translate(self, guild, key, _emote=None, **kwargs):
        if guild.id not in self._lang_cache:
try:
lang = self.bot.db.configs.get(guild.id, "lang")
except KeyError:
self.bot.db.configs.update(f"{guild.id}", "lang", "en_US")
lang = "en_US"
self._lang_cache[guild.id] = lang
else:
lang = self._lang_cache[guild.id]
        try:
            string = self._langs[lang][key]
        except KeyError:
            string = self._langs["en_US"][key]
        if "{emote}" in string:
            return str(string).format(emote=str(self.bot.emotes.get(_emote)), **kwargs)
        return str(string).format(**kwargs) |
# Contributed by Peter Burgers
# The matplotlib.numerix package sneaks these imports in under the radar:
hiddenimports = [
'fft',
'linear_algebra',
'random_array',
'ma',
'mlab',
]
|
from argparse import ArgumentParser
from logging import getLogger
from multiprocessing import Process
from crosscompute.exceptions import (
CrossComputeConfigurationError,
CrossComputeError)
from crosscompute.routines.automation import Automation
from crosscompute.routines.log import (
configure_argument_parser_for_logging,
configure_logging_from)
from crosscompute.scripts.configure import (
configure_argument_parser_for_configuring,
configure_with)
from crosscompute.scripts.run import (
configure_argument_parser_for_running,
run_with)
from crosscompute.scripts.serve import (
configure_argument_parser_for_serving,
serve_with)
def do():
a = ArgumentParser()
configure_argument_parser_for_logging(a)
configure_argument_parser_for_launching(a)
configure_argument_parser_for_configuring(a)
configure_argument_parser_for_serving(a)
configure_argument_parser_for_running(a)
args = a.parse_args()
configure_logging_from(args)
launch_mode = get_launch_mode_from(args)
if launch_mode == 'configure':
configure_with(args)
raise SystemExit
automation = get_automation_from(args)
processes = []
if launch_mode in ['serve', 'all']:
processes.append(Process(target=serve_with, args=(automation, args)))
if launch_mode in ['run', 'all']:
processes.append(Process(target=run_with, args=(automation, args)))
try:
for process in processes:
process.start()
for process in reversed(processes):
process.join()
except KeyboardInterrupt:
L.info('waiting for processes to stop')
def configure_argument_parser_for_launching(a):
a.add_argument(
'--configure', dest='is_configure_only', action='store_true',
help='configure only')
a.add_argument(
'--serve', dest='is_serve_only', action='store_true',
help='serve only')
a.add_argument(
'--run', dest='is_run_only', action='store_true',
help='run only')
'''
a.add_argument(
'--debug', dest='is_debug_only', action='store_true',
help='debug only')
'''
def get_launch_mode_from(args):
launch_mode = 'all'
if args.is_configure_only:
launch_mode = 'configure'
elif args.is_run_only:
launch_mode = 'run'
elif args.is_serve_only:
launch_mode = 'serve'
'''
elif args.is_debug_only:
launch_mode = 'debug'
'''
return launch_mode
def get_automation_from(args):
path_or_folder = args.path_or_folder
try:
automation = Automation.load(path_or_folder or '.')
except CrossComputeConfigurationError as e:
L.error(e)
raise SystemExit
except CrossComputeError:
L.info('existing configuration not found; configuring new automation')
print()
path = configure_with(args)
automation = Automation.load(path)
return automation
L = getLogger(__name__)
if __name__ == '__main__':
do()
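# Illustrative command lines (hypothetical module name; only the flags defined above
# plus the positional path handled by get_automation_from are assumed):
#
#   python -m launch --configure    # write a new configuration, then exit
#   python -m launch --serve        # serve the automation without running batches
#   python -m launch --run          # run batches without serving
#   python -m launch                # default: serve and run in separate processes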
|
import numpy as np
from tensorflow.keras.layers import Layer, Add, Conv2D, Dropout
from tensorflow.keras.layers import Activation, ELU, LeakyReLU, ReLU
from tensorflow.keras.layers import AveragePooling2D
from tensorflow.keras.layers import BatchNormalization, TimeDistributed
from tensorflow.keras.layers import LayerNormalization
from tensorflow.keras.regularizers import l2
import tensorflow as tf
from .layers import ReflectionPadding2D
class ConvBlock(Layer):
def __init__(self, channels, conv_size=(3,3), time_dist=False,
norm=None, stride=1, activation='relu', padding='same',
order=("conv", "act", "dropout", "norm"), scale_norm=False,
dropout=0
):
super().__init__()
TD = TimeDistributed if time_dist else (lambda x: x)
if padding == 'reflect':
pad = tuple((s-1)//2 for s in conv_size)
self.padding = TD(ReflectionPadding2D(padding=pad))
else:
self.padding = lambda x: x
self.conv = TD(Conv2D(
channels, conv_size,
padding='valid' if padding=='reflect' else padding,
strides=(stride,stride),
))
if activation == 'leakyrelu':
self.act = LeakyReLU(0.2)
elif activation == 'relu':
self.act = ReLU()
elif activation == 'elu':
self.act = ELU()
else:
self.act = Activation(activation)
if norm == "batch":
self.norm = BatchNormalization(momentum=0.95, scale=scale_norm)
elif norm == "layer":
self.norm = LayerNormalization(scale=scale_norm)
else:
self.norm = lambda x: x
if dropout > 0:
self.dropout = Dropout(dropout)
else:
self.dropout = lambda x: x
self.order = order
def call(self, x):
for layer in self.order:
if layer == "conv":
x = self.conv(self.padding(x))
elif layer == "act":
x = self.act(x)
elif layer == "norm":
x = self.norm(x)
elif layer == "dropout":
x = self.dropout(x)
else:
raise ValueError("Unknown layer {}".format(layer))
return x
class ResBlock(Layer):
def __init__(self, channels, **kwargs):
super().__init__()
self.channels = channels
self.stride = kwargs.pop("stride", 1)
time_dist = kwargs.get("time_dist", False)
TD = TimeDistributed if time_dist else (lambda x: x)
if self.stride > 1:
self.pool = TD(AveragePooling2D(
pool_size=(self.stride,self.stride)))
else:
self.pool = lambda x: x
self.proj = TD(Conv2D(self.channels, kernel_size=(1,1)))
self.conv_block_1 = ConvBlock(channels, stride=self.stride, **kwargs)
self.conv_block_2 = ConvBlock(channels, activation='leakyrelu', **kwargs)
self.add = Add()
@tf.function
def call(self, x):
x_in = self.pool(x)
in_channels = int(x.shape[-1])
if in_channels != self.channels:
x_in = self.proj(x_in)
x = self.conv_block_1(x)
x = self.conv_block_2(x)
return self.add([x,x_in])
class GRUResBlock(ResBlock):
def __init__(self, channels, final_activation='sigmoid', **kwargs):
super().__init__(channels, **kwargs)
self.final_act = Activation(final_activation)
def call(self, x):
        x_in = self.proj(x)
x = self.conv_block_1(x)
x = self.conv_block_2(x)
x = self.add([x,x_in])
return self.final_act(x)
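# Minimal usage sketch (illustrative, not part of the original module): build a
# ResBlock and apply it to a dummy batch. The channel count and input size are
# arbitrary assumptions; shapes follow the Conv2D conventions used above.
def _example_resblock():
    x = tf.zeros((2, 64, 64, 8))                   # (batch, height, width, channels)
    block = ResBlock(16, conv_size=(3, 3), stride=2, norm="batch")
    y = block(x)                                   # projection + pooling reconcile channels/stride
    return y.shape                                 # TensorShape([2, 32, 32, 16])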
|
from django.apps import AppConfig
class RecordsConfig(AppConfig):
name = "phandler.records"
def ready(self):
try:
import phandler.records.signals # noqa F401
except ImportError:
pass
|
__author__ = 'Gunawan Ariyanto'
data_barang = {
'111':['Es Krim Walls E', 4500],
'222':['Gelas Mug', 5000],
'333':['Sandal Jepit',7500],
'444':['Kopi Tora Bika Mocca Spesial Grande 700g ', 1500],
'555':['D500 Dispenser Air Sanken', 299900],
'666':['Spidol Snowman',6000],
}
|
from data_science_layer.preprocessing.abstract_pre_processor import AbstractPreProcessor
from sklearn.preprocessing import PolynomialFeatures
class PolynomialFeaturesTransformation(AbstractPreProcessor):
_polynomial_features = None
degree = 2
interaction_only = False
include_bias = True
def fit_transform(self, data, y=None):
self.fit(data, y)
return self.transform(data, y)
def transform(self, data, y=None):
data = self._check_input(data)
output = self._polynomial_features.transform(data)
output = self._check_output(data, output)
return output
def fit(self, data, y=None):
self._polynomial_features = PolynomialFeatures(
degree=self.degree, interaction_only=self.interaction_only, include_bias=self.include_bias)
self._polynomial_features.fit(data)
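# For reference, a small sketch of the expansion this preprocessor applies (using
# sklearn directly, since AbstractPreProcessor's _check_input/_check_output helpers
# are defined elsewhere): with degree=2 and include_bias=True, two input features
# [a, b] expand to [1, a, b, a^2, a*b, b^2].
def _example_polynomial_expansion():
    import numpy as np
    poly = PolynomialFeatures(degree=2, interaction_only=False, include_bias=True)
    return poly.fit_transform(np.array([[2.0, 3.0]]))  # -> [[1., 2., 3., 4., 6., 9.]]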
|
"""Creates some common widgets"""
from tkinter import Event, Grid, Listbox, Misc, Pack, Place, StringVar, Text, Canvas, Tk, Toplevel, Variable, Widget, X, VERTICAL, HORIZONTAL, LEFT, BOTTOM, RIGHT, Y, BOTH, END
from tkinter.constants import ACTIVE, ALL, E, GROOVE, INSERT, N, NW, RIDGE, S, SE, SINGLE, W
from tkinter.ttk import Entry, Frame, Button, Scrollbar
from .arrange import Autogrid
from typing import Any, Callable, Iterable, List, Optional, Sequence, Tuple
from .mixins import Common
from .constants import EXPAND, FILL
__all__ = ['Main', 'Window', 'CommonFrame', 'ModalDialog', 'ScrolledListbox', 'AutoSearchCombobox']
class ScrolledFrame(Frame):
"""
A scrolling frame inside a canvas. Based on tkinter.scrolledtext.ScrolledText
"""
def __init__(self, master: Widget ,**kwargs):
self.container = Frame(master)
self.canvas = Canvas(self.container, relief=None, highlightthickness=0)
self.v_scroll = Scrollbar(self.container, orient=VERTICAL)
self.h_scroll = Scrollbar(self.container, orient=HORIZONTAL)
kwargs.update({'master': self.canvas})
Frame.__init__(self, **kwargs)
self.__layout()
self.__commands()
# Copy geometry methods of self.container without overriding Frame
# methods -- hack!
        text_meths = vars(Frame).keys()
methods = vars(Pack).keys() | vars(Grid).keys() | vars(Place).keys()
methods = methods.difference(text_meths)
for m in methods:
if m[0] != '_' and m != 'config' and m != 'configure':
setattr(self, m, getattr(self.container, m))
def __layout(self):
self.canvas.grid(column=0, row=0, sticky=NW+SE)
self.v_scroll.grid(column=1, row=0, sticky=N+S+E)
self.h_scroll.grid(column=0, row=1, sticky=E+W+S)
self.scrolled_frame = self.canvas.create_window((0,0), window=self, anchor=NW)
def __commands(self):
self.v_scroll.configure(command=self.canvas.yview)
self.h_scroll.configure(command=self.canvas.xview)
self.canvas.configure(yscrollcommand=self.v_scroll.set)
self.canvas.configure(xscrollcommand=self.h_scroll.set)
self.container.bind('<Configure>', self._container_configure_handler)
self.bind('<Configure>', self._self_configure_handler)
def _container_configure_handler(self, event: Event):
self.canvas.configure(
width=event.width - self.v_scroll.winfo_width(),
height=event.height - self.h_scroll.winfo_height()
)
def _self_configure_handler(self, *__):
self.canvas.configure(scrollregion=self.canvas.bbox(ALL))
class ScrolledListbox(Listbox):
"""
A scrolled listbox, based on tkinter.scrolledtext.ScrolledText
"""
def __init__(self, master=None, **kw):
self.frame = Frame(master)
self.vbar = Scrollbar(self.frame)
self.vbar.pack(side=RIGHT, fill=Y)
kw.update({'yscrollcommand': self.vbar.set})
Listbox.__init__(self, self.frame, **kw)
self.pack(side=LEFT, fill=BOTH, expand=True)
self.vbar['command'] = self.yview
# Copy geometry methods of self.frame without overriding Listbox
# methods -- hack!
text_meths = vars(Listbox).keys()
methods = vars(Pack).keys() | vars(Grid).keys() | vars(Place).keys()
methods = methods.difference(text_meths)
for m in methods:
if m[0] != '_' and m != 'config' and m != 'configure':
setattr(self, m, getattr(self.frame, m))
def __str__(self):
return str(self.frame)
class AutoSearchCombobox(Entry):
def __init__(self, master: Widget, values: Optional[Iterable[str]] = None, height: Optional[int]=None, **kwargs):
super().__init__(master, **kwargs)
self._tl = Toplevel(self, takefocus=False, relief=GROOVE, borderwidth=1)
self._tl.wm_overrideredirect(True)
self._lb = ScrolledListbox(self._tl, width=kwargs.pop('width', None), height=height, selectmode=SINGLE)
self.values = values
self._lb.pack(expand=True, fill=BOTH)
self._hide_tl()
self.winfo_toplevel().focus_set()
self.bind('<KeyRelease>', self._handle_keyrelease)
self.bind('<FocusOut>', self._handle_focusout)
self.bind('<KeyPress>', self._handle_keypress)
#toplevel bindings
cfg_handler = self.winfo_toplevel().bind('<Configure>', self._handle_configure, add="+")
self.bind('<Destroy>', lambda __, cfg_handler=cfg_handler: self._unbind_my_configure(cfg_handler))
def _unbind_my_configure(self, cfg_handler):
"""Internal function. Allows for JUST this widget's associated callback. Getting around tkinter bug"""
root_tl = self.winfo_toplevel()
if not cfg_handler:
root_tl.tk.call('bind', self._w, '<Configure>', '')
return
func_callbacks = root_tl.tk.call(
'bind', root_tl._w, '<Configure>', None).split('\n')
new_callbacks = [
l for l in func_callbacks if l[6:6 + len(cfg_handler)] != cfg_handler]
root_tl.tk.call('bind', root_tl._w, '<Configure>', '\n'.join(new_callbacks))
root_tl.deletecommand(cfg_handler)
@property
def values(self):
"""
Gets the values
"""
try:
return self.__values
except AttributeError:
self.values = ()
return self.values
@values.setter
def values(self, values: Optional[Iterable]):
"""
Sorts and sets the values
"""
        self.__values = tuple(sorted(values)) if values is not None else tuple()
        # Clear any previous entries before repopulating the dropdown listbox
        self._lb.delete(0, END)
        self._lb.insert(END, *self.values)
self._lb.selection_clear(0, END)
self._lb.selection_set(0)
self._lb.activate(0)
@property
def _lb_current_selection(self) -> str:
"""
Returns the current selection in the listbox
"""
try:
sel = self._lb.curselection()[0]
except IndexError:
return None
return self._lb.get(sel)
def _set_lb_index(self, index):
self._lb.selection_clear(0, END)
self._lb.selection_set(index)
self._lb.activate(index)
self._lb.see(index)
@property
def text_after_cursor(self) -> str:
"""
Gets the entry text after the cursor
"""
contents = self.get()
return contents[self.index(INSERT):]
@property
def dropdown_is_visible(self):
return self._tl.winfo_ismapped()
def _handle_keypress(self, event: Event):
if 'Left' in event.keysym:
if self.dropdown_is_visible:
self._hide_tl()
return 'break'
else:
return
elif (('Right' in event.keysym and self.text_after_cursor == '') or event.keysym in ['Return', 'Tab']) and self.dropdown_is_visible:
#Completion and block next action
self.delete(0, END)
self.insert(0, self._lb_current_selection)
self._hide_tl()
return 'break'
def _handle_keyrelease(self, event: Event):
if 'Up' in event.keysym and self.dropdown_is_visible:
previous_index = self._lb.index(ACTIVE)
new_index = max(0, self._lb.index(ACTIVE) - 1)
self._set_lb_index(new_index)
if previous_index == new_index:
self._hide_tl()
return
if 'Down' in event.keysym:
if self.dropdown_is_visible:
current_index = self._lb.index(ACTIVE)
new_index = min(current_index + 1, self._lb.size() - 1)
self._set_lb_index(new_index)
return 'break'
if not self.dropdown_is_visible and self._lb.size() > 0:
self._show_tl()
if len(event.keysym) == 1 or ('Right' in event.keysym and self.text_after_cursor == '') or event.keysym in ['BackSpace']:
if self.get() != '':
new_values = [value for value in self.values if value.lower(
).startswith(self.get().lower())]
else:
new_values = self.values
self._lb.delete(0, END)
self._lb.insert(END, *new_values)
self._set_lb_index(0)
if self._lb.size() < 1 or self.get() == self._lb_current_selection:
self._hide_tl()
else:
self._show_tl()
def _handle_focusout(self, event: Event):
def cf():
if self.focus_get() != self._tl and self.focus_get() != self._lb:
self._hide_tl()
else:
self.focus_set()
self.after(1, cf)
def _handle_configure(self, event: Event):
if self._tl.winfo_ismapped():
self._update_tl_pos()
def _show_tl(self) -> None:
if self._tl.winfo_ismapped() == False:
self._update_tl_pos()
self._tl.deiconify()
self._tl.attributes("-topmost", True)
def _update_tl_pos(self) -> None:
self._tl.geometry('+{}+{}'.format(self.winfo_rootx(),
self.winfo_rooty() + self.winfo_height() - 1))
def _hide_tl(self) -> None:
self._tl.withdraw()
class Main(Common, Tk):
"""
A main application window
"""
def __new__(cls) -> Any:
cls.__doc__ = Frame.__doc__
return super().__new__(cls)
class Window(Common, Toplevel):
"""
A sub window that is not a dialog, unless you want it to be
"""
class CommonFrame(Common, Frame):
"""A nice Frame to use with common setup methods"""
class ModalDialog(Common, Toplevel):
"""
A modal dialog that demands attention
"""
def __init__(self, master: Widget = None, **kwargs):
"""
Initializes the dialog, instantiate this directly if you don't care about return values
"""
super().__init__(**kwargs)
self.transient(master)
self.withdraw()
self.cancelled = False
self.protocol("WM_DELETE_WINDOW", self._on_cancel)
self.bind('<Escape>', lambda _: self._on_cancel())
def _on_cancel(self):
"""Default behavior is to set self.cancelled = True and destroy the dialog"""
self.cancelled = True
self.destroy()
@classmethod
def show(dialog_class, master: Widget, **kwargs) -> Any:
"""Shows this dialog and waits for finish"""
new = dialog_class(master=master, **kwargs)
new.deiconify()
new.grab_set()
new.focus_set()
new.wait_window()
if (new.cancelled):
return None
return new._return_values()
def _return_values(self):
"""Returns the result of this dialog, if any"""
return None
class Table(CommonFrame):
sort_up = " ▲"
sort_down = " ▼"
col_pack_options = {
FILL: X,
EXPAND: True
}
def __init__(self, master, column_headers, data, **kwargs):
"""
Creates a table
"""
self.column_headers = column_headers
self.data = data
super().__init__(master, **kwargs)
def _create_events(self):
"""Create events"""
self.scrollable_canvas.bind(
"<Configure>",
lambda e: self.scrollable_canvas.configure(
scrollregion=self.scrollable_canvas.bbox("all")
)
)
def _create_vars(self):
"""Create widget variables"""
def _create_widgets(self):
"""Create widgets"""
self.table_frame = Frame(self)
self.scrollable_canvas = Canvas(self.table_frame)
self.x_scroll = Scrollbar(
self, orient=HORIZONTAL, command=self.scrollable_canvas.xview)
self.y_scroll = Scrollbar(
self.table_frame, orient=VERTICAL, command=self.scrollable_canvas.yview)
self.scrollable_canvas.configure(yscrollcommand=self.y_scroll.set,
xscrollcommand=self.x_scroll.set)
self.table = Frame(self.scrollable_canvas)
for header_text in self.column_headers:
widget = Frame(self.table)
button = Button(widget, text=header_text)
button.configure(
command=lambda button=button: self._sort_command(button))
self._create_data_widgets()
def _create_data_widgets(self):
for row in self.data:
for x_index, col_frame in enumerate(self.table.children.values()):
widget = Text(col_frame, width=20, height=1)
widget.insert('1.0', row[x_index])
def _layout_widgets(self):
"""Layout widgets"""
for col_frame in self.table.children.values():
for widget in col_frame.children.values():
widget.pack(**self.col_pack_options)
col_frame.pack(side=LEFT, fill=X, expand=True)
self.x_scroll.pack(side=BOTTOM, fill=X)
self.y_scroll.pack(side=RIGHT, fill=Y)
self.scrollable_canvas.pack(expand=True, fill=BOTH)
self.scrollable_canvas.create_window(
(0, 0), window=self.table, anchor="nw")
self.table_frame.pack(expand=True, fill=BOTH)
def _sort_command(self, button):
"""Event that sorts by the element"""
self.__reset_button_sort_text(except_button=button)
if self.sort_up in button['text']:
button.configure(text=button['text'][:-2] + self.sort_down)
elif self.sort_down in button['text']:
button.configure(text=button['text'][:-2] + self.sort_up)
else:
button.configure(text=button['text'] + self.sort_up)
column_data = [
tuple(enumerate(column.pack_slaves())) for column in self.table.children.values()
]
column_to_sort_by = [
col for col in column_data if col[0][1] == button][0]
sort_kwargs = {
'key': self.__sort_key
}
if self.sort_down in button['text']:
sort_kwargs['reverse'] = True
sorted_column = sorted(column_to_sort_by[1:], **sort_kwargs)
self.__apply_sorting(sorted_column, column_data)
@staticmethod
def __sort_key(row):
text = row[1].get(1.0, END)
try:
return int(text)
except ValueError:
try:
return float(text)
except ValueError:
return text
def __apply_sorting(self, sorted_column, column_data):
for col in self.table.children.values():
for widget in tuple(col.children.values())[1:]:
widget.pack_forget()
index_order = [col[0] for col in sorted_column]
all_sorted_columns = []
for col in [data[1:] for data in column_data]:
all_sorted_columns.append([])
for index in index_order:
found = [t for t in col if t[0] == index][0]
all_sorted_columns[-1].append(found)
found[1].pack(**self.col_pack_options)
self.scrollable_canvas.update_idletasks()
def __reset_button_sort_text(self, except_button=None):
for col_widget in self.table.children.values():
button = tuple(col_widget.children.values())[0]
if button is not except_button:
button.configure(text=button['text'].replace(
self.sort_up, '').replace(self.sort_down, ''))
HeaderRow = Tuple[str, Callable[[Widget], Any]]
class NewTable(CommonFrame):
sort_up = " ▲"
sort_down = " ▼"
def __init__(self, master: Widget, headers: Optional[Tuple[HeaderRow, ...]] = None, **kwargs):
self.__headers = []
self.__sort_keys = []
self.__cell_widgets = []
super().__init__(master=master, **kwargs)
self.headers = headers
@property
def headers(self):
return self.__headers
@headers.deleter
def headers(self):
for button in self.headers:
button.destroy()
self.__headers = []
self.__sort_keys = []
@headers.setter
    def headers(self, headers: Tuple[HeaderRow, ...]):
self.__headers = []
self.__sort_keys = []
for label, key in headers:
self.__headers.append(self._create_header_button(label))
self.__sort_keys.append(
lambda widget=self.__headers[-1]: key(widget))
self._layout_table_widgets()
@property
def cell_widgets(self) -> List[Widget]:
return self.__cell_widgets
@cell_widgets.setter
def cell_widgets(self, widgets: Sequence[Widget]):
self.__cell_widgets = list(widgets)
self._layout_table_widgets()
def sort_data(self, column_index: int, sort_by: Optional[Callable] = None):
def chunked(sequence: Sequence, chunk_size: int) -> List[Sequence]:
return [sequence[i: i + chunk_size] for i in range(0, len(sequence), chunk_size)]
rows = chunked(self.cell_widgets, len(self.headers))
def sort_key(row):
widget = row[column_index]
return self.__sort_keys[column_index](widget)
if sort_by is not None:
sort_key = sort_by
new_rows = sorted(rows, key=sort_key)
widgets = []
for row in new_rows:
widgets.extend(row)
self.cell_widgets = widgets
def _create_header_button(self, text) -> Button:
button = Button(self.table, text=text)
button.configure(command=lambda index=len(
self.__headers): self.sort_data(index))
return button
def _create_widgets(self):
self.table_frame = Frame(self)
self.scrollable_canvas = Canvas(self.table_frame)
self.x_scroll = Scrollbar(
self, orient=HORIZONTAL, command=self.scrollable_canvas.xview)
self.y_scroll = Scrollbar(
self.table_frame, orient=VERTICAL, command=self.scrollable_canvas.yview)
self.scrollable_canvas.configure(yscrollcommand=self.y_scroll.set,
xscrollcommand=self.x_scroll.set)
self.table = Frame(self.scrollable_canvas)
def _layout_widgets(self):
self.x_scroll.pack(side=BOTTOM, fill=X)
self.y_scroll.pack(side=RIGHT, fill=Y)
self.scrollable_canvas.pack(expand=True, fill=BOTH)
self.scrollable_canvas.create_window(
(0, 0), window=self.table, anchor="nw")
self.table_frame.pack(expand=True, fill=BOTH)
self._layout_table_widgets()
def _layout_table_widgets(self):
# Configure the grid to expand
all_widgets = self.headers + self.cell_widgets
for button, coords in Autogrid((len(self.headers), ), 1).zip_dicts(all_widgets):
button.grid(**coords, sticky=E+W)
|
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import time
from gaiatest import GaiaTestCase
from gaiatest.mocks.mock_contact import MockContact
from marionette.errors import NoSuchElementException
class TestContacts(GaiaTestCase):
_loading_overlay = ('id', 'loading-overlay')
# Header buttons
_done_button_locator = ('id', 'save-button')
_edit_contact_button_locator = ('id', 'edit-contact-button')
_details_back_button_locator = ('id', 'details-back')
# Contact details panel
_contact_name_title = ('id', 'contact-name-title')
_call_phone_number_button_locator = ('id', 'call-or-pick-0')
# New/Edit contact fields
_given_name_field_locator = ('id', 'givenName')
_family_name_field_locator = ('id', 'familyName')
_phone_field_locator = ('id', "number_0")
def setUp(self):
GaiaTestCase.setUp(self)
# launch the Contacts app
self.app = self.apps.launch('Contacts')
self.wait_for_element_not_displayed(*self._loading_overlay)
self.contact = MockContact()
self.data_layer.insert_contact(self.contact)
self.marionette.refresh()
def create_contact_locator(self, contact):
return ('xpath', "//a[descendant::strong[text()='%s']]" % contact)
def test_edit_contact(self):
# https://moztrap.mozilla.org/manage/case/1310/
# First insert a new contact to edit
contact_locator = self.create_contact_locator(self.contact['givenName'])
self.wait_for_element_displayed(*contact_locator)
contact_listing = self.marionette.find_element(*contact_locator)
self.marionette.tap(contact_listing)
self.wait_for_element_displayed(*self._edit_contact_button_locator)
edit_contact = self.marionette.find_element(*self._edit_contact_button_locator)
self.marionette.tap(edit_contact)
# Now we'll update the mock contact and then insert the new values into the UI
self.contact['givenName'] = 'gaia%s' % repr(time.time()).replace('.', '')[10:]
self.contact['familyName'] = "testedit"
self.contact['tel']['value'] = "02011111111"
self.wait_for_element_displayed(*self._given_name_field_locator)
given_name_field = self.marionette.find_element(*self._given_name_field_locator)
given_name_field.clear()
given_name_field.send_keys(self.contact['givenName'])
family_name_field = self.marionette.find_element(*self._family_name_field_locator)
family_name_field.clear()
family_name_field.send_keys(self.contact['familyName'])
tel_field = self.marionette.find_element(*self._phone_field_locator)
tel_field.clear()
tel_field.send_keys(self.contact['tel']['value'])
done_button = self.marionette.find_element(*self._done_button_locator)
self.marionette.tap(done_button)
# Construct a new locator using the edited givenName
edited_contact_locator = self.create_contact_locator(self.contact['givenName'])
details_back_button = self.marionette.find_element(*self._details_back_button_locator)
self.marionette.tap(details_back_button)
# click back into the contact
self.wait_for_element_displayed(*edited_contact_locator)
edited_contact = self.marionette.find_element(*edited_contact_locator)
# Due to a previous issue this will check that the original contact is no longer present
self.assertRaises(NoSuchElementException,
self.marionette.find_element, contact_locator[0], contact_locator[1])
self.assertTrue(edited_contact.is_displayed(),
"Expected the edited contact to be present")
self.marionette.tap(edited_contact)
# Now assert that the values have updated
full_name = self.contact['givenName'] + " " + self.contact['familyName']
self.assertEqual(self.marionette.find_element(*self._contact_name_title).text,
full_name)
self.assertEqual(self.marionette.find_element(*self._call_phone_number_button_locator).text,
self.contact['tel']['value'])
def tearDown(self):
if hasattr(self, 'contact'):
# Have to switch back to Contacts frame to remove the contact
self.marionette.switch_to_frame()
self.marionette.switch_to_frame(self.app.frame)
self.data_layer.remove_contact(self.contact)
GaiaTestCase.tearDown(self)
|
import ply.lex as lex
from qsyasm.error import ParseError
class QsyASMLexer:
tokens = (
'INTEGER',
'FLOAT',
'IDENT',
'COMMA',
'LBRACKET',
'RBRACKET',
'LPAREN',
'RPAREN',
'PLUS',
'MIN',
'DIV',
'POW',
'MUL',
'NEWLINE',
'ADJ'
)
reserved = {
'adj': 'ADJ'
}
t_COMMA = r','
t_LBRACKET = r'\['
t_RBRACKET = r'\]'
t_LPAREN = r'\('
t_RPAREN = r'\)'
t_PLUS = r'\+'
t_MIN = r'-'
t_DIV = r'/'
t_POW = r'\*\*'
t_MUL = r'\*'
t_ignore = '\t\r '
t_ignore_COMMENT = r';.*'
def __init__(self, **kwargs):
self.lexer = lex.lex(module=self, **kwargs)
def t_IDENT(self, t):
r'[a-zA-Z_][a-zA-Z0-9_]*'
t.type = self.reserved.get(t.value, 'IDENT')
return t
    # FLOAT is defined before INTEGER: PLY tries function rules in definition order,
    # so r'\d+' would otherwise consume the integer part of a float and leave the
    # dot as a lexing error.
    def t_FLOAT(self, t):
        r'\d+\.\d+'
        t.value = float(t.value)
        return t
    def t_INTEGER(self, t):
        r'\d+'
        t.value = int(t.value)
        return t
def t_NEWLINE(self, t):
r'\n+'
t.lexer.lineno += t.value.count('\n')
return t
def t_error(self, t):
raise ParseError('Unknown token "{}"'.format(t.value[0]), t)
lexer = QsyASMLexer()
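# Quick illustration (the sample line is an assumption about typical input, not taken
# from the original file): tokenizing "cx q[0], q[1]" with the rules above yields
# IDENT IDENT LBRACKET INTEGER RBRACKET COMMA IDENT LBRACKET INTEGER RBRACKET.
def _example_tokens(source="cx q[0], q[1]"):
    lexer.lexer.input(source)
    return [(tok.type, tok.value) for tok in iter(lexer.lexer.token, None)]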
|
import sys
from flaskApp import db
from flaskApp.models import User
from flaskApp.error.error_handlers import *
from sqlalchemy import text
import string
class DbSearchOptionUtils(object):
def get_top_results_coursename(param, semester):
#cursor = db.get_db().cursor()
query = " select distinct CNum, CName from (select distinct CNum, CName from Course c, Prof_OH po where c.CSID = po.CSID and c.CName \
<> 'Dissertation Research' and c.CName <> 'PhD Research' and c.CName \
<> 'Independent Study' and c.CName like '%%%s%%' and c.Semester = '%s' and po.DayTime <> ''\
union\
select distinct CNum, CName from Course c, TA_OH t where c.CSID = t.CSID and c.CName \
<> 'Dissertation Research' and c.CName <> 'PhD Research' and c.CName <> \
'Independent Study' and c.CName like '%%%s%%' and c.Semester = '%s' and t.DayTime <> '') as temp limit 5" % (param, semester, param, semester)
sql = text(query)
resTuple = db.engine.execute(sql)
result = []
for item in resTuple:
print(item[1])
dictItem = {"id" : item[0], "Name" : item[1]}
result.append(dictItem)
#cursor.close()
return {"result" : result}
def get_top_results_instructor(param, semester):
query = "select distinct FName, LName, CNum, CName from (select distinct FName, LName, CNum, CName from Professor p,\
Teaches te, Prof_OH po, Course c where \
p.ProfId = te.ProfId and te.CSID = po.CSID and c.CSID = te.CSID and p.LName like '%%%s%%' and c.Semester = '%s' and po.DayTime <> '' \
union\
select distinct FName, LName, CNum, CName from Professor p, Teaches te, TA_OH t, Course c where p.ProfId = te.ProfId and te.CSID = t.CSID \
and te.CSID = c.CSID and\
p.LName like '%%%s%%' and c.Semester = '%s' and t.DayTime <> '') as temp limit 5" % (param, semester, param, semester)
sql = text(query)
resTuple = db.engine.execute(sql)
result = []
for item in resTuple:
print(item[2])
name = str(item[0]).lstrip().rstrip() + ' ' + str(item[1]).lstrip().rstrip()
dictItem = {"id" : item[2], "Instructor" : name, "Name" : item[3]}
result.append(dictItem)
return {"result" : result}
def get_top_results_courseID(param, semester):
query = " select distinct CNum, CName from (select distinct CNum, CName from Course c, Prof_OH po where c.CSID = po.CSID and c.CName \
<> 'Dissertation Research' and c.CName <> 'PhD Research' and c.CName <> 'Independent Study' \
and c.CNum like '%%%s%%' and c.Semester = '%s' and po.DayTime <> ''\
union\
select distinct CNum, CName from Course c, TA_OH t where c.CSID = t.CSID and c.CName \
<> 'Dissertation Research' and c.CName <> 'PhD Research' and c.CName <> 'Independent Study'\
and c.CName like '%%%s%%' and c.Semester = '%s' and t.DayTime <> '') as temp limit 5" % (param, semester, param, semester)
sql = text(query)
resTuple = db.engine.execute(sql)
result = []
for item in resTuple:
print(item[0])
name = str(item[0])
dictItem = {"id" : item[0], "Name" : item[1]}
result.append(dictItem)
return {"result" : result}
def get_top_results_all_courses(param, semester):
query = " select distinct CNum, CName from Course where CName \
<> 'Dissertation Research' and CName <> 'PhD Research' and CName <> 'Independent Study' \
and CNum like '%%%s%%' and Semester = '%s' limit 5" % (param, semester)
sql = text(query)
resTuple = db.engine.execute(sql)
result = []
for item in resTuple:
print(item[0])
name = str(item[0])
dictItem = {"id" : item[0], "Name" : item[1]}
result.append(dictItem)
return {"result" : result}
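# Note: the queries above interpolate `param` and `semester` directly into the SQL
# string, which is vulnerable to SQL injection. A hedged sketch of the same kind of
# lookup with bound parameters is given below (assuming the SQLAlchemy 1.x
# Engine.execute API already used above accepts keyword bind parameters).
def _example_bound_query(param, semester):
    sql = text("select distinct CNum, CName from Course "
               "where CName like :pattern and Semester = :semester limit 5")
    return [{"id": row[0], "Name": row[1]}
            for row in db.engine.execute(sql, pattern=f"%{param}%", semester=semester)]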
|
#-*- coding: utf-8 -*-
import os
import pickle
import logging
import pkg_resources
# Alias keeps nosexunit.__file__ available to expand() below
import HtmlXmlTestRunner_pkg.nosexunit as nosexunit
import HtmlXmlTestRunner_pkg.nosexunit.const as nconst
import HtmlXmlTestRunner_pkg.nosexunit.excepts as nexcepts
# Get a logger
logger = logging.getLogger('%s.%s' % (nconst.LOGGER, __name__))
class Singleton(object):
'''
Singleton implementation
On: http://www.python.org/download/releases/2.2.3/descrintro/
'''
def __new__(cls, *args, **kwds):
'''
Return the instance of the singleton
Call init method if not yet initialized
'''
it = cls.__dict__.get('__it__')
if it is not None:
return it
it = object.__new__(cls)
it.init(cls, *args, **kwds)
cls.__it__ = it
return it
def init(self, *args, **kwds):
'''Initialization on first call'''
pass
def reset(cls):
'''
Reset the instance of the singleton
Call close on the instance
'''
if cls.__dict__.get('__it__') is not None:
it = cls.__it__
cls.__it__ = None
it.close()
    reset = classmethod(reset)
def close(self):
'''Close the instance of the singleton'''
pass
def packages(root, search=False, exclude=nconst.SEARCH_EXCLUDE):
'''Return the package list contained by the folder'''
# Store the list of packages
pkgs = {}
# Check that source folder is not in a package
if os.path.exists(os.path.join(root, nconst.INIT)):
# The root package can't be a part of a package
raise nexcepts.ToolError('following folder can not contain %s file: %s' % (nconst.INIT, root))
    # Go through the folders
for folder, folders, files in os.walk(root):
# Filter on pattern
for bn in exclude:
# Check if pattern in and drop it
if bn in folders: folders.remove(bn)
# Folders to pop
pops = []
        # Go through the folders
for fld in [ os.path.join(folder, fld) for fld in folders ]:
# Check if folder has an __init__
if os.path.exists(os.path.join(fld, nconst.INIT)):
# This is a package, get the full description
entry = package(fld)
# Check if already exists
                if entry in pkgs: logger.warning('package %s already exists in following folder tree: %s' % (entry, root))
# Else add it
else: pkgs[entry] = fld
# Add the folders to pop
else: pops.append(os.path.basename(fld))
# Check if pop
if not search:
            # Go through the folders to pop
            for pop in pops: folders.remove(pop)
        # Go through the files
for path in [ os.path.join(folder, fn) for fn in files ]:
# Check if this is a module
if split(path)[1] == 'py' and os.path.basename(path) != nconst.INIT:
# Get the full description
entry = package(path)
# Check if already exists
                if entry in pkgs: logger.warning('module %s already exists in following folder tree: %s' % (entry, root))
# Else add it
else: pkgs[entry] = path
# Return the packages
return pkgs
def package(source):
'''Get the package that contains the source'''
# Check if source exists
if not os.path.exists(source): raise nexcepts.ToolError("source doesn't exists: %s" % source)
# Check if folder contains an __init__
folder = os.path.dirname(source)
# Get the base
base = split(os.path.basename(source))[0]
# Check if exists
if os.path.exists(os.path.join(folder, nconst.INIT)):
# Source is contained in a package
return '%s.%s' % (package(folder), base)
# Source not contained in a folder
else: return base
def split(fn):
'''Return the extension of the provided base file'''
# Get the parts of the file
sf = fn.split('.')
# Get the length
l = len(sf)
# Check if not extension
if l == 1: return (fn, None)
# Else, there is an extension
else:
# Get the last part as extension
ext = sf[-1]
# Join to get base
bn = '.'.join(sf[0:l-1])
# Return the 2-UPLE: base, extension
return (bn, ext)
def save(content, path, binary=False):
'''Save the provided content in a file'''
# Get the mode, here binary
if binary: mode = 'wb'
# Here, textual
else: mode = 'w'
# Open the file
fd = open(path, mode)
# Set the content
fd.write(content)
# Close the file
fd.close()
def load(path, binary=False):
'''Return the content of the file'''
# Get the mode, here binary
if binary: mode = 'rb'
# Here, textual
else: mode = 'r'
# Open the file
fd = open(path, mode)
# Get the content
content = fd.read()
# Close the file
fd.close()
# Return the content
return content
def create(folder):
'''Create a folder'''
# If not exists, create the folders
if not os.path.exists(folder): os.makedirs(folder)
# If exists and is a file, raise
elif os.path.isfile(folder): raise nexcepts.ToolError('following path exists but is not a folder: %s' %folder)
def clean(folder, prefix=None, ext=None):
'''Clean all file with the given extension and/or prefix in specified folder'''
# Check if folder exists
if not os.path.isdir(folder): raise nexcepts.ToolError("folder doesn't exist: %s" % folder)
    # Go through the folder
for bn in os.listdir(folder):
# Get the full path
full = os.path.join(folder, bn)
# Check if file
if os.path.isfile(full) and (not prefix or (prefix and bn.startswith(prefix))) and (not ext or (ext and split(bn)[1] == ext)):
# Clean the file
os.remove(full)
def get_test_id(test):
'''Get the ID of the provided test'''
# Try to call the id
try: return test.id()
# Failed to get it ! Get an unique test entry
except:
# Get the default base
entry = 'nose.nose'
# Check if UUID is available
try:
# Get UUID
import uuid
# Get the value
return '%s.%s' % (entry, uuid.uuid1())
# Else use id(...)
except: return '%s.%s' % (entry, id(test))
def identical(file1, file2):
'''Return True if it is the same file'''
# Check if on UNIX
try: return os.path.samefile(file1, file2)
# On Win32
except: return os.path.normpath(file1) == os.path.normpath(file2)
def on_posix():
'''Return True if run on POSIX platform'''
# Check OS name
return os.name == 'posix'
def extract(package, entry, folder, bn=None, binary=False):
'''Extract file with provided entry from the provided package'''
# Get the content of the entry
content = pkg_resources.resource_string(package, entry)
# Check if base is specified: here yes
if bn: path = os.path.join(folder, bn)
# Here no
else: path = os.path.join(folder, entry)
# Save content
save(content, path, binary=binary)
def kiding(package, entry, folder, bn=None, output='html', **kwarg):
'''Extract file with provided entry from the provided package and process the template'''
# Get the kid package
import kid
# Get the content of the entry
content = pkg_resources.resource_string(package, entry)
# Check if base is specified: here yes
if bn: path = os.path.join(folder, bn)
# Here no
else: path = os.path.join(folder, entry)
# Open the file
fd = open(path, 'w')
# Create the template
kid.Template(content, **kwarg).write(file=fd, output=output)
# Close the file descriptor
fd.close()
def extract_pic_js_css(target):
'''Extract the CSS and the Java Script in the target folder'''
# Get package
import pygments.formatters
# Extract the Java Script
extract(__name__, 'nosexunit.js', target)
# Extract the CSS
extract(__name__, 'nosexunit.css', target)
# Get the images folder
folder = os.path.join(target, 'images')
# Create it
create(folder)
# Get the PNG in it
extract(__name__, 'blank.png', folder, binary=True)
# Get the highlight CSS
save(pygments.formatters.HtmlFormatter().get_style_defs('.highlight'), os.path.join(target, 'highlight.css'), binary=False)
def highlight(content):
'''Highlight source code'''
# Get packages
import pygments
import pygments.lexers
import pygments.formatters
# Get a LEXER
lexer = pygments.lexers.PythonLexer()
# Get a formatter class
class HPyF(pygments.formatters.HtmlFormatter):
'''Class override to avoid > < changing in report'''
def wrap(self, source, outfile): return source
# Get a formatter
formatter = HPyF()
# Highlight
return pygments.highlight(content, lexer, formatter).splitlines()
def exchange(path, data=None):
'''
    Save `data` to the pickle file at `path` if `data` is provided
    Load and return the pickled data if `data` is not provided
'''
# Check if save of load
if data is not None:
# Get the file descriptor
fd = open(path, 'wb')
# Close the file
pickle.dump(data, fd)
# Close the file
fd.close()
# Load here
else:
# Check that file exists
if not os.path.exists(path): raise nexcepts.ToolError("exchange file doesn't exist: %s" % path)
# Get the file descriptor
fd = open(path, 'rb')
# Load data
data = pickle.load(fd)
# Close the file
fd.close()
# Return data
return data
def expand(environ):
'''Expand paths in environment variables'''
# Check if NoseXUnit in PYTHONPATH
set = False
# Get the path
path = os.path.dirname(os.path.dirname(os.path.abspath(nosexunit.__file__)))
    # Go through the variables
for entry in environ.keys():
# Check if in expanded list
if entry.lower().strip() in ['path', 'ld_library_path', 'libpath', 'shlib_path', 'pythonpath', ]:
            # Go through the path
sections = environ[entry].split(os.pathsep)
# Get the new list
atarashii = []
            # Go through the parts
for section in sections:
# Delete quote if on Win32
if not on_posix(): section = section.replace('"', '')
# Get the right one
section = section.strip()
# Check that useful
if section != '':
# Get the absolute path
atarashii.append(os.path.abspath(section))
# If PYTHONPATH, add NoseXUnit folder
if entry.lower().strip() == 'pythonpath':
# Add folder
atarashii.append(path)
# Set flag
set = True
# Replace entry
environ[entry] = os.pathsep.join(atarashii)
# Check if set
if not set: environ['PYTHONPATH'] = path
# Return dictionary
return environ
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.3 on 2017-12-04 04:26
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('agency', '0005_auto_20171127_0139'),
]
operations = [
migrations.RemoveField(
model_name='agency',
name='created',
),
migrations.RemoveField(
model_name='agency',
name='modified',
),
]
|
import re
def default_authority(request):
"""
Return the value of the h.authority config settings.
Falls back on returning request.domain if h.authority isn't set.
"""
return request.registry.settings.get("h.authority", request.domain)
def client_authority(request):
"""
Return the authority associated with an authenticated auth_client or None.
Once a request with an auth_client is authenticated, a principal is set
indicating the auth_client's verified authority
see :func:`~h.auth.util.principals_for_auth_client` for more details on
principals applied when auth_clients are authenticated
:rtype: str or None
"""
for principal in request.effective_principals:
match = re.match(r"^client_authority:(.+)$", principal)
if match and match.group(1):
return match.group(1)
return None
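# Minimal illustration (hypothetical stub request, not from the h codebase): a request
# whose principals include "client_authority:partner.example.com" yields that authority,
# while a request without such a principal falls through to None.
def _example_client_authority():
    class _StubRequest:
        effective_principals = ["system.Everyone", "client_authority:partner.example.com"]
    return client_authority(_StubRequest())  # -> "partner.example.com"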
|
# coding: utf-8
from __future__ import unicode_literals
from .common import InfoExtractor
from .youtube import YoutubeIE
from ..utils import (
determine_ext,
int_or_none,
parse_iso8601,
xpath_text,
)
class HeiseIE(InfoExtractor):
_VALID_URL = r'https?://(?:www\.)?heise\.de/(?:[^/]+/)+[^/]+-(?P<id>[0-9]+)\.html'
_TESTS = [{
'url': 'http://www.heise.de/video/artikel/Podcast-c-t-uplink-3-3-Owncloud-Tastaturen-Peilsender-Smartphone-2404147.html',
'md5': 'ffed432483e922e88545ad9f2f15d30e',
'info_dict': {
'id': '2404147',
'ext': 'mp4',
'title': "Podcast: c't uplink 3.3 – Owncloud / Tastaturen / Peilsender Smartphone",
'format_id': 'mp4_720p',
'timestamp': 1411812600,
'upload_date': '20140927',
'description': 'md5:c934cbfb326c669c2bcabcbe3d3fcd20',
'thumbnail': r're:^https?://.*/gallery/$',
}
}, {
# YouTube embed
'url': 'http://www.heise.de/newsticker/meldung/Netflix-In-20-Jahren-vom-Videoverleih-zum-TV-Revolutionaer-3814130.html',
'md5': 'e403d2b43fea8e405e88e3f8623909f1',
'info_dict': {
'id': '6kmWbXleKW4',
'ext': 'mp4',
'title': 'NEU IM SEPTEMBER | Netflix',
'description': 'md5:2131f3c7525e540d5fd841de938bd452',
'upload_date': '20170830',
'uploader': 'Netflix Deutschland, Österreich und Schweiz',
'uploader_id': 'netflixdach',
},
'params': {
'skip_download': True,
},
}, {
'url': 'http://www.heise.de/ct/artikel/c-t-uplink-3-3-Owncloud-Tastaturen-Peilsender-Smartphone-2403911.html',
'only_matching': True,
}, {
'url': 'http://www.heise.de/newsticker/meldung/c-t-uplink-Owncloud-Tastaturen-Peilsender-Smartphone-2404251.html?wt_mc=rss.ho.beitrag.atom',
'only_matching': True,
}, {
'url': 'http://www.heise.de/ct/ausgabe/2016-12-Spiele-3214137.html',
'only_matching': True,
}]
def _real_extract(self, url):
video_id = self._match_id(url)
webpage = self._download_webpage(url, video_id)
title = self._html_search_meta('fulltitle', webpage, default=None)
if not title or title == "c't":
title = self._search_regex(
r'<div[^>]+class="videoplayerjw"[^>]+data-title="([^"]+)"',
webpage, 'title')
yt_urls = YoutubeIE._extract_urls(webpage)
if yt_urls:
return self.playlist_from_matches(yt_urls, video_id, title, ie=YoutubeIE.ie_key())
container_id = self._search_regex(
r'<div class="videoplayerjw"[^>]+data-container="([0-9]+)"',
webpage, 'container ID')
sequenz_id = self._search_regex(
r'<div class="videoplayerjw"[^>]+data-sequenz="([0-9]+)"',
webpage, 'sequenz ID')
doc = self._download_xml(
'http://www.heise.de/videout/feed', video_id, query={
'container': container_id,
'sequenz': sequenz_id,
})
formats = []
for source_node in doc.findall('.//{http://rss.jwpcdn.com/}source'):
label = source_node.attrib['label']
height = int_or_none(self._search_regex(
r'^(.*?_)?([0-9]+)p$', label, 'height', default=None))
video_url = source_node.attrib['file']
ext = determine_ext(video_url, '')
formats.append({
'url': video_url,
'format_note': label,
'format_id': '%s_%s' % (ext, label),
'height': height,
})
self._sort_formats(formats)
description = self._og_search_description(
webpage, default=None) or self._html_search_meta(
'description', webpage)
return {
'id': video_id,
'title': title,
'description': description,
'thumbnail': (xpath_text(doc, './/{http://rss.jwpcdn.com/}image') or
self._og_search_thumbnail(webpage)),
'timestamp': parse_iso8601(
self._html_search_meta('date', webpage)),
'formats': formats,
}
|
import os
import torch
from torch import nn
from torch.utils.data.dataloader import DataLoader
from src.data_process.dataset import LMDataset
from src.train.eval import eval
from src.utils.constants import PAD_INDEX
from src.utils.logger import Logger
def test(args):
os.environ['CUDA_VISIBLE_DEVICES'] = str(args.gpu)
base_path = os.path.join('./data', args.data)
processed_base_path = os.path.join(base_path, 'processed')
processed_test_path = os.path.join(processed_base_path, 'test.npz')
save_path = os.path.join(processed_base_path, 'rnnlm.pkl')
log_base_path = os.path.join(base_path, 'log')
log_path = os.path.join(log_base_path, 'test_log.txt')
logger = Logger(log_path)
test_data = LMDataset(processed_test_path)
test_loader = DataLoader(
dataset=test_data,
batch_size=args.batch_size,
shuffle=False,
pin_memory=True
)
model = torch.load(save_path)
model = model.cuda()
criterion = nn.CrossEntropyLoss(ignore_index=PAD_INDEX)
test_loss, test_ppl = eval(model, test_loader, criterion)
logger.log('test_loss: %.4f\ttest_ppl: %.4f' % (test_loss, test_ppl)) |
import os
import time
from prompt_toolkit.formatted_text import FormattedText
from iredis import renders
from iredis.config import config
from iredis.completers import IRedisCompleter
def strip_formatted_text(formatted_text):
return "".join(text[1] for text in formatted_text)
def test_render_simple_string_raw_using_raw_render():
assert renders.OutputRender.render_raw(b"OK") == b"OK"
assert renders.OutputRender.render_raw(b"BUMPED 1") == b"BUMPED 1"
assert renders.OutputRender.render_raw(b"STILL 1") == b"STILL 1"
def test_render_simple_string():
assert renders.OutputRender.render_simple_string(b"OK") == FormattedText(
[("class:success", "OK")]
)
assert renders.OutputRender.render_simple_string(b"BUMPED 1") == FormattedText(
[("class:success", "BUMPED 1")]
)
assert renders.OutputRender.render_simple_string(b"STILL 1") == FormattedText(
[("class:success", "STILL 1")]
)
def test_render_list_index():
raw = ["hello", "world", "foo"]
out = renders._render_list([item.encode() for item in raw], raw)
out = strip_formatted_text(out)
assert isinstance(out, str)
assert "3)" in out
assert "1)" in out
assert "4)" not in out
def test_render_list_index_const_width():
raw = ["hello"] * 100
out = renders._render_list([item.encode() for item in raw], raw)
out = strip_formatted_text(out)
assert isinstance(out, str)
assert " 1)" in out
assert "\n100)" in out
raw = ["hello"] * 1000
out = renders._render_list([item.encode() for item in raw], raw)
out = strip_formatted_text(out)
assert " 1)" in out
assert "\n 999)" in out
assert "\n1000)" in out
raw = ["hello"] * 10
out = renders._render_list([item.encode() for item in raw], raw)
out = strip_formatted_text(out)
assert " 1)" in out
assert "\n 9)" in out
assert "\n10)" in out
def test_render_list_using_raw_render():
raw = ["hello", "world", "foo"]
out = renders.OutputRender.render_raw([item.encode() for item in raw])
assert b"hello\nworld\nfoo" == out
def test_render_list_with_nil_init():
raw = [b"hello", None, b"world"]
out = renders.OutputRender.render_list(raw)
out = strip_formatted_text(out)
assert out == '1) "hello"\n2) (nil)\n3) "world"'
def test_render_list_with_nil_init_while_config_raw():
raw = [b"hello", None, b"world"]
out = renders.OutputRender.render_raw(raw)
assert out == b"hello\n\nworld"
def test_render_list_with_empty_list_raw():
raw = []
out = renders.OutputRender.render_raw(raw)
assert out == b""
def test_render_list_with_empty_list():
raw = []
out = renders.OutputRender.render_list(raw)
out = strip_formatted_text(out)
assert out == "(empty list or set)"
def test_ensure_str_bytes():
assert renders.ensure_str(b"hello world") == r"hello world"
assert renders.ensure_str(b"hello'world") == r"hello'world"
assert renders.ensure_str("你好".encode()) == r"\xe4\xbd\xa0\xe5\xa5\xbd"
def test_double_quotes():
assert renders.double_quotes('hello"world') == r'"hello\"world"'
assert renders.double_quotes('"hello\\world"') == '"\\"hello\\world\\""'
assert renders.double_quotes("'") == '"\'"'
assert renders.double_quotes("\\") == '"\\"'
assert renders.double_quotes('"') == '"\\""'
def test_render_int():
config.raw = False
assert renders.OutputRender.render_int(12) == FormattedText(
[("class:type", "(integer) "), ("", "12")]
)
def test_render_int_raw():
assert renders.OutputRender.render_raw(12) == b"12"
def test_render_list_or_string():
config.raw = False
assert renders.OutputRender.render_list_or_string("") == '""'
assert renders.OutputRender.render_list_or_string("foo") == '"foo"'
assert renders.OutputRender.render_list_or_string(
[b"foo", b"bar"]
) == FormattedText(
[
("", "1)"),
("", " "),
("class:string", '"foo"'),
("", "\n"),
("", "2)"),
("", " "),
("class:string", '"bar"'),
]
)
def test_list_or_string():
config.raw = False
assert renders.OutputRender.render_string_or_int(b"10.1") == '"10.1"'
assert renders.OutputRender.render_string_or_int(3) == FormattedText(
[("class:type", "(integer) "), ("", "3")]
)
def test_command_keys():
completer = IRedisCompleter()
completer.key_completer.words = []
config.raw = False
rendered = renders.OutputRender.command_keys([b"cat", b"dog", b"banana"])
completer.update_completer_for_response("KEYS", None, [b"cat", b"dog", b"banana"])
assert rendered == FormattedText(
[
("", "1)"),
("", " "),
("class:key", '"cat"'),
("", "\n"),
("", "2)"),
("", " "),
("class:key", '"dog"'),
("", "\n"),
("", "3)"),
("", " "),
("class:key", '"banana"'),
]
)
assert completer.key_completer.words == ["banana", "dog", "cat"]
def test_command_scan():
completer = IRedisCompleter()
completer.key_completer.words = []
config.raw = False
rendered = renders.OutputRender.command_scan(
[b"44", [b"a", b"key:__rand_int__", b"dest", b" a"]]
)
completer.update_completer_for_response(
"SCAN", ("0",), [b"44", [b"a", b"key:__rand_int__", b"dest", b" a"]]
)
assert rendered == FormattedText(
[
("class:type", "(cursor) "),
("class:integer", "44"),
("", "\n"),
("", "1)"),
("", " "),
("class:key", '"a"'),
("", "\n"),
("", "2)"),
("", " "),
("class:key", '"key:__rand_int__"'),
("", "\n"),
("", "3)"),
("", " "),
("class:key", '"dest"'),
("", "\n"),
("", "4)"),
("", " "),
("class:key", '" a"'),
]
)
assert completer.key_completer.words == [" a", "dest", "key:__rand_int__", "a"]
def test_command_sscan():
completer = IRedisCompleter()
completer.member_completer.words = []
rendered = renders.OutputRender.command_sscan(
[b"44", [b"a", b"member:__rand_int__", b"dest", b" a"]]
)
completer.update_completer_for_response(
"SSCAN", (0), [b"44", [b"a", b"member:__rand_int__", b"dest", b" a"]]
)
assert rendered == FormattedText(
[
("class:type", "(cursor) "),
("class:integer", "44"),
("", "\n"),
("", "1)"),
("", " "),
("class:member", '"a"'),
("", "\n"),
("", "2)"),
("", " "),
("class:member", '"member:__rand_int__"'),
("", "\n"),
("", "3)"),
("", " "),
("class:member", '"dest"'),
("", "\n"),
("", "4)"),
("", " "),
("class:member", '" a"'),
]
)
assert completer.member_completer.words == [
" a",
"dest",
"member:__rand_int__",
"a",
]
def test_command_sscan_config_raw():
completer = IRedisCompleter()
completer.member_completer.words = []
rendered = renders.OutputRender.render_raw(
[b"44", [b"a", b"member:__rand_int__", b"dest", b" a"]]
)
completer.update_completer_for_response(
"SSCAN", (0), [b"44", [b"a", b"member:__rand_int__", b"dest", b" a"]]
)
assert rendered == b"44\na\nmember:__rand_int__\ndest\n a"
assert completer.member_completer.words == [
" a",
"dest",
"member:__rand_int__",
"a",
]
def test_render_members():
completer = IRedisCompleter()
completer.member_completer.words = []
config.withscores = True
resp = [b"duck", b"667", b"camel", b"708"]
rendered = renders.OutputRender.render_members(resp)
completer.update_completer_for_response("ZRANGE", ("foo", "0", "-1"), resp)
assert rendered == FormattedText(
[
("", "1)"),
("", " "),
("class:integer", "667 "),
("class:member", '"duck"'),
("", "\n"),
("", "2)"),
("", " "),
("class:integer", "708 "),
("class:member", '"camel"'),
]
)
assert completer.member_completer.words == ["camel", "duck"]
def test_render_members_config_raw():
completer = IRedisCompleter()
completer.member_completer.words = []
config.withscores = True
resp = [b"duck", b"667", b"camel", b"708"]
rendered = renders.OutputRender.render_raw(resp)
completer.update_completer_for_response("ZRANGE", (), resp)
assert rendered == b"duck\n667\ncamel\n708"
assert completer.member_completer.words == ["camel", "duck"]
def test_render_unixtime_config_raw():
# fake the timezone and reload
os.environ["TZ"] = "Asia/Shanghai"
time.tzset()
rendered = renders.OutputRender.render_unixtime(1570469891)
assert rendered == FormattedText(
[
("class:type", "(integer) "),
("", "1570469891"),
("", "\n"),
("class:type", "(local time)"),
("", " "),
("", "2019-10-08 01:38:11"),
]
)
def test_render_unixtime():
rendered = renders.OutputRender.render_raw(1570469891)
assert rendered == b"1570469891"
def test_bulk_string_reply():
assert renders.OutputRender.render_bulk_string(b"'\"") == '''"'\\""'''
def test_bulk_string_reply_raw():
assert renders.OutputRender.render_raw(b"hello") == b"hello"
def test_render_bulk_string_decoded():
EXPECTED_RENDER = """# Server\nredis_version:5.0.5\nredis_git_sha1:00000000\nredis_git_dirty:0\nredis_build_id:31cd6e21ec924b46""" # noqa
_input = b"# Server\r\nredis_version:5.0.5\r\nredis_git_sha1:00000000\r\nredis_git_dirty:0\r\nredis_build_id:31cd6e21ec924b46" # noqa
assert renders.OutputRender.render_bulk_string_decode(_input) == EXPECTED_RENDER
def test_render_bulk_string_decoded_with_decoded_utf8():
EXPECTED_RENDER = """# Server\nredis_version:5.0.5\nredis_git_sha1:00000000\nredis_git_dirty:0\nredis_build_id:31cd6e21ec924b46""" # noqa
_input = "# Server\r\nredis_version:5.0.5\r\nredis_git_sha1:00000000\r\nredis_git_dirty:0\r\nredis_build_id:31cd6e21ec924b46" # noqa
assert renders.OutputRender.render_bulk_string_decode(_input) == EXPECTED_RENDER
def test_render_time():
value = [b"1571305643", b"765481"]
assert renders.OutputRender.render_time(value) == FormattedText(
[
("class:type", "(unix timestamp) "),
("", "1571305643"),
("", "\n"),
("class:type", "(millisecond) "),
("", "765481"),
("", "\n"),
("class:type", "(convert to local timezone) "),
("", "2019-10-17 17:47:23.765481"),
]
)
assert renders.OutputRender.render_raw(value) == b"1571305643\n765481"
def test_render_nested_pairs():
text = [
b"peak.allocated",
10160336,
b"lua.caches",
0,
b"db.0",
[b"overhead.hashtable.main", 648, b"overhead.hashtable.expires", 32],
b"db.1",
[b"overhead.hashtable.main", 112, b"overhead.hashtable.expires", 32],
b"fragmentation",
b"0.062980629503726959",
b"fragmentation.bytes",
-9445680,
]
assert renders.OutputRender.render_raw(text) == (
b"peak.allocated\n10160336\nlua.caches\n0\ndb.0\noverhead.hashtable.main\n64"
b"8\noverhead.hashtable.expires\n32\ndb.1\noverhead.hashtable.main\n112\nove"
b"rhead.hashtable.expires\n32\nfragmentation\n0.062980629503726959\nfragmentat"
b"ion.bytes\n-9445680"
)
assert renders.OutputRender.render_nested_pair(text) == FormattedText(
[
("class:string", "peak.allocated: "),
("class:value", "10160336"),
("", "\n"),
("class:string", "lua.caches: "),
("class:value", "0"),
("", "\n"),
("class:string", "db.0: "),
("", "\n"),
("class:string", " overhead.hashtable.main: "),
("class:value", "648"),
("", "\n"),
("class:string", " overhead.hashtable.expires: "),
("class:value", "32"),
("", "\n"),
("class:string", "db.1: "),
("", "\n"),
("class:string", " overhead.hashtable.main: "),
("class:value", "112"),
("", "\n"),
("class:string", " overhead.hashtable.expires: "),
("class:value", "32"),
("", "\n"),
("class:string", "fragmentation: "),
("class:value", "0.062980629503726959"),
("", "\n"),
("class:string", "fragmentation.bytes: "),
("class:value", "-9445680"),
]
)
def test_render_nested_list():
text = [[b"get", 2, [b"readonly", b"fast"], 1, 1, 1]]
assert renders.OutputRender.render_list(text) == FormattedText(
[
("", "1)"),
("", " "),
("", "1)"),
("", " "),
("class:string", '"get"'),
("", "\n"),
("", " "),
("", "2)"),
("", " "),
("class:string", '"2"'),
("", "\n"),
("", " "),
("", "3)"),
("", " "),
("", "1)"),
("", " "),
("class:string", '"readonly"'),
("", "\n"),
("", " "),
("", "2)"),
("", " "),
("class:string", '"fast"'),
("", "\n"),
("", " "),
("", "4)"),
("", " "),
("class:string", '"1"'),
("", "\n"),
("", " "),
("", "5)"),
("", " "),
("class:string", '"1"'),
("", "\n"),
("", " "),
("", "6)"),
("", " "),
("class:string", '"1"'),
]
)
def test_render_bytes(config):
assert renders.OutputRender.render_bytes(b"bytes\n") == b"bytes"
def test_render_bytes_raw(config):
assert renders.OutputRender.render_raw(b"bytes\n") == b"bytes\n"
|
#! -*- coding: UTF-8 -*-
"""
Authentication module created by ma0 at contraslash.com
""" |
"""
fit generalise (gs) task model with external data (not simulated)
"""
import sys, os
import numpy as np
import pandas as pd
import stan
import arviz as az
import seaborn as sns
sys.path.append('.')
from simulations.sim_generalise_gs import generalise_gs_preprocess_func
from data_fit.fit_bandit3arm_combined import comp_hdi_mean_data
from data_fit.fit_bandit3arm_combined import plot_violin_params_mean
def extract_ind_results(df,pars_ind,data_dict):
out_col_names = []
out_df = np.zeros([data_dict['N'],len(pars_ind)*2])
i=0
for ind_par in pars_ind:
pattern = r'\A'+ind_par+r'.\d+'
out_col_names.append(ind_par+'_mean')
out_col_names.append(ind_par+'_std')
mean_val=df.iloc[:,df.columns.str.contains(pattern)].mean(axis=0).to_frame()
std_val=df.iloc[:,df.columns.str.contains(pattern)].std(axis=0).to_frame()
out_df[:,2*i:2*(i+1)] = np.concatenate([mean_val.values,std_val.values],axis=1)
i+=1
out_df = pd.DataFrame(out_df,columns=out_col_names)
beh_col_names = ['total','avg_rt','std_rt']
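    # behavioural summary per subject; 'total' appears to start from 600 points,
    # subtract 2 per recorded choice and add 10 per positive outcome
    # (inferred from the expression below, not documented in the source)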
total_np = 600+data_dict['choice'].sum(axis=1,keepdims=True)*(-2)+data_dict['outcome'].sum(axis=1,keepdims=True)*(10)
avg_rt_np = data_dict['rt'].mean(axis=1,keepdims=True)
std_rt_np = data_dict['rt'].std(axis=1,keepdims=True)
beh_df = pd.DataFrame(np.concatenate([total_np,avg_rt_np,std_rt_np],axis=1),columns=beh_col_names)
out_df = beh_df.join(out_df)
return out_df
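# A minimal, hedged illustration (commented out) of what extract_ind_results
# expects: per-subject draws live in columns named like 'sigma_a.1', 'sigma_a.2',
# ... (matched by the r'\A<par>.\d+' pattern above), and data_dict must provide
# 'N', 'choice', 'outcome' and 'rt' arrays of shape (N, T). The values below are
# placeholders, not real fit output.
#
#   df_example = pd.DataFrame({'sigma_a.1': [0.1, 0.2], 'sigma_a.2': [0.3, 0.4]})
#   data_example = {'N': 2,
#                   'choice': np.zeros((2, 10)),
#                   'outcome': np.zeros((2, 10)),
#                   'rt': np.ones((2, 10))}
#   ind_df = extract_ind_results(df_example, ['sigma_a'], data_example)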
if __name__ == "__main__":
try:
groups_comp = sys.argv[1]
groups_comp = groups_comp.split(",")
except IndexError:
groups_comp = ['']
# groups_comp=['A','B']
# parse data
txt_path = f'./transformed_data/generalise/generalise_data.txt'
data_dict = generalise_gs_preprocess_func(txt_path)#, task_params=task_params)
    with open('./models/generalise_gs.stan', 'r') as stan_file:  # moved to y changes
        model_code = stan_file.read()
pars_ind = ['sigma_a', 'sigma_n', 'eta', 'kappa', 'beta', 'bias']
pars = ['mu_sigma_a', 'mu_sigma_n', 'mu_eta', 'mu_kappa', 'mu_beta', 'mu_bias']
fits=[]
for g in groups_comp:
group_value = data_dict['group']
print('Group: '+g)
if not g=='':
group_bool = [i for i,x in enumerate([g == val for val in data_dict['group']]) if x]
group_value = data_dict['group'][group_bool]
data_dict_gr = {}
for key, value in data_dict.items():
if key not in ['N','T','group']:
data_dict_gr[key] = value[group_bool]
elif key not in ['group']:
data_dict_gr[key] = value
else:
continue
else:
data_dict_gr = data_dict
data_dict_gr.pop('group')
data_dict_gr['N'] = data_dict_gr['rt'].shape[0]
# fit stan model
posterior = stan.build(program_code=model_code, data=data_dict_gr)
fit = posterior.sample(num_samples=2000, num_chains=4, num_warmup=1000)
fits.append(fit)
        df = fit.to_frame()  # pandas DataFrame (requires pandas)
data_dict_gr['group'] = group_value
# individual results
df_ind = extract_ind_results(df,pars_ind,data_dict_gr)
subjID_df=pd.DataFrame((data_dict_gr['subjID'],data_dict_gr['group'])).transpose()
subjID_df.columns = ['subjID','group']
df_ind = subjID_df.join(df_ind)
print(df['mu_sigma_a'].agg(['mean','var']))
print(df['mu_beta'].agg(['mean','var']))
# saving traces
df_extracted = df[pars]
save_dir = './data_output/generalise_mydata/'
if not os.path.isdir(save_dir):
os.mkdir(save_dir)
        sfile = save_dir + f'mydata_fit_group_trace{g}.csv'
        s_ind_file = save_dir + f'mydata_fit_ind_est{g}.csv'
df_extracted.to_csv(sfile, index=None)
df_ind.to_csv(s_ind_file, index=None)
diag_plot = az.plot_trace(fit,var_names=pars,compact=True,combined=True)
save_dir = './data_output/generalise_mydata/'
if not os.path.isdir(save_dir):
os.mkdir(save_dir)
save_name = 'diag_post_trace'+g+'.png'
fig = diag_plot.ravel()[0].figure
fig.savefig(save_dir+save_name,bbox_inches='tight',pad_inches=0)
comp_hdi_mean_data('generalise', param_ls=pars, groups_comp=groups_comp)
plot_violin_params_mean('generalise', param_ls=pars, groups_comp=groups_comp)
hdi_plot = az.plot_forest(fits,model_names=groups_comp,var_names=pars,figsize=(7,7),combined=True)
fig = hdi_plot.ravel()[0].figure
save_name = 'HDI_comp'+''.join(groups_comp)+'.png'
save_dir = './data_output/generalise_mydata/'
fig.savefig(save_dir+save_name,bbox_inches='tight',pad_inches=0)
|
#
# PySNMP MIB module LC-PHYSICAL-ENTITIES-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/LC-PHYSICAL-ENTITIES-MIB
# Produced by pysmi-0.3.4 at Mon Apr 29 19:55:35 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
OctetString, ObjectIdentifier, Integer = mibBuilder.importSymbols("ASN1", "OctetString", "ObjectIdentifier", "Integer")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ConstraintsUnion, ConstraintsIntersection, ValueSizeConstraint, SingleValueConstraint, ValueRangeConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "ConstraintsUnion", "ConstraintsIntersection", "ValueSizeConstraint", "SingleValueConstraint", "ValueRangeConstraint")
lancastTraps, lancastMibModulesA = mibBuilder.importSymbols("LANCAST-MIB", "lancastTraps", "lancastMibModulesA")
ModuleCompliance, NotificationGroup = mibBuilder.importSymbols("SNMPv2-CONF", "ModuleCompliance", "NotificationGroup")
iso, Gauge32, Unsigned32, MibIdentifier, Bits, ModuleIdentity, IpAddress, MibScalar, MibTable, MibTableRow, MibTableColumn, TimeTicks, Counter32, Integer32, ObjectIdentity, NotificationType, Counter64 = mibBuilder.importSymbols("SNMPv2-SMI", "iso", "Gauge32", "Unsigned32", "MibIdentifier", "Bits", "ModuleIdentity", "IpAddress", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "TimeTicks", "Counter32", "Integer32", "ObjectIdentity", "NotificationType", "Counter64")
DisplayString, TextualConvention = mibBuilder.importSymbols("SNMPv2-TC", "DisplayString", "TextualConvention")
physicalEntities = ModuleIdentity((1, 3, 6, 1, 4, 1, 2745, 1, 2))
physicalEntities.setRevisions(('1999-03-03 12:00',))
if mibBuilder.loadTexts: physicalEntities.setLastUpdated('9903031200Z')
if mibBuilder.loadTexts: physicalEntities.setOrganization('Lancast Inc')
chassisGroup = MibIdentifier((1, 3, 6, 1, 4, 1, 2745, 1, 2, 2))
backPlaneGroup = MibIdentifier((1, 3, 6, 1, 4, 1, 2745, 1, 2, 3))
powerSupplyGroup = MibIdentifier((1, 3, 6, 1, 4, 1, 2745, 1, 2, 4))
modulesGroup = MibIdentifier((1, 3, 6, 1, 4, 1, 2745, 1, 2, 5))
ePortGroup = MibIdentifier((1, 3, 6, 1, 4, 1, 2745, 1, 2, 6))
serialPortGroup = MibIdentifier((1, 3, 6, 1, 4, 1, 2745, 1, 2, 7))
chassisTable = MibTable((1, 3, 6, 1, 4, 1, 2745, 1, 2, 2, 1), )
if mibBuilder.loadTexts: chassisTable.setStatus('current')
chassisTableEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2745, 1, 2, 2, 1, 1), ).setIndexNames((0, "LC-PHYSICAL-ENTITIES-MIB", "chassisEntityIndex"))
if mibBuilder.loadTexts: chassisTableEntry.setStatus('current')
chassisEntityIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 2745, 1, 2, 2, 1, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: chassisEntityIndex.setStatus('current')
chassisDescription = MibTableColumn((1, 3, 6, 1, 4, 1, 2745, 1, 2, 2, 1, 1, 2), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(1, 64))).setMaxAccess("readonly")
if mibBuilder.loadTexts: chassisDescription.setStatus('current')
chassisPartNumber = MibTableColumn((1, 3, 6, 1, 4, 1, 2745, 1, 2, 2, 1, 1, 3), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(1, 64))).setMaxAccess("readonly")
if mibBuilder.loadTexts: chassisPartNumber.setStatus('current')
chassisNumSlots = MibTableColumn((1, 3, 6, 1, 4, 1, 2745, 1, 2, 2, 1, 1, 4), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: chassisNumSlots.setStatus('current')
chassisCurrentTemp = MibTableColumn((1, 3, 6, 1, 4, 1, 2745, 1, 2, 2, 1, 1, 5), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: chassisCurrentTemp.setStatus('current')
chassisMaxTemp = MibTableColumn((1, 3, 6, 1, 4, 1, 2745, 1, 2, 2, 1, 1, 6), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: chassisMaxTemp.setStatus('current')
chassisReset = MibTableColumn((1, 3, 6, 1, 4, 1, 2745, 1, 2, 2, 1, 1, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("reset", 1), ("resetable", 2), ("not-resetable", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: chassisReset.setStatus('current')
lastEntityResetReason = MibTableColumn((1, 3, 6, 1, 4, 1, 2745, 1, 2, 2, 1, 1, 8), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("cold-start", 1), ("nms-sw-reset", 2), ("download-reset", 3), ("watch-dog-timeout", 4)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: lastEntityResetReason.setStatus('current')
lastEntityResetIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 2745, 1, 2, 2, 1, 1, 9), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: lastEntityResetIndex.setStatus('current')
lastEntityResetTime = MibTableColumn((1, 3, 6, 1, 4, 1, 2745, 1, 2, 2, 1, 1, 10), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(1, 64))).setMaxAccess("readonly")
if mibBuilder.loadTexts: lastEntityResetTime.setStatus('current')
lastEntityResetType = MibTableColumn((1, 3, 6, 1, 4, 1, 2745, 1, 2, 2, 1, 1, 11), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(3, 9, 10))).clone(namedValues=NamedValues(("chassis", 3), ("module", 9), ("port", 10)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: lastEntityResetType.setStatus('current')
backPlaneTable = MibTable((1, 3, 6, 1, 4, 1, 2745, 1, 2, 3, 1), )
if mibBuilder.loadTexts: backPlaneTable.setStatus('current')
backPlaneTableEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2745, 1, 2, 3, 1, 1), ).setIndexNames((0, "LC-PHYSICAL-ENTITIES-MIB", "backPlaneEntityIndex"))
if mibBuilder.loadTexts: backPlaneTableEntry.setStatus('current')
backPlaneEntityIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 2745, 1, 2, 3, 1, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: backPlaneEntityIndex.setStatus('current')
backPlaneDescription = MibTableColumn((1, 3, 6, 1, 4, 1, 2745, 1, 2, 3, 1, 1, 2), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(1, 64))).setMaxAccess("readonly")
if mibBuilder.loadTexts: backPlaneDescription.setStatus('current')
backPlanePartNumber = MibTableColumn((1, 3, 6, 1, 4, 1, 2745, 1, 2, 3, 1, 1, 3), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(1, 64))).setMaxAccess("readonly")
if mibBuilder.loadTexts: backPlanePartNumber.setStatus('current')
backPlaneSerialNumber = MibTableColumn((1, 3, 6, 1, 4, 1, 2745, 1, 2, 3, 1, 1, 4), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(1, 64))).setMaxAccess("readonly")
if mibBuilder.loadTexts: backPlaneSerialNumber.setStatus('current')
backPlaneManufactureDate = MibTableColumn((1, 3, 6, 1, 4, 1, 2745, 1, 2, 3, 1, 1, 5), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(1, 64))).setMaxAccess("readonly")
if mibBuilder.loadTexts: backPlaneManufactureDate.setStatus('current')
backPlaneHWRevisionNumber = MibTableColumn((1, 3, 6, 1, 4, 1, 2745, 1, 2, 3, 1, 1, 6), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(1, 64))).setMaxAccess("readonly")
if mibBuilder.loadTexts: backPlaneHWRevisionNumber.setStatus('current')
powerSupplyTable = MibTable((1, 3, 6, 1, 4, 1, 2745, 1, 2, 4, 1), )
if mibBuilder.loadTexts: powerSupplyTable.setStatus('current')
powerSupplyTableEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2745, 1, 2, 4, 1, 1), ).setIndexNames((0, "LC-PHYSICAL-ENTITIES-MIB", "powerSupplyEntityIndex"))
if mibBuilder.loadTexts: powerSupplyTableEntry.setStatus('current')
powerSupplyEntityIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 2745, 1, 2, 4, 1, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: powerSupplyEntityIndex.setStatus('current')
powerSupplyStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 2745, 1, 2, 4, 1, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("on", 1), ("off", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: powerSupplyStatus.setStatus('current')
powerSupplyType = MibTableColumn((1, 3, 6, 1, 4, 1, 2745, 1, 2, 4, 1, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("ac", 1), ("dc", 2), ("universal", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: powerSupplyType.setStatus('current')
powerSupply5vCurrent = MibTableColumn((1, 3, 6, 1, 4, 1, 2745, 1, 2, 4, 1, 1, 4), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: powerSupply5vCurrent.setStatus('current')
powerSupply5vMin = MibTableColumn((1, 3, 6, 1, 4, 1, 2745, 1, 2, 4, 1, 1, 5), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: powerSupply5vMin.setStatus('current')
powerSupply5vMax = MibTableColumn((1, 3, 6, 1, 4, 1, 2745, 1, 2, 4, 1, 1, 6), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: powerSupply5vMax.setStatus('current')
powerSupplyUnitIdentifier = MibTableColumn((1, 3, 6, 1, 4, 1, 2745, 1, 2, 4, 1, 1, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("ps-A", 1), ("ps-B", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: powerSupplyUnitIdentifier.setStatus('current')
moduleTable = MibTable((1, 3, 6, 1, 4, 1, 2745, 1, 2, 5, 1), )
if mibBuilder.loadTexts: moduleTable.setStatus('current')
moduleTableEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2745, 1, 2, 5, 1, 1), ).setIndexNames((0, "LC-PHYSICAL-ENTITIES-MIB", "modEntityIndex"))
if mibBuilder.loadTexts: moduleTableEntry.setStatus('current')
modEntityIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 2745, 1, 2, 5, 1, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: modEntityIndex.setStatus('current')
modAdminState = MibTableColumn((1, 3, 6, 1, 4, 1, 2745, 1, 2, 5, 1, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2))).clone(namedValues=NamedValues(("not-applicable", 0), ("up", 1), ("down", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: modAdminState.setStatus('current')
modOperStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 2745, 1, 2, 5, 1, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("up", 1), ("down", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: modOperStatus.setStatus('current')
modType = MibTableColumn((1, 3, 6, 1, 4, 1, 2745, 1, 2, 5, 1, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11))).clone(namedValues=NamedValues(("unknown", 1), ("reserved", 2), ("mgmnt", 3), ("singleTwister", 4), ("dualTwister", 5), ("redundantTwister", 6), ("displayModule", 7), ("singleTwister2", 8), ("fixedPort", 9), ("rateAdapter", 10), ("gigabitTwister", 11)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: modType.setStatus('current')
modDescription = MibTableColumn((1, 3, 6, 1, 4, 1, 2745, 1, 2, 5, 1, 1, 5), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(1, 64))).setMaxAccess("readonly")
if mibBuilder.loadTexts: modDescription.setStatus('current')
modName = MibTableColumn((1, 3, 6, 1, 4, 1, 2745, 1, 2, 5, 1, 1, 6), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(1, 64))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: modName.setStatus('current')
modPartNumber = MibTableColumn((1, 3, 6, 1, 4, 1, 2745, 1, 2, 5, 1, 1, 7), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(1, 64))).setMaxAccess("readonly")
if mibBuilder.loadTexts: modPartNumber.setStatus('current')
modSerialNumber = MibTableColumn((1, 3, 6, 1, 4, 1, 2745, 1, 2, 5, 1, 1, 8), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(1, 64))).setMaxAccess("readonly")
if mibBuilder.loadTexts: modSerialNumber.setStatus('current')
modManufactureDate = MibTableColumn((1, 3, 6, 1, 4, 1, 2745, 1, 2, 5, 1, 1, 9), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(1, 64))).setMaxAccess("readonly")
if mibBuilder.loadTexts: modManufactureDate.setStatus('current')
modDiagnosticTestStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 2745, 1, 2, 5, 1, 1, 10), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(1, 64))).setMaxAccess("readonly")
if mibBuilder.loadTexts: modDiagnosticTestStatus.setStatus('current')
modHWRevisionNumber = MibTableColumn((1, 3, 6, 1, 4, 1, 2745, 1, 2, 5, 1, 1, 11), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(1, 64))).setMaxAccess("readonly")
if mibBuilder.loadTexts: modHWRevisionNumber.setStatus('current')
modNumPorts = MibTableColumn((1, 3, 6, 1, 4, 1, 2745, 1, 2, 5, 1, 1, 12), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: modNumPorts.setStatus('current')
modFirstSlot = MibTableColumn((1, 3, 6, 1, 4, 1, 2745, 1, 2, 5, 1, 1, 13), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: modFirstSlot.setStatus('current')
modNumSlotsOccupied = MibTableColumn((1, 3, 6, 1, 4, 1, 2745, 1, 2, 5, 1, 1, 14), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: modNumSlotsOccupied.setStatus('current')
modReset = MibTableColumn((1, 3, 6, 1, 4, 1, 2745, 1, 2, 5, 1, 1, 15), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("reset", 1), ("resetable", 2), ("not-resetable", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: modReset.setStatus('current')
mgmntTable = MibTable((1, 3, 6, 1, 4, 1, 2745, 1, 2, 5, 2), )
if mibBuilder.loadTexts: mgmntTable.setStatus('current')
mgmntTableEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2745, 1, 2, 5, 2, 1), ).setIndexNames((0, "LC-PHYSICAL-ENTITIES-MIB", "mgmntEntityIndex"))
if mibBuilder.loadTexts: mgmntTableEntry.setStatus('current')
mgmntEntityIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 2745, 1, 2, 5, 2, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mgmntEntityIndex.setStatus('current')
mgmntBootImageName = MibTableColumn((1, 3, 6, 1, 4, 1, 2745, 1, 2, 5, 2, 1, 2), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(1, 64))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mgmntBootImageName.setStatus('current')
mgmntBootImageVersion = MibTableColumn((1, 3, 6, 1, 4, 1, 2745, 1, 2, 5, 2, 1, 3), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(1, 64))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mgmntBootImageVersion.setStatus('current')
mgmntCoreImageName = MibTableColumn((1, 3, 6, 1, 4, 1, 2745, 1, 2, 5, 2, 1, 4), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(1, 64))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mgmntCoreImageName.setStatus('current')
mgmntCoreImageVersion = MibTableColumn((1, 3, 6, 1, 4, 1, 2745, 1, 2, 5, 2, 1, 5), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(1, 64))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mgmntCoreImageVersion.setStatus('current')
mgmntAppImageName = MibTableColumn((1, 3, 6, 1, 4, 1, 2745, 1, 2, 5, 2, 1, 6), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(1, 64))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mgmntAppImageName.setStatus('current')
mgmntAppImageVersion = MibTableColumn((1, 3, 6, 1, 4, 1, 2745, 1, 2, 5, 2, 1, 7), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(1, 64))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mgmntAppImageVersion.setStatus('current')
mgmntRamMemorySize = MibTableColumn((1, 3, 6, 1, 4, 1, 2745, 1, 2, 5, 2, 1, 8), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mgmntRamMemorySize.setStatus('current')
mgmntFlashMemorySize = MibTableColumn((1, 3, 6, 1, 4, 1, 2745, 1, 2, 5, 2, 1, 9), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mgmntFlashMemorySize.setStatus('current')
mgmntNVRamMemorySize = MibTableColumn((1, 3, 6, 1, 4, 1, 2745, 1, 2, 5, 2, 1, 10), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mgmntNVRamMemorySize.setStatus('current')
mgmnt5vCurrent = MibTableColumn((1, 3, 6, 1, 4, 1, 2745, 1, 2, 5, 2, 1, 11), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mgmnt5vCurrent.setStatus('current')
mgmnt5vMin = MibTableColumn((1, 3, 6, 1, 4, 1, 2745, 1, 2, 5, 2, 1, 12), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mgmnt5vMin.setStatus('current')
mgmnt5vMax = MibTableColumn((1, 3, 6, 1, 4, 1, 2745, 1, 2, 5, 2, 1, 13), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mgmnt5vMax.setStatus('current')
mgmnt3pt3vCurrent = MibTableColumn((1, 3, 6, 1, 4, 1, 2745, 1, 2, 5, 2, 1, 14), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mgmnt3pt3vCurrent.setStatus('current')
mgmnt3pt3vMin = MibTableColumn((1, 3, 6, 1, 4, 1, 2745, 1, 2, 5, 2, 1, 15), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mgmnt3pt3vMin.setStatus('current')
mgmnt3pt3vMax = MibTableColumn((1, 3, 6, 1, 4, 1, 2745, 1, 2, 5, 2, 1, 16), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: mgmnt3pt3vMax.setStatus('current')
mgmntDiagnosticBootError = MibTableColumn((1, 3, 6, 1, 4, 1, 2745, 1, 2, 5, 2, 1, 17), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("on", 1), ("off", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: mgmntDiagnosticBootError.setStatus('current')
singleTwisterTable = MibTable((1, 3, 6, 1, 4, 1, 2745, 1, 2, 5, 3), )
if mibBuilder.loadTexts: singleTwisterTable.setStatus('current')
singleTwisterTableEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2745, 1, 2, 5, 3, 1), ).setIndexNames((0, "LC-PHYSICAL-ENTITIES-MIB", "stwEntityIndex"))
if mibBuilder.loadTexts: singleTwisterTableEntry.setStatus('current')
stwEntityIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 2745, 1, 2, 5, 3, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: stwEntityIndex.setStatus('current')
stwLinkLossCarryForward = MibTableColumn((1, 3, 6, 1, 4, 1, 2745, 1, 2, 5, 3, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: stwLinkLossCarryForward.setStatus('current')
dualTwisterTable = MibTable((1, 3, 6, 1, 4, 1, 2745, 1, 2, 5, 4), )
if mibBuilder.loadTexts: dualTwisterTable.setStatus('current')
dualTwisterTableEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2745, 1, 2, 5, 4, 1), ).setIndexNames((0, "LC-PHYSICAL-ENTITIES-MIB", "stwEntityIndex"))
if mibBuilder.loadTexts: dualTwisterTableEntry.setStatus('current')
dtwEntityIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 2745, 1, 2, 5, 4, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dtwEntityIndex.setStatus('current')
dtwLinkLossCarryForward = MibTableColumn((1, 3, 6, 1, 4, 1, 2745, 1, 2, 5, 4, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: dtwLinkLossCarryForward.setStatus('current')
redundantTwisterTable = MibTable((1, 3, 6, 1, 4, 1, 2745, 1, 2, 5, 5), )
if mibBuilder.loadTexts: redundantTwisterTable.setStatus('current')
redundantTwisterTableEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2745, 1, 2, 5, 5, 1), ).setIndexNames((0, "LC-PHYSICAL-ENTITIES-MIB", "rtwEntityIndex"))
if mibBuilder.loadTexts: redundantTwisterTableEntry.setStatus('current')
rtwEntityIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 2745, 1, 2, 5, 5, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: rtwEntityIndex.setStatus('current')
rtwAutoRestorePrimary = MibTableColumn((1, 3, 6, 1, 4, 1, 2745, 1, 2, 5, 5, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2), ("not-selectable", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: rtwAutoRestorePrimary.setStatus('current')
rtwLinkLossCarryForward = MibTableColumn((1, 3, 6, 1, 4, 1, 2745, 1, 2, 5, 5, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: rtwLinkLossCarryForward.setStatus('current')
rtwActivePort = MibTableColumn((1, 3, 6, 1, 4, 1, 2745, 1, 2, 5, 5, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("primary", 1), ("secondary", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: rtwActivePort.setStatus('current')
rtwRedundantTransmission = MibTableColumn((1, 3, 6, 1, 4, 1, 2745, 1, 2, 5, 5, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2))).clone(namedValues=NamedValues(("not-applicable", 0), ("enable", 1), ("disable", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: rtwRedundantTransmission.setStatus('current')
rtwSecondarySwitchOver = MibTableColumn((1, 3, 6, 1, 4, 1, 2745, 1, 2, 5, 5, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("yes", 1), ("no", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: rtwSecondarySwitchOver.setStatus('current')
rtwLinkPulseControl = MibTableColumn((1, 3, 6, 1, 4, 1, 2745, 1, 2, 5, 5, 1, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2))).clone(namedValues=NamedValues(("not-applicable", 0), ("both-ports", 1), ("active-port", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: rtwLinkPulseControl.setStatus('current')
rtwModeControl = MibTableColumn((1, 3, 6, 1, 4, 1, 2745, 1, 2, 5, 5, 1, 8), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("redundant", 1), ("selectAB", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: rtwModeControl.setStatus('current')
rtwABSelect = MibTableColumn((1, 3, 6, 1, 4, 1, 2745, 1, 2, 5, 5, 1, 9), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("selectA", 1), ("selectB", 2), ("not-selectable", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: rtwABSelect.setStatus('current')
ePortTable = MibTable((1, 3, 6, 1, 4, 1, 2745, 1, 2, 6, 1), )
if mibBuilder.loadTexts: ePortTable.setStatus('current')
ePortTableEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2745, 1, 2, 6, 1, 1), ).setIndexNames((0, "LC-PHYSICAL-ENTITIES-MIB", "ePortEntityIndex"))
if mibBuilder.loadTexts: ePortTableEntry.setStatus('current')
ePortEntityIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 2745, 1, 2, 6, 1, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ePortEntityIndex.setStatus('current')
ePortIfIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 2745, 1, 2, 6, 1, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ePortIfIndex.setStatus('current')
ePortType = MibTableColumn((1, 3, 6, 1, 4, 1, 2745, 1, 2, 6, 1, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13))).clone(namedValues=NamedValues(("other", 1), ("e10BaseT", 2), ("e10BaseFL-MM", 3), ("e10BaseFL-SM", 4), ("e100BaseTX", 5), ("e100BaseFX-MM", 6), ("e100BaseFX-SM", 7), ("e10-100Base-TX", 8), ("e1000Base-LX", 9), ("e1000Base-SX", 10), ("e1000Base-FX", 11), ("e10Base-SX", 12), ("e100Base-SX", 13)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: ePortType.setStatus('current')
ePortDescription = MibTableColumn((1, 3, 6, 1, 4, 1, 2745, 1, 2, 6, 1, 1, 4), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(1, 64))).setMaxAccess("readonly")
if mibBuilder.loadTexts: ePortDescription.setStatus('current')
ePortName = MibTableColumn((1, 3, 6, 1, 4, 1, 2745, 1, 2, 6, 1, 1, 5), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(1, 64))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ePortName.setStatus('current')
ePortLinkStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 2745, 1, 2, 6, 1, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("link-detected", 1), ("no-link", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: ePortLinkStatus.setStatus('current')
ePortAdminState = MibTableColumn((1, 3, 6, 1, 4, 1, 2745, 1, 2, 6, 1, 1, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2))).clone(namedValues=NamedValues(("not-applicable", 0), ("enable", 1), ("disable", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ePortAdminState.setStatus('current')
ePortOperStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 2745, 1, 2, 6, 1, 1, 8), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: ePortOperStatus.setStatus('current')
ePortDuplexAdmin = MibTableColumn((1, 3, 6, 1, 4, 1, 2745, 1, 2, 6, 1, 1, 9), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2))).clone(namedValues=NamedValues(("not-applicable", 0), ("half", 1), ("full", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ePortDuplexAdmin.setStatus('current')
ePortDuplexOper = MibTableColumn((1, 3, 6, 1, 4, 1, 2745, 1, 2, 6, 1, 1, 10), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("half", 1), ("full", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: ePortDuplexOper.setStatus('current')
ePortSpeedAdmin = MibTableColumn((1, 3, 6, 1, 4, 1, 2745, 1, 2, 6, 1, 1, 11), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3))).clone(namedValues=NamedValues(("not-applicable", 0), ("tenMbit", 1), ("onehundredMbit", 2), ("gigabit", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ePortSpeedAdmin.setStatus('current')
ePortSpeedOper = MibTableColumn((1, 3, 6, 1, 4, 1, 2745, 1, 2, 6, 1, 1, 12), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("tenMbit", 1), ("onehundredMbit", 2), ("gigabit", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: ePortSpeedOper.setStatus('current')
ePortAutoNegotiationAdmin = MibTableColumn((1, 3, 6, 1, 4, 1, 2745, 1, 2, 6, 1, 1, 13), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2))).clone(namedValues=NamedValues(("not-applicable", 0), ("enable", 1), ("disable", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ePortAutoNegotiationAdmin.setStatus('current')
ePortAutoNegotiationOper = MibTableColumn((1, 3, 6, 1, 4, 1, 2745, 1, 2, 6, 1, 1, 14), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("enable", 1), ("disable", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: ePortAutoNegotiationOper.setStatus('current')
ePortReset = MibTableColumn((1, 3, 6, 1, 4, 1, 2745, 1, 2, 6, 1, 1, 15), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("reset", 1), ("resetable", 2), ("not-resetable", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: ePortReset.setStatus('current')
ePortActivity = MibTableColumn((1, 3, 6, 1, 4, 1, 2745, 1, 2, 6, 1, 1, 16), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("inactive", 1), ("active", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: ePortActivity.setStatus('current')
ePortConnector = MibTableColumn((1, 3, 6, 1, 4, 1, 2745, 1, 2, 6, 1, 1, 17), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8, 9, 10))).clone(namedValues=NamedValues(("other", 1), ("rj11", 2), ("rj21", 3), ("rj45", 4), ("bnc", 5), ("sc", 6), ("st", 7), ("sma", 8), ("mt-rj", 9), ("vf-45", 10)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: ePortConnector.setStatus('current')
ePortParentRelPos = MibTableColumn((1, 3, 6, 1, 4, 1, 2745, 1, 2, 6, 1, 1, 18), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ePortParentRelPos.setStatus('current')
serialPortTable = MibTable((1, 3, 6, 1, 4, 1, 2745, 1, 2, 7, 1), )
if mibBuilder.loadTexts: serialPortTable.setStatus('current')
serialPortTableEntry = MibTableRow((1, 3, 6, 1, 4, 1, 2745, 1, 2, 7, 1, 1), ).setIndexNames((0, "LC-PHYSICAL-ENTITIES-MIB", "serialPortEntityIndex"))
if mibBuilder.loadTexts: serialPortTableEntry.setStatus('current')
serialPortEntityIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 2745, 1, 2, 7, 1, 1, 1), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: serialPortEntityIndex.setStatus('current')
serialPortDescription = MibTableColumn((1, 3, 6, 1, 4, 1, 2745, 1, 2, 7, 1, 1, 2), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(1, 64))).setMaxAccess("readonly")
if mibBuilder.loadTexts: serialPortDescription.setStatus('current')
serialPortName = MibTableColumn((1, 3, 6, 1, 4, 1, 2745, 1, 2, 7, 1, 1, 3), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(1, 64))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: serialPortName.setStatus('current')
serialPortSpeed = MibTableColumn((1, 3, 6, 1, 4, 1, 2745, 1, 2, 7, 1, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(9600, 19200, 38400))).clone(namedValues=NamedValues(("baud-9600", 9600), ("baud-19200", 19200), ("baud-38400", 38400)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: serialPortSpeed.setStatus('current')
serialPortDataBits = MibTableColumn((1, 3, 6, 1, 4, 1, 2745, 1, 2, 7, 1, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(5, 6, 7, 8))).clone(namedValues=NamedValues(("five", 5), ("six", 6), ("seven", 7), ("eight", 8)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: serialPortDataBits.setStatus('current')
serialPortParity = MibTableColumn((1, 3, 6, 1, 4, 1, 2745, 1, 2, 7, 1, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5))).clone(namedValues=NamedValues(("none", 1), ("even", 2), ("odd", 3), ("mark", 4), ("space", 5)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: serialPortParity.setStatus('current')
serialPortStopBits = MibTableColumn((1, 3, 6, 1, 4, 1, 2745, 1, 2, 7, 1, 1, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("one", 1), ("one-five", 2), ("two", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: serialPortStopBits.setStatus('current')
serialPortFlowControl = MibTableColumn((1, 3, 6, 1, 4, 1, 2745, 1, 2, 7, 1, 1, 8), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("none", 1), ("xon-xoff", 2), ("hardware", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: serialPortFlowControl.setStatus('current')
serialPortConnector = MibTableColumn((1, 3, 6, 1, 4, 1, 2745, 1, 2, 7, 1, 1, 9), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("db9", 1), ("db25", 2), ("rj45", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: serialPortConnector.setStatus('current')
tempStatusChange = NotificationType((1, 3, 6, 1, 4, 1, 2745, 1, 0, 11)).setObjects(("LC-PHYSICAL-ENTITIES-MIB", "chassisEntityIndex"), ("LC-PHYSICAL-ENTITIES-MIB", "chassisCurrentTemp"), ("LC-PHYSICAL-ENTITIES-MIB", "chassisMaxTemp"))
if mibBuilder.loadTexts: tempStatusChange.setStatus('current')
backPlaneFailure = NotificationType((1, 3, 6, 1, 4, 1, 2745, 1, 0, 12)).setObjects(("LC-PHYSICAL-ENTITIES-MIB", "modFirstSlot"))
if mibBuilder.loadTexts: backPlaneFailure.setStatus('current')
powerSupply5vChange = NotificationType((1, 3, 6, 1, 4, 1, 2745, 1, 0, 13)).setObjects(("LC-PHYSICAL-ENTITIES-MIB", "powerSupplyEntityIndex"), ("LC-PHYSICAL-ENTITIES-MIB", "powerSupply5vCurrent"), ("LC-PHYSICAL-ENTITIES-MIB", "powerSupply5vMin"), ("LC-PHYSICAL-ENTITIES-MIB", "powerSupply5vMax"), ("LC-PHYSICAL-ENTITIES-MIB", "powerSupplyUnitIdentifier"))
if mibBuilder.loadTexts: powerSupply5vChange.setStatus('current')
powerSupplyStatusChange = NotificationType((1, 3, 6, 1, 4, 1, 2745, 1, 0, 14)).setObjects(("LC-PHYSICAL-ENTITIES-MIB", "powerSupplyEntityIndex"), ("LC-PHYSICAL-ENTITIES-MIB", "powerSupplyStatus"), ("LC-PHYSICAL-ENTITIES-MIB", "powerSupplyUnitIdentifier"))
if mibBuilder.loadTexts: powerSupplyStatusChange.setStatus('current')
powerSupplyInsertion = NotificationType((1, 3, 6, 1, 4, 1, 2745, 1, 0, 15)).setObjects(("LC-PHYSICAL-ENTITIES-MIB", "powerSupplyEntityIndex"), ("LC-PHYSICAL-ENTITIES-MIB", "powerSupplyType"), ("LC-PHYSICAL-ENTITIES-MIB", "powerSupplyUnitIdentifier"))
if mibBuilder.loadTexts: powerSupplyInsertion.setStatus('current')
powerSupplyRemoval = NotificationType((1, 3, 6, 1, 4, 1, 2745, 1, 0, 16)).setObjects(("LC-PHYSICAL-ENTITIES-MIB", "powerSupplyEntityIndex"), ("LC-PHYSICAL-ENTITIES-MIB", "powerSupplyType"), ("LC-PHYSICAL-ENTITIES-MIB", "powerSupplyUnitIdentifier"))
if mibBuilder.loadTexts: powerSupplyRemoval.setStatus('current')
chassisEntityReset = NotificationType((1, 3, 6, 1, 4, 1, 2745, 1, 0, 17)).setObjects(("LC-PHYSICAL-ENTITIES-MIB", "lastEntityResetIndex"), ("LC-PHYSICAL-ENTITIES-MIB", "lastEntityResetReason"), ("LC-PHYSICAL-ENTITIES-MIB", "lastEntityResetTime"))
if mibBuilder.loadTexts: chassisEntityReset.setStatus('current')
moduleEntityReset = NotificationType((1, 3, 6, 1, 4, 1, 2745, 1, 0, 18)).setObjects(("LC-PHYSICAL-ENTITIES-MIB", "lastEntityResetIndex"), ("LC-PHYSICAL-ENTITIES-MIB", "lastEntityResetReason"), ("LC-PHYSICAL-ENTITIES-MIB", "lastEntityResetTime"), ("LC-PHYSICAL-ENTITIES-MIB", "modType"), ("LC-PHYSICAL-ENTITIES-MIB", "modFirstSlot"), ("LC-PHYSICAL-ENTITIES-MIB", "modNumSlotsOccupied"))
if mibBuilder.loadTexts: moduleEntityReset.setStatus('current')
eportEntityReset = NotificationType((1, 3, 6, 1, 4, 1, 2745, 1, 0, 19)).setObjects(("LC-PHYSICAL-ENTITIES-MIB", "lastEntityResetIndex"), ("LC-PHYSICAL-ENTITIES-MIB", "lastEntityResetReason"), ("LC-PHYSICAL-ENTITIES-MIB", "lastEntityResetTime"), ("LC-PHYSICAL-ENTITIES-MIB", "modType"), ("LC-PHYSICAL-ENTITIES-MIB", "modFirstSlot"), ("LC-PHYSICAL-ENTITIES-MIB", "modNumSlotsOccupied"), ("LC-PHYSICAL-ENTITIES-MIB", "ePortParentRelPos"))
if mibBuilder.loadTexts: eportEntityReset.setStatus('current')
moduleInsertion = NotificationType((1, 3, 6, 1, 4, 1, 2745, 1, 0, 20)).setObjects(("LC-PHYSICAL-ENTITIES-MIB", "modEntityIndex"), ("LC-PHYSICAL-ENTITIES-MIB", "modType"), ("LC-PHYSICAL-ENTITIES-MIB", "modFirstSlot"), ("LC-PHYSICAL-ENTITIES-MIB", "modNumSlotsOccupied"))
if mibBuilder.loadTexts: moduleInsertion.setStatus('current')
moduleRemoval = NotificationType((1, 3, 6, 1, 4, 1, 2745, 1, 0, 21)).setObjects(("LC-PHYSICAL-ENTITIES-MIB", "modEntityIndex"), ("LC-PHYSICAL-ENTITIES-MIB", "modType"), ("LC-PHYSICAL-ENTITIES-MIB", "modFirstSlot"), ("LC-PHYSICAL-ENTITIES-MIB", "modNumSlotsOccupied"))
if mibBuilder.loadTexts: moduleRemoval.setStatus('current')
unknownModule = NotificationType((1, 3, 6, 1, 4, 1, 2745, 1, 0, 22)).setObjects(("LC-PHYSICAL-ENTITIES-MIB", "modEntityIndex"), ("LC-PHYSICAL-ENTITIES-MIB", "modType"), ("LC-PHYSICAL-ENTITIES-MIB", "modFirstSlot"))
if mibBuilder.loadTexts: unknownModule.setStatus('current')
moduleFailure = NotificationType((1, 3, 6, 1, 4, 1, 2745, 1, 0, 23)).setObjects(("LC-PHYSICAL-ENTITIES-MIB", "modFirstSlot"))
if mibBuilder.loadTexts: moduleFailure.setStatus('current')
ePortLinkStatusChange = NotificationType((1, 3, 6, 1, 4, 1, 2745, 1, 0, 24)).setObjects(("LC-PHYSICAL-ENTITIES-MIB", "ePortEntityIndex"), ("LC-PHYSICAL-ENTITIES-MIB", "ePortLinkStatus"), ("LC-PHYSICAL-ENTITIES-MIB", "modType"), ("LC-PHYSICAL-ENTITIES-MIB", "modFirstSlot"), ("LC-PHYSICAL-ENTITIES-MIB", "modNumSlotsOccupied"))
if mibBuilder.loadTexts: ePortLinkStatusChange.setStatus('current')
ePortAdminChange = NotificationType((1, 3, 6, 1, 4, 1, 2745, 1, 0, 25)).setObjects(("LC-PHYSICAL-ENTITIES-MIB", "ePortEntityIndex"), ("LC-PHYSICAL-ENTITIES-MIB", "ePortAdminState"), ("LC-PHYSICAL-ENTITIES-MIB", "modType"), ("LC-PHYSICAL-ENTITIES-MIB", "modFirstSlot"), ("LC-PHYSICAL-ENTITIES-MIB", "modNumSlotsOccupied"))
if mibBuilder.loadTexts: ePortAdminChange.setStatus('current')
rtwSwitchOverChange = NotificationType((1, 3, 6, 1, 4, 1, 2745, 1, 0, 26)).setObjects(("LC-PHYSICAL-ENTITIES-MIB", "rtwEntityIndex"), ("LC-PHYSICAL-ENTITIES-MIB", "rtwActivePort"), ("LC-PHYSICAL-ENTITIES-MIB", "modFirstSlot"), ("LC-PHYSICAL-ENTITIES-MIB", "modNumSlotsOccupied"))
if mibBuilder.loadTexts: rtwSwitchOverChange.setStatus('current')
mibBuilder.exportSymbols("LC-PHYSICAL-ENTITIES-MIB", mgmntTable=mgmntTable, mgmntAppImageVersion=mgmntAppImageVersion, ePortReset=ePortReset, rtwSecondarySwitchOver=rtwSecondarySwitchOver, modDiagnosticTestStatus=modDiagnosticTestStatus, modPartNumber=modPartNumber, ePortActivity=ePortActivity, powerSupplyGroup=powerSupplyGroup, serialPortTable=serialPortTable, chassisPartNumber=chassisPartNumber, rtwRedundantTransmission=rtwRedundantTransmission, ePortEntityIndex=ePortEntityIndex, serialPortName=serialPortName, powerSupplyRemoval=powerSupplyRemoval, powerSupply5vMax=powerSupply5vMax, backPlaneTableEntry=backPlaneTableEntry, rtwAutoRestorePrimary=rtwAutoRestorePrimary, singleTwisterTable=singleTwisterTable, modType=modType, lastEntityResetIndex=lastEntityResetIndex, serialPortEntityIndex=serialPortEntityIndex, ePortParentRelPos=ePortParentRelPos, lastEntityResetTime=lastEntityResetTime, dtwEntityIndex=dtwEntityIndex, powerSupplyStatus=powerSupplyStatus, ePortOperStatus=ePortOperStatus, mgmntBootImageVersion=mgmntBootImageVersion, ePortTableEntry=ePortTableEntry, redundantTwisterTable=redundantTwisterTable, ePortSpeedAdmin=ePortSpeedAdmin, rtwActivePort=rtwActivePort, dualTwisterTableEntry=dualTwisterTableEntry, chassisTableEntry=chassisTableEntry, modName=modName, mgmntFlashMemorySize=mgmntFlashMemorySize, ePortAdminState=ePortAdminState, ePortIfIndex=ePortIfIndex, serialPortDescription=serialPortDescription, chassisDescription=chassisDescription, ePortDescription=ePortDescription, mgmnt3pt3vCurrent=mgmnt3pt3vCurrent, modEntityIndex=modEntityIndex, chassisTable=chassisTable, PYSNMP_MODULE_ID=physicalEntities, lastEntityResetReason=lastEntityResetReason, backPlanePartNumber=backPlanePartNumber, chassisCurrentTemp=chassisCurrentTemp, modManufactureDate=modManufactureDate, serialPortSpeed=serialPortSpeed, chassisEntityReset=chassisEntityReset, stwLinkLossCarryForward=stwLinkLossCarryForward, modulesGroup=modulesGroup, ePortGroup=ePortGroup, backPlaneManufactureDate=backPlaneManufactureDate, redundantTwisterTableEntry=redundantTwisterTableEntry, serialPortGroup=serialPortGroup, rtwModeControl=rtwModeControl, singleTwisterTableEntry=singleTwisterTableEntry, unknownModule=unknownModule, ePortDuplexAdmin=ePortDuplexAdmin, ePortDuplexOper=ePortDuplexOper, moduleInsertion=moduleInsertion, backPlaneEntityIndex=backPlaneEntityIndex, powerSupply5vMin=powerSupply5vMin, powerSupplyInsertion=powerSupplyInsertion, backPlaneTable=backPlaneTable, stwEntityIndex=stwEntityIndex, moduleFailure=moduleFailure, serialPortStopBits=serialPortStopBits, backPlaneHWRevisionNumber=backPlaneHWRevisionNumber, modNumSlotsOccupied=modNumSlotsOccupied, chassisNumSlots=chassisNumSlots, mgmntDiagnosticBootError=mgmntDiagnosticBootError, ePortAdminChange=ePortAdminChange, ePortAutoNegotiationOper=ePortAutoNegotiationOper, modAdminState=modAdminState, moduleEntityReset=moduleEntityReset, backPlaneFailure=backPlaneFailure, moduleRemoval=moduleRemoval, modFirstSlot=modFirstSlot, mgmnt5vMax=mgmnt5vMax, serialPortDataBits=serialPortDataBits, serialPortParity=serialPortParity, backPlaneGroup=backPlaneGroup, powerSupplyUnitIdentifier=powerSupplyUnitIdentifier, rtwLinkLossCarryForward=rtwLinkLossCarryForward, serialPortConnector=serialPortConnector, moduleTableEntry=moduleTableEntry, ePortType=ePortType, powerSupplyTable=powerSupplyTable, mgmntTableEntry=mgmntTableEntry, mgmntBootImageName=mgmntBootImageName, mgmntRamMemorySize=mgmntRamMemorySize, chassisMaxTemp=chassisMaxTemp, ePortSpeedOper=ePortSpeedOper, 
powerSupplyType=powerSupplyType, physicalEntities=physicalEntities, chassisGroup=chassisGroup, dualTwisterTable=dualTwisterTable, powerSupplyTableEntry=powerSupplyTableEntry, rtwABSelect=rtwABSelect, serialPortTableEntry=serialPortTableEntry, modOperStatus=modOperStatus, ePortConnector=ePortConnector, modSerialNumber=modSerialNumber, backPlaneSerialNumber=backPlaneSerialNumber, powerSupplyEntityIndex=powerSupplyEntityIndex, mgmnt3pt3vMin=mgmnt3pt3vMin, lastEntityResetType=lastEntityResetType, mgmntNVRamMemorySize=mgmntNVRamMemorySize, ePortLinkStatus=ePortLinkStatus, moduleTable=moduleTable, powerSupply5vCurrent=powerSupply5vCurrent, modHWRevisionNumber=modHWRevisionNumber, mgmntAppImageName=mgmntAppImageName, mgmnt5vMin=mgmnt5vMin, mgmntEntityIndex=mgmntEntityIndex, serialPortFlowControl=serialPortFlowControl, mgmnt5vCurrent=mgmnt5vCurrent, ePortTable=ePortTable, chassisEntityIndex=chassisEntityIndex, modReset=modReset, powerSupply5vChange=powerSupply5vChange, mgmnt3pt3vMax=mgmnt3pt3vMax, ePortAutoNegotiationAdmin=ePortAutoNegotiationAdmin, powerSupplyStatusChange=powerSupplyStatusChange, chassisReset=chassisReset, mgmntCoreImageName=mgmntCoreImageName, ePortLinkStatusChange=ePortLinkStatusChange, rtwEntityIndex=rtwEntityIndex, modNumPorts=modNumPorts, dtwLinkLossCarryForward=dtwLinkLossCarryForward, rtwLinkPulseControl=rtwLinkPulseControl, backPlaneDescription=backPlaneDescription, rtwSwitchOverChange=rtwSwitchOverChange, ePortName=ePortName, modDescription=modDescription, tempStatusChange=tempStatusChange, mgmntCoreImageVersion=mgmntCoreImageVersion, eportEntityReset=eportEntityReset)
|
""" Setup script for installation. """
import os
from distutils.core import setup
# Utility function to read files. Used for the long_description.
def read(fname):
""" Reads the description of the package from the README.md file. """
return open(os.path.join(os.path.dirname(__file__), fname)).read()
def get_version():
""" Extract the version of the package from the CHANGES file. """
version_fh = open("CHANGES", "r")
first_line = version_fh.readline().strip()
version_fh.close()
version = first_line.split()[1]
return version
setup(
name='cutlass',
description='An iHMP domain specific API using osdf-python',
long_description=read('README.md'),
version=get_version(),
author='Victor F',
author_email='[email protected]',
url='https://hmpdacc.org',
license='MIT',
packages=['cutlass', 'cutlass.aspera'],
requires=['osdf'],
classifiers=[
"Development Status :: 5 - Production/Stable",
"License :: OSI Approved :: MIT License",
"Intended Audience :: Science/Research",
"Natural Language :: English",
"Operating System :: POSIX",
"Programming Language :: Python :: 2.7",
"Topic :: Utilities",
"Topic :: Scientific/Engineering",
"Topic :: Scientific/Engineering :: Bio-Informatics"
]
)
|
from src.Stack.stack_linked_list import Stack
def is_balanced(brackets):
brackets=brackets.strip()
if brackets=='':
raise Exception("The string is empty")
if '(' not in brackets and ')' not in brackets:
raise Exception("This string without brackets")
stack=Stack()
for bracket in brackets:
if bracket==')' and stack.is_empty():
print ("The string is started with closed bracket and it is not balanced")
return False
elif bracket=='(':
stack.push(bracket)
elif bracket==')':
stack.pop()
else:
raise Exception("This string has something else but brackets")
    if stack.is_empty():
        print("The string is balanced")
        return True
    else:
        print("The string is not balanced")
        return False
if __name__=="__main__":
is_balanced('((')
is_balanced('()') |
from .goodlogging import *
|
from joblib import register_parallel_backend
def register_cloudbutton():
""" Register Cloudbutton Backend to be called with
joblib.parallel_backend("cloudbutton") """
try:
from cloudbutton.util.joblib.cloudbutton_backend import CloudbuttonBackend
register_parallel_backend("cloudbutton", CloudbuttonBackend)
except ImportError:
msg = ("To use the cloudbutton backend you must first install the plugin. "
"See https://github.com/Dahk/cloudbutton-backend.git "
"for instructions.")
raise ImportError(msg)
__all__ = ["register_cloudbutton"]
|
from flask import Blueprint, jsonify
import time
import logging
from ..model import WebUser, WatchedUser, WatchedVideo, Task, TaskStatus,\
ItemOnline, ItemVideoStat, ItemUpStat, ItemRegionActivity,\
TaskFailed,TotalWatchedUser,TotalWatchedVideo,TotalEnabledTask,\
WorkerStatus
main = Blueprint('api_stat', __name__)
@main.route('/get')
def get_stat():
d = dict()
d['total_user'] = TotalWatchedUser.get_total()
d['total_video'] = TotalWatchedVideo.get_total()
d['total_enabled_task'] = TotalEnabledTask.get_total()
d['today_task_failed'] = TaskFailed.count_today()
    # total up today's submission counts across all workers
ws = WorkerStatus.get_today()
today_submit = 0
for i in ws:
today_submit += i.day_submit_count
del i['id']
d['today_submit'] = today_submit
d['workers_stat'] = ws
return jsonify(d) |
import logging
from poet_distributed.niches.box2d.cppn import CppnEnvParams
from poet_distributed.niches.box2d.model import Model, simulate
from poet_distributed.niches.box2d.env import bipedhard_custom, Env_config
from inspection_tools.file_utilities import get_model_file_iterator, get_latest_cppn_file
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)
def inspect_model(model_file, cppn_genome):
"""
Load and inspect the performance of an agent in a given environment
:param model_file: The full path of the .json file containing the agent model to be inspected.
:param cppn_genome: Either the full path of the pickle file containing the CPPN that the agent will be tested in,
or the CppnEnvParams to be used directly
    :return pos: (x, y) tuple containing the position of an agent's death, which can be input to (draw_
"""
# set master_seed
seed = -1 # if seed should not be used
# seed = 42 # if seed should be used
default_environment = Env_config(
name='flat',
ground_roughness=0,
pit_gap=[],
stump_width=[],
stump_height=[],
stump_float=[],
stair_height=[],
stair_width=[],
stair_steps=[])
if isinstance(cppn_genome, CppnEnvParams):
cppn_params = cppn_genome
elif isinstance(cppn_genome, str):
cppn_params = CppnEnvParams(genome_path=cppn_genome)
else:
        raise IOError("cppn_genome must be a CppnEnvParams instance or a path to a pickled genome")
test_model = Model(bipedhard_custom)
test_model.make_env(seed=seed, env_config=default_environment)
test_model.load_model(filename=model_file)
_, _, info = simulate(test_model, seed=seed, train_mode=True, render_mode=True, num_episode=1, max_len=2000,
env_config_this_sim=default_environment, env_params=cppn_params)
return info
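# A hedged usage sketch (commented out): besides the genome-pickle path used in
# __main__ below, inspect_model also accepts a CppnEnvParams instance directly
# (see the isinstance check above). The file names here are placeholders, not
# files guaranteed to exist in this repository.
#
#   params = CppnEnvParams(genome_path='path/to/some_genome.pickle')
#   info = inspect_model(model_file='path/to/agent.best.json', cppn_genome=params)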
if __name__ == "__main__":
"""
Run a custom test of an agent and a cppn, or multiple sequentially.
"""
test_run_name = 'dec2_168h'
for current_agent_model_json in get_model_file_iterator(training_run=test_run_name):
current_optimizer_log_file = current_agent_model_json.split('.best.json')[0] + '.log'
current_cppn_genome_file = get_latest_cppn_file(training_run=test_run_name,
optimizer_log_file=current_optimizer_log_file)
print("\n\nNow running agent: {} \non environment: {}".format(current_agent_model_json,
current_cppn_genome_file))
inspect_model(model_file=current_agent_model_json, cppn_genome=current_cppn_genome_file)
|
# django imports
from django.contrib import admin
# lfs imports
from lfs.export.models import CategoryOption
from lfs.export.models import Export
from lfs.export.models import Script
admin.site.register(CategoryOption)
admin.site.register(Export)
admin.site.register(Script)
|
from sqlalchemy import Column, String, Text
from model.base import Base
class JG(Base):
__tablename__ = 'jg'
class_name = '机构'
import_handle_file = ['president', 'vice_president', 'executive_vice_president', 'chairman', 'secretary_general',
'deputy_secretary_general', 'director', 'chief_supervisor', 'supervisor', 'representative',
'historical_staff'
]
export_handle_file = ['president', 'vice_president', 'executive_vice_president', 'chairman', 'secretary_general',
'deputy_secretary_general', 'director', 'chief_supervisor', 'supervisor', 'representative',
'historical_staff', 'type'
]
field = [
'id', 'name', 'introduction', 'president', 'vice_president', 'executive_vice_president', 'chairman',
'secretary_general', 'deputy_secretary_general',
'director', 'chief_supervisor', 'supervisor', 'representative', 'historical_staff', 'remark', 'type'
]
combo_field = {
'type': {
'exclude': True,
'items': ['在台机构', '在绍机构', '其他']
}
}
staff_names = {
'会长': 'president',
'理事长': 'chairman',
'副会长': 'vice_president',
'常务副会长': 'executive_vice_president',
'监事长': 'chief_supervisor',
'监事': 'supervisor',
'总干事(秘书长)': 'secretary_general',
'副秘书长': 'deputy_secretary_general',
'理事': 'director',
'成员': 'representative',
'历史人员': 'historical_staff'
}
template_start_row = 3
name = Column(String(100), comment='名称')
introduction = Column(String(1000), comment='简介')
director_ = Column('director', Text, comment='理事')
chief_supervisor_ = Column('chief_supervisor', Text, comment='监事长')
supervisor_ = Column('supervisor', Text, comment='监事')
representative_ = Column('representative', Text, comment='成员')
president_ = Column('president', Text, comment='会长')
vice_president_ = Column('vice_president', Text, comment='副会长')
executive_vice_president_ = Column('executive_vice_president', Text, comment='常务副会长')
chairman_ = Column('chairman', Text, comment='理事长')
secretary_general_ = Column('secretary_general', Text, comment='总干事(秘书长)')
deputy_secretary_general_ = Column('deputy_secretary_general', Text, comment='副秘书长')
historical_staff_ = Column('historical_staff', Text, comment='历史人员')
remark = Column(Text, comment='备注')
type_ = Column('type', Text, comment='机构类型')
@property
def deputy_secretary_general(self):
if self.deputy_secretary_general_ is None:
return []
return self.deputy_secretary_general_.split(' ')
@deputy_secretary_general.setter
def deputy_secretary_general(self, raw: list):
while '' in raw:
raw.remove('')
self.deputy_secretary_general_ = " ".join(raw)
@property
def executive_vice_president(self):
if self.executive_vice_president_ is None:
return []
return self.executive_vice_president_.split(' ')
@executive_vice_president.setter
def executive_vice_president(self, raw: list):
while '' in raw:
raw.remove('')
self.executive_vice_president_ = " ".join(raw)
@property
def chief_supervisor(self):
if self.chief_supervisor_ is None:
return []
return self.chief_supervisor_.split(' ')
@chief_supervisor.setter
def chief_supervisor(self, raw: list):
while '' in raw:
raw.remove('')
self.chief_supervisor_ = " ".join(raw)
@property
def president(self):
if self.president_ is None:
return []
return self.president_.split(' ')
@president.setter
def president(self, raw: list):
while '' in raw:
raw.remove('')
self.president_ = " ".join(raw)
@property
def vice_president(self):
if self.vice_president_ is None:
return []
return self.vice_president_.split(' ')
@vice_president.setter
def vice_president(self, raw: list):
while '' in raw:
raw.remove('')
self.vice_president_ = " ".join(raw)
@property
def chairman(self):
if self.chairman_ is None:
return []
return self.chairman_.split(' ')
@chairman.setter
def chairman(self, raw: list):
while '' in raw:
raw.remove('')
self.chairman_ = " ".join(raw)
@property
def secretary_general(self):
if self.secretary_general_ is None:
return []
return self.secretary_general_.split(' ')
@secretary_general.setter
def secretary_general(self, raw: list):
while '' in raw:
raw.remove('')
self.secretary_general_ = " ".join(raw)
@property
def historical_staff(self):
if self.historical_staff_ is None:
return []
return self.historical_staff_.split(' ')
@historical_staff.setter
def historical_staff(self, raw: list):
while '' in raw:
raw.remove('')
self.historical_staff_ = " ".join(raw)
@property
def director(self):
if self.director_ is None:
return []
return self.director_.split(' ')
@director.setter
def director(self, raw: list):
while '' in raw:
raw.remove('')
self.director_ = " ".join(raw)
@property
def supervisor(self):
if self.supervisor_ is None:
return []
return self.supervisor_.split(' ')
@supervisor.setter
def supervisor(self, raw: list):
while '' in raw:
raw.remove('')
self.supervisor_ = " ".join(raw)
@property
def representative(self):
if self.representative_ is None:
return []
return self.representative_.split(' ')
@representative.setter
def representative(self, raw: list):
while '' in raw:
raw.remove('')
self.representative_ = " ".join(raw)
@property
def type(self):
return self.type_
@type.setter
def type(self, val):
self.type_ = val
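
# Usage sketch for the list-valued staff fields (assumes a configured SQLAlchemy session;
# the values below are illustrative). Each *_ column stores a space-separated string and the
# matching property converts it to and from a Python list:
#
#   jg = JG(name='某机构')
#   jg.president = ['张三', '李四']   # setter joins the list with single spaces
#   jg.president_                     # -> '张三 李四' (raw column value)
#   jg.president                      # -> ['张三', '李四']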
|
import rq
from basic_test import wait
from multi_rq.multi_rq import MultiRQ
import numpy as np
mrq = MultiRQ()
nums = [[(i,j)] for i,j in zip(range(0,20,2),range(11,21))]
assert mrq.apply_async(np.mean,nums) == [5.5, 7.0, 8.5, 10.0, 11.5, 13.0, 14.5, 16.0, 17.5, 19.0]
assert mrq.apply_async
print('multi-rq: all tests passed')
|
# -*- coding: utf-8 -*-
from django.conf import settings
from .models import UploadFileModel
from django.http import JsonResponse, Http404
from django.http import HttpResponse, HttpResponseRedirect
from board.models import Board
from django.shortcuts import render, redirect
from django.views.decorators.csrf import csrf_exempt
from django.contrib.auth import authenticate, get_user_model
from django.contrib.auth.hashers import make_password
from .serializers import FidoSerializer
from rest_framework_jwt.settings import api_settings
from .forms import UploadFileForm, LoginForm
from django.core.files import File
from os.path import basename
from urllib.request import urlretrieve, urlcleanup
from urllib.parse import urlsplit
from django.db.models.signals import post_save
from tempfile import TemporaryFile
from rest_framework.viewsets import ModelViewSet
from django.template import RequestContext
from Crypto.PublicKey import RSA
from Crypto.Cipher import PKCS1_OAEP
User = get_user_model()
import json, jwt, mimetypes, os, requests, urllib
JWT_PAYLOAD_HANDLER = api_settings.JWT_PAYLOAD_HANDLER
JWT_ENCODE_HANDLER = api_settings.JWT_ENCODE_HANDLER
JWT_SECRET='FIDO_2020'
serverprivatekey="MIICWwIBAAKBgQDRKOpN7uhayaJc6FEx3Yrp6+wVcBsiayBqpke3Yp9VIfa0dDX4Ni5uyB1w+9eeWS8CHia6jXPf75yb1BKO6TFhTiGbbev9rTQlhPdgo57V1iv3Ew1JUFIsWyZFTmSSCaL7j/bHF7OenBBG0utXI8KersmKRQHqEOfmEpuw5vkmNQIDAQABAoGAGWxdvodRmudzYtOiOutw3SoeEiUER1S6Jfx8LyA/ubtdH2YYwUjkr/aHDZqvBMJWYm2Dy50x/oBDivVmJBTYOYFhaCqbWtblFgZ3jBTIIoA6Lpx7lxK4mGQ8fk9WVPjS7hRPfZZEJ6QVfLZKloaAc8P+p/l0hjOE1jhlRY9BPoECQQDqq2fFZPQ4leAegu3aW8mRr+NwHJd0Ts6SknGXKIHaDLTrGP5PDJNUwoRERd0VZr4QQrL8QLkkZUplEIn/r9QxAkEA5CvqpMAxjdDTEpeWk2Tg747RSQP9sRben901m/ZRIQAKYVQ+N/tfmOT4GpdwYU3bdokG+Kd+LcdCnrZ5OHAFRQJAMjr2P15YmDQcgOttlivHfZO0jy7PjGnB9cW64qwc/1tw7lGvPaRndOEeBq8dn5MaY8ijHzOLbarwvalIoJ42QQJAXoAN25Q6MdkmQlIELCGgw7br2QjNHnYxWBafKGwY58kDg5IHftoems1iMGk+Qx6i4XIZMAz2xnD7l45NoGFM3QJATOZRBBCX0QTbterJp5cvVCdizOETfBLIbc7prMw7F56rXpraAWCjAayfSFDV6Bc+CY+4S24jYAMm38w+rieAWA=="
programpubkey="MIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDz/ni+WCbbUUSgNy+wNT9nmgKlOsNxOAxGkUT/M+Hxt+myP2ewMCoapd0lcDIcufNQOZ6rXpg+pT+Am8N3jATyrAhpActh/2+xgj7b05yg/Hmgok9+O/vpgHNasLhla4pAOVbiRBRpsiuAh0urtZcjGNYrEwkn15+1Oncjc5pR5wIDAQAB"
testprivatekey="fidopriavtekey"
@csrf_exempt
def signup(request):  # sign-up for regular users
if request.method == 'GET':
return render(request, 'useraccount/signup.html')
elif request.method == 'POST':
username = request.POST['name']
userid = request.POST['id']
company = request.POST['company']
code = request.POST['code']
        # save to the DB
account = User.objects.create_user(
username=username,
user_id = userid,
company = company,
companycode = code,
)
request.session['user'] = userid
account.save()
os.mkdir(os.path.join('media/account/', username))
return render(request, 'useraccount/signup.html')
#web login
@csrf_exempt
def login(request):
if request.method == 'GET':
return render(request, 'useraccount/login.html')
elif request.method == 'POST':
id = request.POST.get('id')
request.session['user'] = id
return redirect('https://fidochallenge486.tk:8080/login/' + id, id=id)
@csrf_exempt
def program_login(request):
if request.method == 'GET':
return render(request, 'useraccount/login.html')
elif request.method == 'POST':
data = json.loads(request.body)
name = data["name"]
#code = data["code"]
test = User.objects.get(username=name)
return JsonResponse({'name':name, 'id':test.user_id, 'company':test.company, 'code':test.companycode})
#return JsonResponse({'success':'true','token':token}, status=200)
@csrf_exempt
def program_signup(request): #app_signup
if request.method == 'POST':
data = json.loads(request.body)
username = data["name"]
userid = data["id"]
company = data["company"]
code = data["code"]
payload = {"name":username, "id":userid, "company":company, "code":code}
token = jwt.encode(payload, JWT_SECRET).decode('utf-8')
account = User.objects.create_user(
username=username,
user_id=userid,
company=company,
companycode=code,
)
account.save()
return JsonResponse({'Check': '1'}, status=200)
@csrf_exempt
# Given a name, company code and public key, store the public key as <name>.pub
def keystore(request):
if request.method == 'POST':
data = json.loads(request.body)
req_username = data["name"]
req_companycode = data["code"]
publickey = data["pbkey"]
user = authenticate(username = req_username, companycode = req_companycode)
if user is not None:
link = "useraccount/PublicKey/" + req_username + ".pub"
f = open(link, "w")
f.write(publickey)
f.close()
return HttpResponse('Success!', status=200)
else:
return HttpResponse("False!!", status=400)
@csrf_exempt
# Given a decryption key, user name and file name, return the syn key and the name of the decrypted file
def receivefilekey(request):
if request.method == 'POST':
data = json.loads(request.body)
Synkey = data["Synkey"]
Recvname = data["Name"]
filename = data["filename"]
link = "useraccount/encryptedfile/" + str(Recvname) + "-" + str(filename)
f = open(link, "w")
f.write(Synkey)
resultname = filename
return JsonResponse({'Synkey':Synkey, "encrypt_filename":resultname}, status=200)
@csrf_exempt
# Given a user name and file name, send back the decryption key stored in that file; testfilekey should probably be renamed
def receivesynkey(request):
if request.method == 'POST':
data = json.loads(request.body)
verify = data["username"]
filename = data["filename"]
link = "useraccount/encryptedfile/" + str(verify) + "-" + str(filename)
        if not os.path.exists(link):
            return JsonResponse({"status": "No file name!!"}, status=404)
        else:
            with open(link, "r") as f:
                synnkey = f.read()
            print("file found!")
            return JsonResponse({"synnkey": synnkey}, status=200)
@csrf_exempt
def encryptsynnkey(request):  # encrypt the syn key
if request.method == 'POST':
data = json.loads(request.body)
synkey = data["synkey"]
name = data["name"]
filename = data["filename"]
        key = RSA.importKey(testprivatekey)
        encryptor = PKCS1_OAEP.new(key)
        encrypted = encryptor.encrypt(synkey.encode())  # PKCS1_OAEP operates on bytes (original note: issue here)
        link = "useraccount/encryptedfile/" + str(name) + "-" + str(filename)
        with open(link, "wb") as f:
            f.write(encrypted)
@csrf_exempt
def app_login(request, id):
request.session['user'] = id
return redirect('https://fidochallenge486.tk:8080/login/' + id, user_id=id)
@csrf_exempt
# the part that sends the signal
def receivesignal(request):  # where the signal is sent
if request.method == 'POST':
data = json.loads(request.body)
Name = data["name"]
signal= data["check_me"]
Name = str(Name)
if User.objects.filter(username=Name):
return JsonResponse({"signal":"1"}, status=200)
else:
return JsonResponse({"signal":"0"}, status=400)
@csrf_exempt
def index(request):
return render(request, 'useraccount/index.html', {})
@csrf_exempt
def file_list(request):
return render(request, 'useraccount/list.html', {})
@csrf_exempt
def upload_file(request):
if request.method == 'POST':
form = UploadFileForm(request.POST, request.FILES)
if form.is_valid():
form.save()
return redirect('file_list')
else:
form = UploadFileForm()
return render(request, 'useraccount/upload.html', {'form': form})
@csrf_exempt
def download_to_file(url, field):
try:
tempname, _ = urlretrieve(url)
        field.save(basename(urlsplit(url).path), File(open(tempname, 'rb')))
finally:
urlcleanup()
@csrf_exempt
def download_file(request, file):
fl_path = 'useraccount/encryptedfile/'
filename = str(file)
    fl = open(fl_path + filename, 'rb')
    mime_type, _ = mimetypes.guess_type(fl_path + filename)
    response = HttpResponse(fl, content_type=mime_type)
    response['Content-Disposition'] = "attachment; filename=%s" % filename
return response
@csrf_exempt
def test_upload(url, field):
with TemporaryFile() as tf:
r = requests.get(url, stream=True)
for chunk in r.iter_content(chunk_size=4096):
tf.write(chunk)
tf.seek(0)
        field.save(basename(urlsplit(url).path), File(tf))
@csrf_exempt
def return_list(request, name):
if request.method == 'POST':
username = name # useraccount/return_list/userename
#data = json.loads(request.body)
path = "useraccount/NameFolder/" + str(username) #경로 생성
file_list = os.listdir(path)
f = open(path+"/date.txt")
date_list = [str(value) for value in f.read().split()]
f.close()
f = open(path+"/sender.txt")
sender_list = [str(value) for value in f.read().split()]
f.close()
f = open(path+"/receiver.txt")
receiver_list = [str(value) for value in f.read().split()]
f.close()
payload = {
"file_list" : file_list,
"sender_list" : sender_list,
"date_list" : date_list
}
return JsonResponse(payload, safe=False, status=200)
# Create your views here.
def logout(request):
    if request.session.get('user'):  # if a session ID exists for the user
        del(request.session['user'])  # remove the current session ID
        return redirect('/')  # and redirect to the home page
@csrf_exempt
def home(request):
    user_id = request.session.get('user')  # get the user ID from the session
    if user_id:
        user = User.objects.get(pk=user_id)  # look the user up by primary key
        return HttpResponse(user.username)  # show the username when logged in
    return HttpResponse("Home!")  # show "Home!" when not logged in
def register(request):
if request.method == 'GET':
return render(request, 'useraccount/register.html')
elif request.method == 'POST':
        username = request.POST.get('username', None)  # value of the name field submitted from the template
        password = request.POST.get('password', None)  # defaults to None when the key has no value
        re_password = request.POST.get('re-password', None)
        useremail = request.POST.get('useremail', None)
        res_data = {}  # dictionary holding the response message
        if not (username and useremail and password and re_password):
            res_data['error'] = '모든 값을 입력해야 합니다.'
        elif password != re_password:
            res_data['error'] = '비밀번호가 다릅니다.'
        else:
            user = User(  # build an instance of the model class
                username=username,
                useremail=useremail,
                password=make_password(password)  # hash the password before storing it
            )
            user.save()  # persist to the database
        return render(request, 'user/register.html', res_data)  # res_data is passed into the template
@csrf_exempt
def verity(request):
if request.method == 'POST':
dataa = json.loads(request.body)
data = dataa['token']
decoded = jwt.decode(data, JWT_SECRET, algorithms='HS256')
name = decoded['name']
code = decoded['code']
user = authenticate(username=name, companycode=code)
if user is not None:
return HttpResponse('success!', status=200)
return HttpResponse('false', status=404)
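
# Example request body for verity() above (a sketch): the token is a JWT signed with JWT_SECRET
# (HS256) whose payload carries at least the "name" and "code" claims, as built in program_signup:
#
#   POST /verity
#   {"token": "<jwt containing {'name': ..., 'id': ..., 'company': ..., 'code': ...}>"}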
|
"""
DataMeta
DataMeta # noqa: E501
The version of the OpenAPI document: 1.4.0
Contact: [email protected]
Generated by: https://openapi-generator.tech
"""
import unittest
import datameta_client_lib
from datameta_client_lib.api.files_api import FilesApi # noqa: E501
class TestFilesApi(unittest.TestCase):
"""FilesApi unit test stubs"""
def setUp(self):
self.api = FilesApi() # noqa: E501
def tearDown(self):
pass
def test_create_file(self):
"""Test case for create_file
Create a New File # noqa: E501
"""
pass
def test_delete_file(self):
"""Test case for delete_file
Delete Not-Submitted File # noqa: E501
"""
pass
def test_get_file(self):
"""Test case for get_file
Get Details for A File # noqa: E501
"""
pass
def test_update_file(self):
"""Test case for update_file
Update File Details # noqa: E501
"""
pass
if __name__ == '__main__':
unittest.main()
|
# -*- coding: utf-8 -*-
"""
Created on Wed Oct 8 14:45:02 2014.
@author: mje
"""
# %reset -f  (IPython magic; only meaningful when run interactively)
import mne
import sys
from mne.io import Raw
from mne.preprocessing import ICA, create_eog_epochs
import matplotlib
matplotlib.use('Agg')
#from my_settings import *
data_path = '/home/mje/mnt/hyades/scratch2/MINDLAB2011_24-MEG-readiness/mne_analysis_new/'
save_folder = data_path + "filter_ica_data/"
maxfiltered_folder = data_path + "maxfiltered_data/"
# SETTINGS
n_jobs = 1
l_freq, h_freq = 1, 98 # High and low frequency setting for the band pass
n_freq = 50 # notch filter frequency
decim = 4 # decim value
matplotlib.pyplot.close("all")
subject = "0020"
condition = "interupt"
reject_params = dict(grad=4000e-13, # T / m (gradiometers)
mag=4e-12 # T (magnetometers)
)
# SETTINGS
#raw = Raw(save_folder + "%s_%s_filtered_mc_tsss-raw.fif" % (subject,
# condition),
# preload=True)
raw = Raw(maxfiltered_folder + "%s_%s_mc_tsss-raw.fif" % (subject,
condition),
preload=True)
raw.drop_channels(raw.info["bads"])
raw.notch_filter(n_freq, n_jobs=n_jobs)
raw.filter(l_freq, h_freq, n_jobs=n_jobs)
raw.save(save_folder + "%s_%s_filtered_mc_tsss-raw.fif" % (subject,
condition),
overwrite=True)
# ICA Part
ica = ICA(n_components=0.99, method='fastica', max_iter=256)
picks = mne.pick_types(raw.info, meg=True, eeg=False, eog=False, emg=False,
bio=False, stim=False, exclude='bads')
ica.fit(raw, picks=picks, decim=decim, reject=reject_params)
# maximum number of components to reject
n_max_eog = 1
##########################################################################
# 2) identify bad components by analyzing latent sources.
# DETECT EOG BY CORRELATION
# HORIZONTAL EOG
title = "ICA: %s for %s"
picks = mne.pick_types(raw.info, meg=True, eeg=False, eog=False, emg=False,
bio=True, stim=False, exclude='bads')
eog_epochs = create_eog_epochs(raw, ch_name="EOG001")
eog_average = eog_epochs.average()
# channel name
eog_inds, scores = ica.find_bads_eog(raw)
# ica.plot_components()
ica.plot_sources(raw)
# %%
# eog_inds = [10]
# ica.exclude += eog_inds
fig = ica.plot_scores(scores, exclude=eog_inds,
title=title % ('eog', subject))
fig.savefig(save_folder + "pics/%s_%s_eog_scores.png" % (subject,
condition))
fig = ica.plot_sources(eog_average, exclude=None)
fig.savefig(save_folder + "pics/%s_%s_eog_source.png" % (subject,
condition))
fig = ica.plot_components(ica.exclude, title=title % ('eog', subject),
colorbar=True)
fig.savefig(save_folder + "pics/%s_%s_eog_component.png" % (subject,
condition))
fig = ica.plot_overlay(eog_average, exclude=None, show=False)
fig.savefig(save_folder + "pics/%s_%s_eog_excluded.png" % (subject,
condition))
# del eog_epochs, eog_average
##########################################################################
# Apply the solution to Raw, Epochs or Evoked like this:
raw_ica = ica.apply(raw)
ica.save(save_folder + "%s_%s-ica.fif" % (subject, condition)) # save ICA
# componenets
# Save raw with ICA removed
raw_ica.save(save_folder + "%s_%s_filtered_ica_mc_tsss-raw.fif" % (
subject, condition),
overwrite=True)
|
import logging
import os
import keyring
from getpass import getpass
from pathlib import Path
from string import Template
from decouple import config
from .cert_generator import run as generate_cert
from .constants import Constants
def init_user(user: str):
try:
__config_current_user_path(user)
__config_user(user)
__config_certificate(user)
except Exception as err:
raise Exception(f'Could not init the config for the user {user}. Error: {err}')
def get_user_password(user: str):
return keyring.get_password(__get_user_key(user), 'id')
def __get_user_key(user: str):
return Template(config(Constants.Keyring.user_mask)).substitute(user=user)
def __get_token_key(user: str):
return Template(config(Constants.Keyring.user_token_mask)).substitute(user=user)
def get_user_token_value(user: str):
token_key = __get_token_key(user)
return keyring.get_password(token_key, 'token')
def set_user_token_value(user: str, token: str):
token_key = __get_token_key(user)
keyring.set_password(token_key, 'token', token)
def __config_user(user: str):
# Config user
user_key = __get_user_key(user)
    credential_exists = keyring.get_credential(user_key, 'id') is not None
is_standalone_run = config(Constants.Wrapper.standalone_run, cast=bool)
if is_standalone_run and not credential_exists:
raise Exception(f'Please run the script without the standalone flag and config the user : {user}')
option = 'N'
if not is_standalone_run and credential_exists:
option = input(f'The user {user} in key {user_key} already exist, do you want to replace? [Y/N]: ')
if option == 'Y' or not credential_exists:
user_id = user
user_password = getpass(f'[>] Enter your password for {user_id} (Used on the app/website): ')
keyring.set_password(user_key, 'id', user_password)
def __config_current_user_path(user: str):
cache_dir_path: Path = config(Constants.Wrapper.cache_dir_path, cast=Path)
user_cache_dir_path = cache_dir_path.joinpath(user)
if not user_cache_dir_path.exists():
logging.debug('Creating User Data directory')
user_cache_dir_path.mkdir(parents=True, exist_ok=True)
logging.debug('User data directory created')
os.environ[Constants.Wrapper.user_cache_dir_path] = str(user_cache_dir_path)
def __config_certificate(user: str):
user_data_cache: Path = config(Constants.Wrapper.user_cache_dir_path, cast=Path)
certificate_file_name = Template(config(Constants.Wrapper.user_certificate_mask)).substitute(user=user)
certificate_file_path = user_data_cache.joinpath(certificate_file_name).absolute()
certificate_exists = certificate_file_path.exists()
os.environ[Constants.Wrapper.user_certificate_path] = str(certificate_file_path)
    logging.debug(f'Certificate file name: {certificate_file_name}')
    logging.debug(f'Certificate file path: {certificate_file_path}')
    logging.debug(f'Certificate file exists: {certificate_exists}')
if not certificate_exists:
logging.debug(f'Generating Certificate {get_user_password(user)}')
generate_cert(user, get_user_password(user))
logging.debug(f'Certificate config finished')
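
# Minimal usage sketch for this module (assumes the decouple settings referenced via Constants
# are configured and a system keyring backend is available; 'alice' is an illustrative user):
#
#   init_user('alice')                          # creates the cache dir, stores the password, makes the cert
#   password = get_user_password('alice')       # read the stored password back from the keyring
#   set_user_token_value('alice', 'token-123')
#   assert get_user_token_value('alice') == 'token-123'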
|
import shutil
import subprocess
from pathlib import Path
LOG_PATH = 'logs'
HPC_PLATFORM = 'torque'
HPC_FILE_NAME = None
TEMPLATE_BASE = None
TEMPLATE_HPC_CODE = None
PYTHON_CLEANUP_CODE_TEMPLATE = None
queue_parameters_default = {
'queue_name': 'compute',
'job_name': 'job',
'env': 'base',
'n_nodes': 1,
'ppn': 1,
'walltime': '10:00:00',
'requested_mem': '4GB',
'max_mem': '8GB'
}
def config(hpc_platform='torque'):
global HPC_PLATFORM, HPC_FILE_NAME, TEMPLATE_BASE, TEMPLATE_HPC_CODE, PYTHON_CLEANUP_CODE_TEMPLATE
HPC_PLATFORM = hpc_platform
if HPC_PLATFORM == 'torque':
HPC_FILE_NAME = 'hpc_file.qsub'
TEMPLATE_BASE = """
#!/bin/sh
# embedded options to qsub - start with #PBS
# -- Name of the job ---
#PBS -N {{job_name}}
# -- specify queue --
#PBS -q {{queue_name}}
# -- estimated wall clock time (execution time): hh:mm:ss --
#PBS -l walltime={{walltime}}
# -- number of processors/cores/nodes --
#PBS -l nodes={{n_nodes}}:ppn={{ppn}}
# -- user email address --
# please uncomment the following line and put in your e-mail address,
# if you want to receive e-mail notifications on a non-default address
##PBS -M your_email_address
# -- mail notification --
##PBS -m abe
#PBS -o {{hpc_output_path}}/{{LOG_PATH}}/{job_id_format}.output
#PBS -e {{hpc_output_path}}/{{LOG_PATH}}/{job_id_format}.error
# -- run in the current working (submission) directory --
if test X$PBS_ENVIRONMENT = XPBS_BATCH; then cd $PBS_O_WORKDIR; fi
# here follow the commands you want to execute
echo '**** Activating conda environment ****: env_name = '{{env}}
source activate {{env}}
"""
elif HPC_PLATFORM == 'lsf':
HPC_FILE_NAME = 'hpc_file.bsub'
TEMPLATE_BASE = """
#!/bin/sh
# embedded options to bsub
# -- Name of the job ---
#BSUB -J {{job_name}}
# -- specify queue --
#BSUB -q {{queue_name}}
# -- estimated wall clock time (execution time): hh:mm --
#BSUB -W {{walltime}}
# -- number of processors/cores/nodes --
#BSUB -n {{ppn}}
#BSUB -R "span[ptile={{n_nodes}}]"
#BSUB -R "rusage[mem={{requested_mem}}]"
#BSUB -M {{max_mem}}
# -- user email address --
# please uncomment the following line and put in your e-mail address,
# if you want to receive e-mail notifications on a non-default address
##BSUB -u your_email_address
# -- mail notification --
##BSUB -BN
#BSUB -o {{hpc_output_path}}/{{LOG_PATH}}/{job_id_format}.output
#BSUB -e {{hpc_output_path}}/{{LOG_PATH}}/{job_id_format}.error
# here follow the commands you want to execute
echo '**** Activating conda environment ****: env_name = '{{env}}
source activate {{env}}
"""
if HPC_PLATFORM == 'torque':
job_id_format = '$PBS_JOBID'
elif HPC_PLATFORM == 'lsf':
job_id_format = '%J'
TEMPLATE_HPC_CODE = TEMPLATE_BASE.format(job_id_format=job_id_format)+"""
echo '**** Running script ****'
{script_call}
echo '**** Script completed ****'
"""
PYTHON_CLEANUP_CODE_TEMPLATE = """
# SCRIPT SUFFIX AUTOMATICALLY ADDED
#import os
#print('removing script file')
#os.remove('{script}')
"""
def prepare_output_location(output_path):
hpc_output_path = Path(output_path) / 'hpc_files'
hpc_output_path.mkdir(parents=True, exist_ok=True)
hpc_output_path.joinpath(LOG_PATH).mkdir(parents=True, exist_ok=True)
return hpc_output_path
def prepare_script_from_code(code, code_cleanup_template, script_file):
script_file = str(script_file)
script_text = code + \
code_cleanup_template.format(
script=script_file)
    with open(script_file, 'w') as f:
        f.write(script_text)
def prepare_hpc_file(hpc_output_path, script_call, queue_parameters):
queue_parameters = {**queue_parameters_default, **queue_parameters}
if HPC_PLATFORM == 'lsf':
# lsf takes total number of cores
queue_parameters['ppn'] = \
queue_parameters['ppn'] * \
queue_parameters['n_nodes']
with open(str(hpc_output_path / HPC_FILE_NAME), 'w') as f:
f.write(TEMPLATE_HPC_CODE.format(script_call=script_call,
hpc_output_path=hpc_output_path,
LOG_PATH=LOG_PATH,
**queue_parameters
))
def submit_job(hpc_output_path):
if HPC_PLATFORM == 'torque':
subprocess.call('qsub < ' + str(hpc_output_path /
HPC_FILE_NAME), shell=True)
elif HPC_PLATFORM == 'lsf':
subprocess.call('bsub < ' + str(hpc_output_path /
HPC_FILE_NAME), shell=True)
def submit_bash_code(code, output_path, **queue_parameters):
hpc_output_path = prepare_output_location(output_path)
script_call = code
prepare_hpc_file(hpc_output_path, script_call, queue_parameters)
submit_job(hpc_output_path)
def submit_python_code(code, output_path, script_arguments='', **queue_parameters):
hpc_output_path = prepare_output_location(output_path)
script_file_name = hpc_output_path / 'job_script.py'
prepare_script_from_code(
code, PYTHON_CLEANUP_CODE_TEMPLATE, script_file_name)
submit_python_script(script_file_name, output_path, script_arguments=script_arguments,
**queue_parameters)
def submit_python_script(script_file_path, output_path, script_arguments='', **queue_parameters):
hpc_output_path = prepare_output_location(output_path)
job_script_name = hpc_output_path / 'job_script.py'
if not job_script_name.exists():
shutil.copy(script_file_path, job_script_name)
script_call = "python " + str(job_script_name) + ' ' + script_arguments
prepare_hpc_file(hpc_output_path, script_call, queue_parameters)
submit_job(hpc_output_path)
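
# Usage sketch (queue settings and paths below are illustrative, not defaults of any cluster):
#
#   config(hpc_platform='torque')               # or 'lsf'
#   submit_python_code(
#       "print('hello from the cluster')",
#       output_path='/scratch/me/hello_job',
#       queue_name='compute', job_name='hello', ppn=4, walltime='01:00:00')
#
# This writes job_script.py and hpc_file.qsub (or hpc_file.bsub) under
# /scratch/me/hello_job/hpc_files/ and submits the job with qsub/bsub.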
|
# -*- coding: utf-8 -*-
# ======================================================================================================================
# Copyright (©) 2015-2022 LCS - Laboratoire Catalyse et Spectrochimie, Caen, France. =
# CeCILL-B FREE SOFTWARE LICENSE AGREEMENT - See full LICENSE agreement in the root directory =
# ======================================================================================================================
"""Define plots."""
# import plotly.graph_objects as go
# import numpy as np
#
# from spectrochempy.utils import colorscale
# from spectrochempy.core import preferences
#
# # from matplotlib.ticker import MaxNLocator
#
# __all__ = ['plotly', 'plotly_stack']
# __dataset_methods__ = __all__
#
#
# # ======================================================================================================================
# # nddataset plot2D functions
# # ======================================================================================================================
#
# # contour map (default) -------------------------------------------------------
#
# def plotly_map(dataset, **kwargs):
# """
# Plot a 2D dataset as a contoured map.
#
# Alias of plot_2D (with `method` argument set to ``map``.
# """
# kwargs['method'] = 'map'
# return plotly(dataset, **kwargs)
#
#
# # stack plot -----------------------------------------------------------------
#
# def plotly_stack(dataset, **kwargs):
# """
# Plot a 2D dataset as a stacked plot.
#
# Alias of plot_2D (with `method` argument set to ``stack``).
# """
# kwargs['method'] = 'stack'
# return plotly(dataset, **kwargs)
#
#
# # image plot --------------------------------------------------------
#
# def plotly_image(dataset, **kwargs):
# """
# Plot a 2D dataset as an image plot.
#
# Alias of plot_2D (with `method` argument set to ``image``).
# """
# kwargs['method'] = 'image'
# return plotly(dataset, **kwargs)
#
#
# # surface plot -----------------------------------------------------------------
#
# def plotly_surface(dataset, **kwargs):
# """
# Plot a 2D dataset as a a 3D-surface.
#
# Alias of plot_2D (with `method` argument set to ``surface``.
# """
# kwargs['method'] = 'surface'
# return plotly(dataset, **kwargs)
#
#
# # generic plot (default stack plot) -------------------------------------------
#
# def plotly(dataset, **kwargs):
# """
# Generate a Plotly plot
#
# Parameters
# ----------
# dataset; |NDDataset|
# The dataset to plot
# kwargs: any
# Additional keyword arguments
#
# Returns
# -------
# figure
# A plotly figure
# """
#
# # TODO: not finished (replace preferences)
# # get all plot preferences
# # ------------------------------------------------------------------------------------------------------------------
#
# prefs = dataset.preferences
#
# # method of plot
# # ------------------------------------------------------------------------------------------------------------------
# method = kwargs.get('method', None)
#
# if not prefs.style:
# # not yet set, initialize with default project preferences
# prefs.update(dataset_preferences.to_dict())
#
# # surface specific setting
# if method not in ['surface']:
# prefs['colorbar'] = False
#
# if method is None:
# method = prefs.method_2D
#
# selector = kwargs.get('selector', '[Processed]')
# data_transposed = True if 'Transposed' in selector else False
#
# name = dataset.name
# if data_transposed:
# new = dataset.copy().T # transpose dataset
# nameadd = ' (Transposed)'
# else:
# new = dataset
# nameadd = ''
#
# # new = new.squeeze()
#
# # ------------------------------------------------------------------------------------------------------------------
# # coordinates
# # ------------------------------------------------------------------------------------------------------------------
#
# # the actual dimension name for x is the last in the new.dims list
# dimx = new.dims[-1]
# x = getattr(new, dimx)
#
# # the actual dimension name for y is the one before last in the new.dims list
# dimy = new.dims[-2]
# y = getattr(new, dimy)
#
# # ------------------------------------------------------------------------------------------------------------------
# # Should we display only ROI region?
# # ------------------------------------------------------------------------------------------------------------------
#
# if 'Processed' in selector:
# # in this case we make the selection
# new = new[y.roi[0]:y.roi[1], x.roi[0]:x.roi[1]]
# x = getattr(new, dimx)
# y = getattr(new, dimy)
#
# xsize = new.shape[-1]
# ysize = new.shape[-2]
#
# # figure setup
# # ------------------------------------------------------------------------------------------------------------------
# fig = new._fig
#
# # should we use the previous figure?
# clear = kwargs.get('clear', True)
#
# dragmode = kwargs.get('dragmode', 'zoom')
#
# if fig is None or not isinstance(fig, go.Figure) or clear:
#
# fig = go.Figure()
#
# # set the layout
# layout = dict(
# title=name + nameadd,
# paper_bgcolor='rgba(0,0,0,0)', # transparent
# autosize=True,
# hovermode='closest',
# showlegend=False,
# clickmode='event+select',
# dragmode=dragmode,
# selectdirection='h',
# margin=dict(t=43, r=50),
# )
#
# fig.update_layout(layout)
#
# if dragmode == 'select':
# fig.update_layout(
# paper_bgcolor='lightsalmon',
# annotations=[
# dict(
# x=2,
# y=5,
# xref="x",
# yref="y",
# text="Mask selection mode ACTIVE",
#
# ax=0,
# ay=-40
# )
# ]
# )
# # Other properties
# # ------------------------------------------------------------------------------------------------------------------
#
# # colorbar = kwargs.get('colorbar', prefs.colorbar)
#
# cmap = kwargs.get('cmap', 'viridis')
#
# # viridis is the default setting,
# # so we assume that it must be overwritten here
# # except if style is grayscale which is a particular case.
# style = kwargs.get('style', prefs.style)
#
# if style and "grayscale" not in style and cmap == "viridis":
#
# if method in ['map', 'image']:
# cmap = kwargs.get('colormap',
# kwargs.get('cmap', prefs.colormap))
# elif data_transposed:
# cmap = kwargs.get('colormap',
# kwargs.get('cmap', prefs.colormap_transposed))
# elif method in ['surface']:
# cmap = kwargs.get('colormap',
# kwargs.get('cmap', prefs.colormap_surface))
# else:
# cmap = kwargs.get('colormap',
# kwargs.get('cmap', prefs.colormap_stack))
#
# # lw = kwargs.get('linewidth', kwargs.get('lw',
# # prefs.pen_linewidth))
#
# # alpha = kwargs.get('calpha', prefs.contour_alpha)
#
# # antialiased = kwargs.get('antialiased', prefs.antialiased)
#
# # rcount = kwargs.get('rcount', prefs.rcount)
#
# # ccount = kwargs.get('ccount', prefs.ccount)
#
# number_x_labels = prefs.number_of_x_labels
# # number_y_labels = prefs.number_of_y_labels
#
# # ax.xaxis.set_major_locator(MaxNLocator(nbins=number_x_labels))
# # ax.yaxis.set_major_locator(MaxNLocator(nbins=number_y_labels))
# # x_locator = MaxNLocator(nbins=number_x_labels)
# # y_locator = MaxNLocator(nbins=number_y_labels)
#
# # if method not in ['surface']:
# # ax.xaxis.set_ticks_position('bottom')
# # ax.yaxis.set_ticks_position('left')
#
# # the next lines are to avoid multipliers in axis scale
# # formatter = ScalarFormatter(useOffset=False)
# # ax.xaxis.set_major_formatter(formatter)
# # ax.yaxis.set_major_formatter(formatter)
#
# # ------------------------------------------------------------------------------------------------------------------
# # Set axis
# # ------------------------------------------------------------------------------------------------------------------
#
# # set the abscissa axis
# # ------------------------------------------------------------------------------------------------------------------
#
# # discrete_data = False
# if x is not None and (not x.is_empty or x.is_labeled):
# xdata = x.data
# if not np.any(xdata):
# if x.is_labeled:
# # discrete_data = True
# # take into account the fact that sometimes axis have just labels
# xdata = range(1, len(x.labels) + 1)
# # TODO it would be more convenient if the data attribute returned the correct values
# else:
# xdata = range(xsize)
#
# xl = [xdata[0], xdata[-1]]
# xl.sort()
#
# if xsize < number_x_labels + 1:
# # extend the axis so that the labels are not too close to the limits
# inc = abs(xdata[1] - xdata[0]) * .5
# xl = [xl[0] - inc, xl[1] + inc]
#
# xlim = list(kwargs.get('xlim', xl)) # zoom?
# xlim.sort()
# xlim[-1] = min(xlim[-1], xl[-1])
# xlim[0] = max(xlim[0], xl[0])
#
# if kwargs.get('x_reverse', kwargs.get('reverse', x.reversed if x else False)):
# xlim.reverse()
#
# # xscale = kwargs.get("xscale", "linear")
#
# fig.update_layout(
# dict(
# xaxis=_make_axis('x', range=xlim, label=f'{x.alt_title} / {x.units:~P}', **kwargs),
# )
# )
#
# # set the ordinates axis
# # ------------------------------------------------------------------------------------------------------------------
# # the actual dimension name is the second in the new.dims list
# dimy = new.dims[-2]
# y = getattr(new, dimy)
# ysize = new.shape[-2]
# if y is not None and (not y.is_empty or y.is_labeled):
# ydata = y.data
# if not np.any(ydata):
# if y.is_labeled:
# ydata = range(1, len(y.labels) + 1)
# else:
# ydata = range(ysize)
#
# yl = [ydata[0], ydata[-1]]
# yl.sort()
#
# # if ysize < number_y_labels + 1:
# # # extend the axis so that the labels are not too close to the limits
# # inc = abs(ydata[1] - ydata[0]) * .5
# # yl = [yl[0] - inc, yl[1] + inc]
#
# ylim = list(kwargs.get("ylim", yl))
# ylim.sort()
# ylim[-1] = min(ylim[-1], yl[-1])
# ylim[0] = max(ylim[0], yl[0])
#
# # yscale = kwargs.get("yscale", "linear")
#
# fig.update_layout(
# dict(
# yaxis=_make_axis('y', label=f'{new.title} / {new.units:~P}', **kwargs),
# )
# )
#
# zoomreset = kwargs.get('zoomreset', None)
# uirevision = f'{new} {zoomreset}'
# fig.update_layout(
# dict(
# uirevision=uirevision,
# )
# )
#
# # Data
#
# # colorscale
# amp = 0
# mi = np.ma.min(y.data)
# ma = np.ma.max(y.data)
#
# if kwargs.get('reduce_range', True):
# amp = (ma - mi) / 20.
#
# if kwargs.get('extrema', True):
# vmin, vmax = mi - amp, ma + amp
# else:
# vmin, vmax = -ma - amp, ma + amp
#
# # Here I am using matplotlib to find the color map
# # because I did not find the way to do this easily using plotly)
# colorscale.normalize(vmin, vmax, cmap=cmap, rev=False)
#
# # DATA
#
# data = []
#
# optimisation = kwargs.get('optimisation', 0)
# offset = getattr(new, dimx).offset
#
# for trace in _compress(new[::-1], optimisation):
# name = f'{getattr(trace, dimy).values:~P}' # - dataset.y.offset_value:~P}'
# color = colorscale.rgba(getattr(trace, dimy).data) # , offset=yoffset)
# trace = trace.squeeze()
# x = getattr(trace, dimx).data
# if trace.mask is not None and np.any(trace.mask):
# z = trace.data
# z[trace.mask] = np.NaN
# else:
# z = trace.data
# y_string = f'{getattr(new, dimy).alt_title}:' + ' %{fullData.name} <br>' if getattr(new, dimy).size > 1 else ''
# data.append(dict(x=x,
# xaxis='x',
# y=z,
# yaxis='y',
# name=name,
# hoverlabel=dict(
# font_size=12,
# font_family="Rockwell"
# ),
# hovertemplate=f'{getattr(trace, dimx).alt_title}:'
# ' %{x:.2f} '
# f'{getattr(trace, dimx).units:~P}'
# f'<br>'
# f'{y_string}'
# f'{trace.alt_title}:'
# ' %{y:.2f} '
# f'{trace.units:~P}'
# '<extra></extra>',
# mode='lines',
# type='scattergl',
# connectgaps=False,
# line=dict(
# color=color,
# dash='solid',
# width=1.5),
# )
# )
# fig.add_traces(data)
#
# # show out of X ROI zone
# fullrange = getattr(new, dimx).limits
# roirange = getattr(new, dimx).roi
#
# x0, x1 = fullrange[0], roirange[0] - offset
# x2, x3 = fullrange[1], roirange[1] - offset
# fig.update_layout(
# shapes=[
# dict(
# type="rect",
# # x-reference is assigned to the x-values
# xref="x",
# # y-reference is assigned to the plot paper [0,1]
# yref="paper",
# x0=x0,
# y0=0,
# x1=x1,
# y1=1,
# fillcolor="LightSalmon",
# opacity=0.2,
# layer="below",
# line_width=0,
# ),
# dict(
# type="rect",
# # x-reference is assigned to the x-values
# xref="x",
# # y-reference is assigned to the plot paper [0,1]
# yref="paper",
# x0=x2,
# y0=0,
# x1=x3,
# y1=1,
# fillcolor="LightSalmon",
# opacity=0.2,
# layer="below",
# line_width=0,
# ),
# ]
# )
#
# return fig
#
#
# def _make_axis(axis,
# range=None,
# label=None,
# **kwargs):
# fontsize = kwargs.get('fontsize', 18)
#
# return dict(
# anchor='y' if axis == 'x' else 'x',
# domain=[0.0, 1.0],
# nticks=7,
# range=range,
# showgrid=True,
# side='bottom' if axis == 'x' else 'left',
# tickfont={
# # 'family': 'Times',
# 'size': fontsize
# },
# ticks='outside',
# title={
# 'font': {
# # 'family': 'Times',
# 'color': '#000000',
# 'size': fontsize,
# },
# 'text': label,
# },
# type='linear',
# zeroline=False,
# linecolor='black',
# linewidth=1,
# mirror=True,
# )
#
#
# def point_line_distance(x0, y0, x1, y1, x2, y2):
# return np.abs((x2 - x1) * (y1 - y0) - (x1 - x0) * (y2 - y1)) / np.sqrt(
# (x2 - x1) * (x2 - x1) + (y2 - y1) * (y2 - y1))
#
#
# def calc_distances(p, start=None, end=None):
# """
# Parameters
# ----------
# p : |NDDataset|
# """
# dimx = p.dims[-1]
# x = getattr(p, dimx).data
# z = p.data
# # ny = z.shape[0]
# if start is None:
# start = 0
# if end is None:
# end = x.size - 1
# distances = np.zeros(z.shape)
# for i in range(start + 1, end):
# distances[i] = point_line_distance(x[i], z[i], x[start], z[start], x[end], z[end])
# return distances
#
#
# def douglas_peucker(tolerance, p, mask, start, end, it):
# distances = calc_distances(p, start, end)
# more = distances > tolerance
# sum = np.count_nonzero(more)
# if sum > 0:
# maxindex = np.argmax(distances)
# mask[maxindex] = False
# m1 = douglas_peucker(tolerance, p, mask, start, maxindex, it + 1)
# m2 = douglas_peucker(tolerance, p, mask, maxindex, end, it + 1)
# mask = np.logical_and(mask, m1)
# mask = np.logical_and(mask, m2)
# else:
# for i in range(start + 1, end):
# if distances[i] <= tolerance:
# mask[i] = True
# return mask
#
#
# def simplify_douglas_peucker(tolerance, ds):
# # start by masking all the row of data except extremity
# new = ds.copy()
# new.mask = np.ones_like(ds, dtype='bool')
# new.mask[0] = False
# new.mask[-1] = False
# mask = douglas_peucker(tolerance, new, new.mask.copy(), 0, new.size - 1, 0)
# new.mask[:] = mask
# return new
#
#
# def _compress(ds, optimisation=None):
# """
# reduce the number of spectra to display
# Parameters
# ----------
# ds: |NDDataset|
# The dataset to simplify
#
# Returns
# -------
# a list a (x,y) traces
# """
# sizey, sizex = ds.shape
#
# # # find the closeness of each trace
# # # we always keep the first and the last
# if (optimisation == 0 and sizey < 100) or sizey < 5:
# return ds
#
# COUNT = {
# 0: 1000,
# 1: 500,
# 2: 150,
# 3: 30,
# 4: 5
# }[optimisation]
# dd = np.sum(ds, axis=1)
# new = dd.copy()
# if new.size > 250:
# COUNT = COUNT / 4
# TOLERANCE = np.max(calc_distances(dd)) / COUNT
# new = simplify_douglas_peucker(TOLERANCE, dd)
#
# print(np.count_nonzero(~new.mask), COUNT)
# keep = []
# for i, b in enumerate(new.mask):
# if not b:
# keep.append(ds[i])
# return keep
#
#
# if __name__ == '__main__':
# pass
|
# coding: utf-8
from frontend.root import *
if __name__ == '__main__':
# Inicia en la pantalla de inicio de sesión.
thread1 = Root(icono="frontend/recursos/icon.ico")
thread1.mainloop()
|
__all__=['AsymSphere', 'MultiPeaks', 'MultiSphereAtInterface', 'Parratt', 'Parratt_Biphasic', 'Parratt_New', 'SphereAtInterface', 'SymSphere', 'trialSphere'] |
# Functions for plotting the data
from .configuration_constants import variables_x, variables_y, convective_zone_types, overshoot_directions
from shared.plot import set_axes_limits, layout_plot, invert_axes, set_axes_labels as shared_set_axes_labels
import matplotlib.lines as mlines
def plot(arguments, models, plt):
models = filter_models(arguments=arguments, models=models)
legend={}
for plot_index, y_variable in enumerate(arguments.y):
plot_variable(arguments=arguments,
plt=plt, models=models, x_variable=arguments.x, y_variable=y_variable,
legend=legend, plot_index=plot_index)
show_legend(plt,arguments,legend)
set_axes_labels(plt, arguments)
set_axes_limits(plt, arguments)
invert_axes(plt, arguments)
layout_plot(plt)
def set_axes_labels(plt, arguments):
xlabel = variables_x[arguments.x]['description']
descriptions = list(map(lambda name: variables_y[name]['description'], arguments.y))
ylabel = '\n'.join(descriptions)
shared_set_axes_labels(plt=plt, arguments=arguments, xlabel=xlabel, ylabel=ylabel)
def filter_models(arguments, models):
    if arguments.zone_type is not None and arguments.zone_type != 'all':
        models = list(filter(lambda x: x['type'] == arguments.zone_type, models))
    if len(arguments.zone_numbers) != 0:
        models = list(filter(lambda x: x['zone_number'] in arguments.zone_numbers, models))
    if arguments.overshoot_direction is not None and arguments.overshoot_direction != 'both':
        models = list(filter(lambda x: x['overshoot_direction'] == arguments.overshoot_direction, models))
    return models
def plot_variable(arguments,plt, models, x_variable, y_variable, legend, plot_index):
for zone_number in range(1, 10):
models_by_zone_number = list(filter(lambda x: x['zone_number'] == zone_number, models))
        if len(models_by_zone_number) == 0: continue
plot_zone(arguments=arguments,plt=plt, models=models_by_zone_number,
x_variable=x_variable, y_variable=y_variable, legend=legend, plot_index=plot_index)
def plot_zone(arguments, plt, models, x_variable, y_variable, legend, plot_index):
    for zone_name, zone_settings in convective_zone_types.items():
        models_by_type = list(filter(lambda x: x['type'] == zone_name, models))
        if len(models_by_type) == 0: continue
        for direction in list(overshoot_directions.keys())[1:]:
            models_by_direction = list(filter(lambda x: x['overshoot_direction'] == direction, models_by_type))
            if len(models_by_direction) == 0: continue
            color = 'b'
            linestyle = '-'
            label = None
            if ('shared_among_zones' in variables_y[y_variable]
                    and variables_y[y_variable]['shared_among_zones']):
                scatter_variable(plt=plt, models=models_by_direction,
                                 x_variable=x_variable, y_variable=y_variable)
                return
            if direction in zone_settings['direction']:
                direction_data = zone_settings['direction'][direction]
                colors = direction_data['color']
                if plot_index >= len(colors):
                    color = colors[-1]
                else:
                    color = colors[plot_index]
                linestyle = direction_data['linestyle']
                label = zone_settings['label']
                label = f'{label} {direction}'
                if plot_index > 0:
                    label = f'{label} #{plot_index + 1}'
            plot_variable_with_style(plt=plt, models=models_by_direction,
                                     x_variable=x_variable, y_variable=y_variable,
                                     color=color, label=label, linestyle=linestyle, legend=legend)
def plot_variable_with_style(plt, models, x_variable, y_variable, color, label, linestyle, legend):
if len(models) == 0: return
legend[label] = {
'color': color,
'linestyle': linestyle
}
variable_y = list(map(lambda x: x[y_variable], models))
variable_x = list(map(lambda x: x[x_variable], models))
plt.plot(variable_x, variable_y, color=color, linestyle=linestyle)
def scatter_variable(plt, models, x_variable, y_variable):
if len(models) == 0: return
variable_y = list(map(lambda x: x[y_variable], models))
variable_x = list(map(lambda x: x[x_variable], models))
plt.scatter(variable_x, variable_y)
def show_legend(plt, arguments, legend):
if arguments.nolegend: return
items = []
for label, settings in legend.items():
        if label is None: continue
items.append(mlines.Line2D([], [], color=settings['color'], linestyle=settings['linestyle'], label=label))
if len(items) > 0: plt.legend(handles=items)
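
# Sketch of how plot() is typically driven (the Namespace fields and model-dict keys mirror what
# the functions above read; 'radius' and 'overshoot_param' stand in for whatever keys exist in
# variables_x / variables_y and are purely illustrative):
#
#   import argparse
#   import matplotlib.pyplot as plt
#
#   arguments = argparse.Namespace(x='radius', y=['overshoot_param'], zone_type='all',
#                                  zone_numbers=[], overshoot_direction='both', nolegend=False)
#   models = [{'type': 'core', 'zone_number': 1, 'overshoot_direction': 'up',
#              'radius': 0.1, 'overshoot_param': 0.02}]
#   plot(arguments=arguments, models=models, plt=plt)
#   plt.show()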
|
import matplotlib.pyplot as plt
import numpy as np
def plot_density_cut(rho, rmax=0, plane=2, height=0, *args, **kwargs):
"""Take a quick look at the loaded data in a particular plane
Parameters
----------
rmin,rmax: (3) list; upper and lower cutoffs
plane = {0: yz-plane, 1: xz-plane, 2: xy-plane}
"""
grid = np.array(rho.shape)
RHO = _plane_cut(rho, plane, height, grid, rmax=rmax)
fig = plt.figure()
if 'cmap' in kwargs:
plt.imshow(RHO, *args, origin='lower', **kwargs)
else:
plt.imshow(RHO, *args, cmap=plt.cm.jet, origin='lower', **kwargs)
plt.colorbar()
# plt.show()
return fig
def _plane_cut(data, plane, height, grid, rmax=0, return_mesh=False):
"""return_mesh = False : returns a two dimensional cut through 3d data
True : instead of data, 2d mesh is returned
Parameters:
----------
data
plane = {0: yz-plane, 1: xz-plane, 2:xy-plane}
unitcell = 3x3 array size of the unitcell
grid = 3x1 array size of grid
rmax = lets you choose the max grid cutoff
rmax = 0 means the entire grid is used
    return_mesh = boolean; decides whether mesh or cut through data is returned
"""
    rmin = [0, 0, 0]
    if rmax == 0:
        mid_grid = (grid / 2).astype(int)
        rmax = mid_grid
# resolve the periodic boundary conditions
x_pbc = list(range(-rmax[0], -rmin[0])) + list(range(rmin[0], rmax[0]))
y_pbc = list(range(-rmax[1], -rmin[1])) + list(range(rmin[1], rmax[1]))
z_pbc = list(range(-rmax[2], -rmin[2])) + list(range(rmin[2], rmax[2]))
height = (int)(np.round(height))
pbc_grids = [x_pbc, y_pbc, z_pbc]
pbc_grids.pop(plane)
A, B = np.meshgrid(*pbc_grids)
    indices = [A, B]
    indices.insert(plane, height)
    if not return_mesh:
        return data[indices[0], indices[1], indices[2]]
else:
return A, B
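
# Quick usage sketch (synthetic density, only to illustrate the call):
#
#   import numpy as np
#   rho = np.random.rand(20, 20, 20)
#   fig = plot_density_cut(rho, plane=2, height=0)   # xy-plane cut at height index 0
#   fig.savefig('density_cut.png')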
|
import numpy as np
from .FillGaps import FillGaps
from .InterpGaps import InterpGaps
from .InsertGaps import InsertGaps
def ResampleTimeSeries(xi,yi,xr,MaxGap,UseSpline=True,AddGaps=False):
x = np.copy(xi)
y = np.copy(yi)
if AddGaps:
x,y = InsertGaps(x,y,MaxGap)
    if MaxGap is None:
yn = np.copy(y)
else:
yn = FillGaps(x,y,MaxGap,UseSpline)
yr = InterpGaps(x,yn,xr,UseSpline)
return yr
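
# Usage sketch (xi/yi are an irregularly sampled series, xr is the regular grid to resample onto;
# MaxGap is in the same units as x and the values below are illustrative):
#
#   import numpy as np
#   xi = np.array([0.0, 1.0, 2.5, 6.0, 7.0])
#   yi = np.sin(xi)
#   xr = np.arange(0.0, 7.0, 0.5)
#   yr = ResampleTimeSeries(xi, yi, xr, MaxGap=2.0, UseSpline=True, AddGaps=True)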
|
"""
tests.test_extension
====================
Tests for extension
"""
import json
from flask import Flask
from flask_swag import Swag
def test_extension():
"""Basic test for flask extension."""
app = Flask(__name__)
app.config['SWAG_TITLE'] = "Test application."
app.config['SWAG_API_VERSION'] = '1.0.1'
swag = Swag(app)
with app.test_request_context('/swagger/swagger.json'):
swagger_json = app.generate_swagger()
client = app.test_client()
response = client.get('/swagger/swagger.json')
assert 200 == response.status_code
assert swagger_json == json.loads(response.data.decode('utf-8'))
|
from timeit import default_timer as timer
from termcolor import colored
from Problem.Problem import Problem
from Problem.Problem import ProblemType
from Problem.ProblemReport import ProblemReport
class ProunciationProblem(Problem):
def __init__(self, question, correct_answer):
super(ProunciationProblem, self).__init__(question, correct_answer, ProblemType.TO_PROUNCIATION)
def run(self):
start = timer()
prounciation = input(self.PROMPT_TEMPLATE_MAP[ProblemType.TO_PROUNCIATION].format(self.question))
is_successful = prounciation == self.correct_answer
if not is_successful:
print(colored(self.correct_answer, 'red', attrs=['reverse', 'blink']))
return ProblemReport(
problem_type=self.problem_type,
is_successful=is_successful,
question=self.question,
correct_answer=self.correct_answer,
actual_answer=prounciation,
time_elapsed_in_sec=timer() - start
)
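
# Usage sketch (question/answer strings are illustrative; the answer is typed interactively and
# the attribute names on the returned report are assumed to match the ProblemReport kwargs above):
#
#   problem = ProunciationProblem(question='猫', correct_answer='mao1')
#   report = problem.run()
#   print(report.is_successful, report.time_elapsed_in_sec)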
|
# coding: utf-8
###
# @file median.py
# @author Sébastien Rouault <[email protected]>
#
# @section LICENSE
#
# Copyright © 2018-2020 École Polytechnique Fédérale de Lausanne (EPFL).
# All rights reserved.
#
# @section DESCRIPTION
#
# NaN-resilient, coordinate-wise median GAR.
###
import tools
from . import register
import math
import torch
# Optional 'native' module
try:
import native
except ImportError:
native = None
# ---------------------------------------------------------------------------- #
# NaN-resilient, coordinate-wise median GAR
def aggregate(gradients, **kwargs):
""" NaN-resilient median coordinate-per-coordinate rule.
Args:
gradients Non-empty list of gradients to aggregate
... Ignored keyword-arguments
Returns:
NaN-resilient, coordinate-wise median of the gradients
"""
return torch.stack(gradients).median(dim=0)[0]
def aggregate_native(gradients, **kwargs):
""" NaN-resilient median coordinate-per-coordinate rule.
Args:
gradients Non-empty list of gradients to aggregate
... Ignored keyword-arguments
Returns:
NaN-resilient, coordinate-wise median of the gradients
"""
return native.median.aggregate(gradients)
def check(gradients, **kwargs):
""" Check parameter validity for the median rule.
Args:
gradients Non-empty list of gradients to aggregate
... Ignored keyword-arguments
Returns:
None if valid, otherwise error message string
"""
if not isinstance(gradients, list) or len(gradients) < 1:
return "Expected a list of at least one gradient to aggregate, got %r" % gradients
def upper_bound(n, f, d):
""" Compute the theoretical upper bound on the ratio non-Byzantine standard deviation / norm to use this rule.
Args:
n Number of workers (Byzantine + non-Byzantine)
f Expected number of Byzantine workers
d Dimension of the gradient space
Returns:
Theoretical upper-bound
"""
return 1 / math.sqrt(n - f)
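
# Worked example of the bound above (illustrative numbers): with n = 10 workers of which f = 2
# may be Byzantine, the rule requires the non-Byzantine standard-deviation-to-norm ratio to be
# at most 1 / sqrt(10 - 2) = 1 / sqrt(8) ≈ 0.354, independently of the dimension d.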
# ---------------------------------------------------------------------------- #
# GAR registering
# Register aggregation rule (pytorch version)
method_name = "median"
register(method_name, aggregate, check, upper_bound)
# Register aggregation rule (native version, if available)
if native is not None:
native_name = method_name
method_name = "native-" + method_name
if native_name in dir(native):
register(method_name, aggregate_native, check, upper_bound)
else:
tools.warning("GAR %r could not be registered since the associated native module %r is unavailable" % (method_name, native_name))
|
import typing
from anchorpy.error import ProgramError
class SomeError(ProgramError):
def __init__(self) -> None:
super().__init__(6000, "Example error.")
code = 6000
name = "SomeError"
msg = "Example error."
class OtherError(ProgramError):
def __init__(self) -> None:
super().__init__(6001, "Another error.")
code = 6001
name = "OtherError"
msg = "Another error."
CustomError = typing.Union[SomeError, OtherError]
CUSTOM_ERROR_MAP: dict[int, CustomError] = {
6000: SomeError(),
6001: OtherError(),
}
def from_code(code: int) -> typing.Optional[CustomError]:
maybe_err = CUSTOM_ERROR_MAP.get(code)
if maybe_err is None:
return None
return maybe_err
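
# Usage sketch: map an on-chain error code back to a typed exception (codes are defined above):
#
#   err = from_code(6000)
#   assert isinstance(err, SomeError) and err.msg == "Example error."
#   assert from_code(9999) is None    # unknown codes yield None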
|
from flask import Blueprint
error_blueprint = Blueprint('error_blueprint',__name__)
from app.errors import handlers
# Doing this import so that the error handlers in it are registered with the blueprint |
import types
import numpy as np
SMALL_GRID = np.array([[0.5581934, -0.82923452, 0.02811105],
[0.85314824, -0.15111695, 0.49930127],
[0.33363164, 0.92444637, 0.18463162],
[-0.79055406, -0.61207313, -0.0197677],
[0.54115503, -0.6990933, -0.46735403],
[0.2572988, 0.76735657, 0.587334],
[-0.71900497, 0.13915788, -0.68093094],
[-0.01575003, -0.98767151, -0.15574634],
[0.11254023, 0.38710762, 0.91514064],
[0.65727848, -0.25558372, 0.70898658]])
GRID = np.array([[-0.3889949, 0.25055803, -0.88651207],
[-0.32215506, -0.01322882, -0.94659448],
[0.02367464, 0.33125212, 0.94324522],
[0.14352843, 0.98208741, -0.12208155],
[-0.33731424, -0.02639622, 0.94102197],
[-0.71371608, -0.00913657, 0.70037553],
[0.27543912, -0.56977281, 0.77426884],
[0.9494808, -0.27132451, 0.15769981],
[0.80534178, 0.58419015, -0.10072976],
[-0.71157706, 0.56389324, -0.41914496],
[-0.63249914, 0.26813677, -0.72666878],
[0.03723451, 0.50673582, 0.86129693],
[0.58980619, -0.66773587, 0.45415578],
[0.91747066, -0.04932981, 0.39473302],
[-0.63278129, 0.50029337, -0.59101132],
[0.71905015, 0.63130778, 0.29054667],
[0.41343092, 0.4093163, 0.81334804],
[0.69237513, 0.53825332, 0.48052059],
[-0.66960206, 0.47597747, -0.57015659],
[0.99181207, -0.11223239, -0.06093197],
[-0.97539005, -0.15734202, -0.15445952],
[-0.73199196, -0.13932144, -0.66691627],
[0.09859593, 0.79242886, 0.60194298],
[0.86632913, 0.4181876, -0.27311714],
[0.1962053, -0.62076618, 0.75904732],
[-0.52996612, 0.68788398, 0.49593502],
[-0.57038469, -0.72583872, 0.38447297],
[0.94680602, 0.32009305, 0.03314802],
[0.2062331, -0.91981748, -0.33377194],
[0.00492786, -0.90938187, -0.41593308],
[-0.72349802, 0.606303, -0.33007164],
[-0.64359981, -0.50221586, -0.57754525],
[0.21802296, -0.89754906, 0.38323841],
[0.73938226, -0.37591704, -0.55856983],
[-0.45225152, -0.07171458, -0.88900258],
[0.52498364, -0.47577634, -0.70571174],
[0.90728605, 0.10420356, 0.40739863],
[0.08465876, 0.98327685, -0.16124373],
[-0.51854311, 0.63194738, -0.57598225],
[0.60001613, 0.5703173, 0.56099806],
[-0.25533381, -0.37958125, -0.88922591],
[-0.41425908, 0.37349873, 0.82999284],
[-0.09570411, 0.76619074, -0.63544667],
[-0.56434898, -0.42910009, -0.7052541],
[-0.65264073, -0.75588422, 0.05195301],
[0.00409419, -0.82815987, -0.56047699],
[0.39340692, -0.35219701, 0.84922804],
[-0.40230759, -0.71564088, 0.57096999],
[-0.10278411, 0.48295417, -0.86959226],
[0.54247975, 0.83325265, 0.10679769],
[0.92379565, -0.32112374, -0.20852131],
[0.15224038, 0.21710568, -0.96420329],
[-0.10514164, 0.79083545, -0.60292995],
[-0.21746656, -0.37151197, 0.90260021],
[0.3109654, 0.46004438, -0.8316608],
[-0.23916412, -0.49089814, -0.83774671],
[0.29699089, -0.89150092, -0.34208554],
[0.14917159, -0.21317452, -0.96555914],
[0.22686163, 0.10414401, -0.96834283],
[0.7175495, 0.3904845, -0.57675347],
[-0.13066132, -0.8137806, 0.56629387],
[-0.71179249, -0.32746321, 0.62138498],
[0.2548561, -0.6620188, 0.70482585],
[-0.60030469, 0.75526266, 0.26308287],
[-0.95210526, 0.22242061, 0.2098205],
[0.63696893, -0.7544887, -0.15816883],
[0.80888482, -0.48146657, -0.33748375],
[-0.22148124, 0.84744604, 0.48247411],
[0.03338003, 0.57086839, -0.82036276],
[0.35481394, 0.93054951, 0.09046918],
[-0.57813618, 0.69862557, 0.42152208],
[0.39088467, 0.77462782, 0.49715281],
[0.81270012, 0.58214702, -0.02496695],
[0.30466405, 0.34525589, -0.88768135],
[-0.08086346, 0.76866636, 0.63451803],
[0.79030596, -0.60912802, 0.06617809],
[0.40744375, -0.69386156, -0.59375561],
[-0.93496061, 0.30292708, 0.18461811],
[-0.99092609, -0.04953639, -0.12494651],
[0.61112374, 0.7797983, 0.13580276],
[0.26064656, -0.28859611, 0.92129021],
[-0.5490118, -0.65302497, -0.52167464],
[-0.842748, -0.50960614, -0.17342834],
[0.6244172, 0.55517995, 0.5494346],
[-0.06157987, 0.95344137, 0.29522445],
[0.63583035, 0.57326159, -0.51680839],
[0.56591439, 0.0229997, 0.82414314],
[0.71834931, 0.29183486, -0.63151142],
[0.47572203, -0.35993717, -0.80257946],
[-0.29635979, -0.4446486, 0.84525647],
[0.66083764, 0.74029486, 0.12351978],
[0.45341129, -0.85099596, 0.26499828],
[-0.64599992, -0.30902696, 0.69798742],
[-0.57768232, 0.64783958, -0.49657529],
[-0.64443451, -0.6510948, 0.40097347],
[0.94840987, 0.05513716, -0.31221566],
[-0.64239476, 0.62933839, -0.4373353],
[0.35569241, 0.86516397, -0.35351691],
[-0.47306257, 0.28775386, -0.83271215],
[-0.3091001, 0.52021522, 0.79613645]])
def sample(batch_size, f, grid=None):
if grid is None:
grid = SMALL_GRID.copy()
repr_vectors = np.random.normal(size=(batch_size, 4))
axes, angles = repr_4d_to_axis_angle(repr_vectors)
return f(multiply_from_left(repr_vectors, grid, invert=True)), repr_vectors, axes, angles
def repr_4d_to_axis_angle(vectors):
length = vectors.shape[0]
vectors = vectors / np.reshape(np.linalg.norm(vectors, axis=-1), (length, 1))
angles = np.arccos(vectors[:, 0])
vectors_3d = vectors[:, 1:4]
vectors_3d = vectors_3d / np.reshape(np.linalg.norm(vectors_3d, axis=-1), (length, 1))
return vectors_3d, angles
def spherical_axis_angle_to_repr_4d(phis, thetas, angles):
length = phis.shape[0]
repr_4d = np.zeros(shape=(length, 4))
repr_4d[:, 0] = np.cos(angles)
repr_4d[:, 1] = np.cos(phis) * np.sin(thetas) * np.sin(angles)
repr_4d[:, 2] = np.sin(phis) * np.sin(thetas) * np.sin(angles)
repr_4d[:, 3] = np.cos(thetas) * np.sin(angles)
return repr_4d
def vector_to_rotation(vectors, invert=False):
"""Produces a rotation matrix from a vector in R^4
We use the axis-angle representation."""
# First normalize the vectors
length = vectors.shape[0]
vectors = vectors / np.reshape(np.linalg.norm(vectors, axis=-1), (length, 1))
if invert:
angles = - np.arccos(vectors[:, 0])
else:
angles = np.arccos(vectors[:, 0])
vectors_3d = vectors[:, 1:4]
vectors_3d = vectors_3d / np.reshape(np.linalg.norm(vectors_3d, axis=-1), (length, 1))
return axis_angle(vectors_3d, angles)
def cross_product_matrices(vectors):
"""Return corresponding cross product matrices for an array of vectors
"""
length = vectors.shape[0]
result = np.tensordot(np.cross(vectors, unit_vectors(length, 3, 0)), np.array([1., 0., 0.]), axes=0) \
+ np.tensordot(np.cross(vectors, unit_vectors(length, 3, 1)), np.array([0., 1., 0.]), axes=0) \
+ np.tensordot(np.cross(vectors, unit_vectors(length, 3, 2)), np.array([0., 0., 1.]), axes=0)
return result
def multiply_from_left(repr_vectors, vectors, invert=False):
    """Apply the rotations encoded by repr_vectors (their inverses when invert=True)
    to every row of vectors; returns an array of shape (batch, n_points, 3)."""
rot_matrices = np.transpose(vector_to_rotation(repr_vectors, invert), axes=(0, 2, 1))
pre_result = np.tensordot(rot_matrices, vectors, axes=((-2,), (-1,)))
result = np.transpose(pre_result, axes=(0, 2, 1))
return result
def axis_angle(vectors, angles):
"""Return rotation matrices according to axis-angle representation
"""
length = vectors.shape[0]
matrices = cross_product_matrices(vectors)
identities = np.tile(np.identity(3), (length, 1, 1))
rotation_matrices = identities \
+ np.reshape(np.sin(angles), (length, 1, 1)) * matrices \
+ np.reshape(1 - np.cos(angles), (length, 1, 1)) * np.matmul(matrices, matrices)
return rotation_matrices
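# Quick check (added illustration, not part of the original module): axis_angle
# implements the Rodrigues formula R = I + sin(a) * K + (1 - cos(a)) * K^2, so a
# rotation by pi/2 about the z-axis should map the x-axis onto the y-axis:
#     R = axis_angle(np.array([[0., 0., 1.]]), np.array([np.pi / 2]))[0]
#     np.allclose(R.dot(np.array([1., 0., 0.])), [0., 1., 0.])  # -> True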
def unit_vectors(n_rows, n_cols, i):
"""Return n_rows standard unit vectors of n_cols component, st ith component is 1
>>> eps = 0.0001
>>> np.linalg.norm(unit_vectors(2,3,1) - np.array([[0,1,0],[0,1,0]]) ) < eps
True
"""
result = np.zeros((n_rows, n_cols))
result[:, i] = 1
return result
def stereographic_projection(z_values):
    """Stereographically project unit vectors from the pole (1, 0, ..., 0), after
    reflecting each point into the hemisphere with non-positive first component."""
z_upper = np.reshape(-np.sign(z_values[:, 0]), (-1, 1)) * z_values
dims = z_values.shape[1]
z_0 = z_upper[:, 0]
stereo_proj = np.copy(z_upper[:, 1:dims]) / (1 - z_0[:, np.newaxis])
return stereo_proj
def example_function(values):
return values[..., 0] * values[..., 0] * values[..., 0] + 0.3 * values[..., 0] + values[..., 2] * values[..., 2] * \
values[..., 2]
def ex_function_deg_2(values):
return values[..., 0] * values[..., 1] + 0.8 * values[..., 2] + 0.5 * values[..., 0] * values[..., 0]
class random_polynomial:
def __init__(self):
self.degree = 3
gamma = [1, 1, 0.8, 0.3]
self._coeff = np.zeros(shape=(self.degree + 1, self.degree + 1, self.degree + 1))
self._coeff[0, 0, 1] = gamma[1] * np.random.normal(1) # z
self._coeff[0, 0, 2] = gamma[2] * np.random.normal(1) # z^2
self._coeff[0, 0, 3] = gamma[3] * np.random.normal(1) # z^3
        self._coeff[0, 1, 0] = gamma[1] * np.random.normal(1) # y
self._coeff[0, 1, 1] = gamma[2] * np.random.normal(1) # yz
self._coeff[0, 1, 2] = gamma[3] * np.random.normal(1) # yz^2
        self._coeff[0, 2, 0] = gamma[2] * np.random.normal(1) # y^2
        self._coeff[0, 2, 1] = gamma[3] * np.random.normal(1) # y^2 z
        self._coeff[0, 3, 0] = gamma[3] * np.random.normal(1) # y^3
self._coeff[1, 0, 0] = gamma[1] * np.random.normal(1) # x
self._coeff[1, 0, 1] = gamma[2] * np.random.normal(1) # xz
self._coeff[1, 0, 2] = gamma[3] * np.random.normal(1) # xz^2
self._coeff[1, 1, 0] = gamma[2] * np.random.normal(1) # xy
self._coeff[1, 1, 1] = gamma[3] * np.random.normal(1) # xyz
self._coeff[1, 2, 0] = gamma[3] * np.random.normal(1) # xy^2
self._coeff[2, 0, 0] = gamma[2] * np.random.normal(1) # x^2
self._coeff[2, 0, 1] = gamma[3] * np.random.normal(1) # x^2z
self._coeff[2, 1, 0] = gamma[3] * np.random.normal(1) # x^2y
self._coeff[3, 0, 0] = gamma[3] * np.random.normal(1) # x^3
def __call__(self, values):
x = values[..., 0]
y = values[..., 1]
z = values[..., 2]
return self._coeff[0, 0, 1] * z + \
self._coeff[0, 0, 2] * z * z + \
self._coeff[0, 0, 3] * z * z * z + \
self._coeff[0, 1, 0] * y + \
self._coeff[0, 1, 1] * y * z + \
self._coeff[0, 1, 2] * y * z * z + \
self._coeff[0, 2, 0] * y * y + \
self._coeff[0, 2, 1] * y * y * z + \
self._coeff[0, 3, 0] * y * y * y + \
self._coeff[1, 0, 0] * x + \
self._coeff[1, 0, 1] * x * z + \
self._coeff[1, 0, 2] * x * z * z + \
self._coeff[1, 1, 0] * x * y + \
self._coeff[1, 1, 1] * x * y * z + \
self._coeff[1, 2, 0] * x * y * y + \
self._coeff[2, 0, 0] * x * x + \
self._coeff[2, 0, 1] * x * x * z + \
self._coeff[2, 1, 0] * x * x * y + \
self._coeff[3, 0, 0] * x * x * x
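# Illustrative usage sketch (added for clarity; not part of the original module).
# Assuming the definitions above (SMALL_GRID, sample, example_function) are in scope,
# one batch of randomly rotated grids can be drawn and evaluated like this.
def _demo_sample(batch_size=8):
    # values: example_function evaluated on batch_size rotated copies of the grid
    # repr_vectors: the raw 4D vectors encoding each rotation
    # axes, angles: the axis-angle decomposition of those rotations
    values, repr_vectors, axes, angles = sample(batch_size, example_function)
    return values.shape, repr_vectors.shape, axes.shape, angles.shape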
if __name__ == "__main__":
import doctest
doctest.testmod()
|
from Models.Application import Application
import sqlite3
import os.path
class ApplicationRepository:
def __init__(self):
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
db_path = os.path.join(BASE_DIR, "../database.db")
self.connection = sqlite3.connect(db_path)
self.cursor = self.connection.cursor()
def create(self, job_id, applicant_email_id, company_email_id):
application = Application()
application.job_id = job_id
application.applicant_email_id = applicant_email_id
application.company_email_id = company_email_id
application.response = 'Applied'
self.connection.execute(
            '''INSERT INTO application(job_id, company_email_id, applicant_email_id, response) VALUES (?, ?, ?, ?)''',
            (job_id, company_email_id, applicant_email_id, application.response))
self.connection.commit()
application.application_id = self.get(job_id, applicant_email_id, company_email_id).application_id
return application
def get(self, job_id, applicant_email_id, company_email_id):
application = Application()
self.cursor.execute(
'''SELECT * FROM application WHERE job_id = ? AND applicant_email_id = ? AND company_email_id = ?''',
(job_id, applicant_email_id, company_email_id))
result = self.cursor.fetchone()
if result is None:
return None
else:
application.application_id = result[0]
application.job_id = result[1]
application.company_email_id = result[2]
application.applicant_email_id = result[3]
application.response = result[4]
return application
def get_all(self, job_id):
application_list = list()
        self.cursor.execute('''SELECT * FROM application WHERE job_id = ?''', (job_id,))
result = self.cursor.fetchall()
if result is None:
return None
else:
for row in result:
application = Application()
application.application_id = row[0]
application.job_id = row[1]
application.company_email_id = row[2]
application.applicant_email_id = row[3]
application.response = row[4]
application_list.append(application)
return application_list
def respond(self, job_id, applicant_email_id, company_email_id, response):
application = self.get(job_id, applicant_email_id, company_email_id)
        application.response = response
self.connection.execute('''UPDATE application SET response = ? WHERE application_id = ?''',
(response, application.application_id))
self.connection.commit()
return application
def delete(self, job_id, applicant_email_id, company_email_id):
application = self.get(job_id, applicant_email_id, company_email_id)
        self.connection.execute('''DELETE FROM application WHERE application_id = ?''', (application.application_id,))
self.connection.commit()
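# Illustrative usage sketch (not part of the original file). It assumes database.db
# already contains an `application` table with the columns
# (application_id, job_id, company_email_id, applicant_email_id, response):
#     repo = ApplicationRepository()
#     created = repo.create(job_id=1,
#                           applicant_email_id='applicant@example.com',
#                           company_email_id='company@example.com')
#     repo.respond(1, 'applicant@example.com', 'company@example.com', response='Accepted')
#     repo.delete(1, 'applicant@example.com', 'company@example.com')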
|
from src.domain.pipeline_steps.question_response_evaluator.or_question_response_evaluator import \
ORQuestionResponseEvaluator
from src.domain.pipeline_steps.question_response_evaluator.wh_question_response_evaluator import \
WHQuestionResponseEvaluator
from src.domain.pipeline_steps.question_response_evaluator.yesno_question_response_evaluator import \
YESNOQuestionProcessor
from src.domain.pipeline_steps.question_response_evaluator.question_type_analyser import QuestionTypeFinder
class BestAnswerExtractor():
def __init__(self):
self.or_question_processor = ORQuestionResponseEvaluator()
self.wh_question_processor = WHQuestionResponseEvaluator()
self.yesno_question_processor = YESNOQuestionProcessor()
self.question_type_finder = QuestionTypeFinder()
def extract_best_answer(self, user_input, question, bert_answers):
if self.question_type_finder.is_wh_question(question):
return self.wh_question_processor.extract_best_answer(user_input, question, bert_answers)
        elif self.question_type_finder.is_or_question(question):
return self.or_question_processor.extract_best_answer(question, bert_answers)
else:
return self.yesno_question_processor.extract_best_answer(question, bert_answers)
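# Illustrative usage sketch (not part of the original file). `user_input`, `question`
# and `bert_answers` stand for the conversation text, the question string and the
# candidate answers produced upstream by the BERT step:
#     extractor = BestAnswerExtractor()
#     best_answer = extractor.extract_best_answer(user_input, question, bert_answers)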
|
import argparse
import itertools
import shlex
import unittest2 as unittest
class FabricioTestCase(unittest.TestCase):
def command_checker(self, args_parsers=(), expected_args_set=(), side_effects=()):
def check_command_args(command, **kwargs):
try:
command_parser = next(args_parsers)
expected_command_args = next(expected_args_set)
side_effect = next(side_effects)
except StopIteration:
self.fail('unexpected command: {0}'.format(command))
args = shlex.split(command)
# removing \x00 necessary for Python 2.6
args = map(lambda arg: arg.replace('\x00', ''), args)
self.assertDictEqual(
expected_command_args,
vars(command_parser.parse_args(args)),
)
if isinstance(side_effect, Exception):
raise side_effect
return side_effect
if isinstance(args_parsers, argparse.ArgumentParser):
args_parsers = itertools.repeat(args_parsers)
else:
args_parsers = iter(args_parsers)
expected_args_set = iter(expected_args_set)
side_effects = iter(side_effects)
return check_command_args
class SucceededResult(str):
succeeded = True
failed = False
class FailedResult(str):
succeeded = False
failed = True
docker_run_args_parser = argparse.ArgumentParser(argument_default=argparse.SUPPRESS)
docker_run_args_parser.add_argument('executable', nargs=1)
docker_run_args_parser.add_argument('run_or_create', nargs=1)
docker_run_args_parser.add_argument('--user')
docker_run_args_parser.add_argument('--publish', action='append')
docker_run_args_parser.add_argument('--env', action='append')
docker_run_args_parser.add_argument('--volume', action='append')
docker_run_args_parser.add_argument('--link', action='append')
docker_run_args_parser.add_argument('--label', action='append')
docker_run_args_parser.add_argument('--add-host', action='append', dest='add-host')
docker_run_args_parser.add_argument('--net')
docker_run_args_parser.add_argument('--network')
docker_run_args_parser.add_argument('--mount')
docker_run_args_parser.add_argument('--restart')
docker_run_args_parser.add_argument('--stop-signal', dest='stop-signal')
docker_run_args_parser.add_argument('--detach', action='store_true')
docker_run_args_parser.add_argument('--tty', action='store_true')
docker_run_args_parser.add_argument('--interactive', action='store_true')
docker_run_args_parser.add_argument('--rm', action='store_true')
docker_run_args_parser.add_argument('--name')
docker_run_args_parser.add_argument('--custom-option', dest='custom-option')
docker_run_args_parser.add_argument('image')
docker_run_args_parser.add_argument('command', nargs=argparse.REMAINDER)
args_parser = argparse.ArgumentParser(argument_default=argparse.SUPPRESS)
args_parser.add_argument('args', nargs=argparse.REMAINDER)
# TODO use args_parser instead
docker_inspect_args_parser = argparse.ArgumentParser(argument_default=argparse.SUPPRESS)
docker_inspect_args_parser.add_argument('executable', nargs=2)
docker_inspect_args_parser.add_argument('--type')
docker_inspect_args_parser.add_argument('image_or_container')
# TODO use args_parser instead
docker_entity_inspect_args_parser = argparse.ArgumentParser(argument_default=argparse.SUPPRESS)
docker_entity_inspect_args_parser.add_argument('executable', nargs=3)
docker_entity_inspect_args_parser.add_argument('--format')
docker_entity_inspect_args_parser.add_argument('service')
docker_service_update_args_parser = argparse.ArgumentParser(argument_default=argparse.SUPPRESS)
docker_service_update_args_parser.add_argument('executable', nargs=3)
docker_service_update_args_parser.add_argument('--env-add', dest='env-add', action='append')
docker_service_update_args_parser.add_argument('--env-rm', dest='env-rm', action='append')
docker_service_update_args_parser.add_argument('--image')
docker_service_update_args_parser.add_argument('--mount-add', dest='mount-add', action='append')
docker_service_update_args_parser.add_argument('--mount-rm', dest='mount-rm', action='append')
docker_service_update_args_parser.add_argument('--name')
docker_service_update_args_parser.add_argument('--publish-add', dest='publish-add', action='append')
docker_service_update_args_parser.add_argument('--publish-rm', dest='publish-rm', action='append')
docker_service_update_args_parser.add_argument('--label-add', dest='label-add', action='append')
docker_service_update_args_parser.add_argument('--label-rm', dest='label-rm', action='append')
docker_service_update_args_parser.add_argument('--constraint-add', dest='constraint-add', action='append')
docker_service_update_args_parser.add_argument('--constraint-rm', dest='constraint-rm', action='append')
docker_service_update_args_parser.add_argument('--container-label-add', dest='container-label-add', action='append')
docker_service_update_args_parser.add_argument('--container-label-rm', dest='container-label-rm', action='append')
docker_service_update_args_parser.add_argument('--network-add', dest='network-add', action='append')
docker_service_update_args_parser.add_argument('--network-rm', dest='network-rm', action='append')
docker_service_update_args_parser.add_argument('--secret-add', dest='secret-add', action='append')
docker_service_update_args_parser.add_argument('--secret-rm', dest='secret-rm', action='append')
docker_service_update_args_parser.add_argument('--replicas')
docker_service_update_args_parser.add_argument('--restart-condition', dest='restart-condition')
docker_service_update_args_parser.add_argument('--user')
docker_service_update_args_parser.add_argument('--stop-grace-period', dest='stop-grace-period')
docker_service_update_args_parser.add_argument('--args')
docker_service_update_args_parser.add_argument('--custom_option')
docker_service_update_args_parser.add_argument('service')
docker_service_create_args_parser = argparse.ArgumentParser(argument_default=argparse.SUPPRESS)
docker_service_create_args_parser.add_argument('executable', nargs=3)
docker_service_create_args_parser.add_argument('--env', action='append')
docker_service_create_args_parser.add_argument('--mount', action='append')
docker_service_create_args_parser.add_argument('--name')
docker_service_create_args_parser.add_argument('--publish', action='append')
docker_service_create_args_parser.add_argument('--label', action='append')
docker_service_create_args_parser.add_argument('--host', action='append')
docker_service_create_args_parser.add_argument('--secret', action='append')
docker_service_create_args_parser.add_argument('--config', action='append')
docker_service_create_args_parser.add_argument('--group', action='append')
docker_service_create_args_parser.add_argument('--dns', action='append')
docker_service_create_args_parser.add_argument('--constraint', action='append')
docker_service_create_args_parser.add_argument('--container-label', dest='container-label', action='append')
docker_service_create_args_parser.add_argument('--placement-pref', dest='placement-pref', action='append')
docker_service_create_args_parser.add_argument('--dns-option', dest='dns-option', action='append')
docker_service_create_args_parser.add_argument('--dns-search', dest='dns-search', action='append')
docker_service_create_args_parser.add_argument('--replicas')
docker_service_create_args_parser.add_argument('--restart-condition', dest='restart-condition')
docker_service_create_args_parser.add_argument('--user')
docker_service_create_args_parser.add_argument('--network')
docker_service_create_args_parser.add_argument('--mode')
docker_service_create_args_parser.add_argument('--stop-grace-period', dest='stop-grace-period')
docker_service_create_args_parser.add_argument('--custom_option')
docker_service_create_args_parser.add_argument('image', nargs=1)
docker_service_create_args_parser.add_argument('args', nargs=argparse.REMAINDER)
docker_build_args_parser = argparse.ArgumentParser(argument_default=argparse.SUPPRESS)
docker_build_args_parser.add_argument('executable', nargs=2)
docker_build_args_parser.add_argument('--tag')
docker_build_args_parser.add_argument('--no-cache', type=int, dest='no-cache')
docker_build_args_parser.add_argument('--pull', nargs='?', const=True, type=int)
docker_build_args_parser.add_argument('--force-rm', nargs='?', const=True, type=int, dest='force-rm')
docker_build_args_parser.add_argument('--custom')
docker_build_args_parser.add_argument('--custom-bool', nargs='?', const=True, type=int, dest='custom-bool')
docker_build_args_parser.add_argument('path')
class Command(object):
def __init__(self, parser, args):
self.parser = parser
self.args = args
def __eq__(self, command):
command_args = vars(self.parser.parse_args(shlex.split(command)))
return self.args == command_args
def __ne__(self, command):
return not self.__eq__(command)
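# Illustrative example (added; not part of the original file). Command pairs a parser
# with the argument namespace expected from a command line, so it can be compared
# against command strings in mock assertions. Because the parsers use
# argument_default=argparse.SUPPRESS, options that are not passed simply do not
# appear in the namespace. For instance, with docker_run_args_parser this should
# compare equal:
#     expected = Command(docker_run_args_parser, {
#         'executable': ['docker'],
#         'run_or_create': ['run'],
#         'name': 'app',
#         'image': 'nginx:latest',
#         'command': ['echo', 'hello'],
#     })
#     expected == 'docker run --name app nginx:latest echo hello'  # -> True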
|
from django.shortcuts import render
from django.http import HttpResponse
# Create your views here.
def index(request):
    return HttpResponse("<h1>Hello Veronica</h1>")
def working(request):
    return HttpResponse("<h1>Working Veronica</h1>") |
"""Instance operations and instances."""
import copy
import json
import logging
import requests
from objectrocket import bases
from objectrocket import util
from objectrocket.instances.mongodb import MongodbInstance
from objectrocket.instances.elasticsearch import ESInstance
from objectrocket.instances.redis import RedisInstance
logger = logging.getLogger(__name__)
class Instances(bases.BaseOperationsLayer, bases.Extensible):
"""Instance operations.
:param objectrocket.client.Client base_client: An instance of objectrocket.client.Client.
"""
def __init__(self, base_client):
super(Instances, self).__init__(base_client=base_client)
# Register any extensions for this class.
self._register_extensions('objectrocket.instances.Instances')
#####################
# Public interface. #
#####################
@util.token_auto_auth
def all(self):
"""Get all ObjectRocket instances the current client has access to.
:returns: A list of :py:class:`bases.BaseInstance` instances.
:rtype: list
"""
response = requests.get(self._url, **self._default_request_kwargs)
data = self._get_response_data(response)
return self._concrete_instance_list(data)
@util.token_auto_auth
def create(self, name, plan, zone,
service_type='mongodb', instance_type='mongodb_sharded', version='2.4.6'):
"""Create an ObjectRocket instance.
:param str name: The name to give to the new instance.
:param int plan: The plan size of the new instance.
:param str zone: The zone that the new instance is to exist in.
:param str service_type: The type of service that the new instance is to provide.
:param str instance_type: The instance type to create.
:param str version: The version of the service the new instance is to provide.
"""
# Build up request data.
url = self._url
request_data = {
'name': name,
'service': service_type,
'plan': plan,
'type': instance_type,
'version': version,
'zone': zone
}
# Call to create an instance.
response = requests.post(
url,
data=json.dumps(request_data),
**self._default_request_kwargs
)
# Log outcome of instance creation request.
if response.status_code == 200:
logger.info('Successfully created a new instance with: {}'.format(request_data))
else:
logger.info('Failed to create instance with: {}'.format(request_data))
logger.info('Response: [{0}] {1}'.format(response.status_code, response.content))
data = self._get_response_data(response)
return self._concrete_instance(data)
@util.token_auto_auth
def get(self, instance_name):
"""Get an ObjectRocket instance by name.
:param str instance_name: The name of the instance to retrieve.
:returns: A subclass of :py:class:`bases.BaseInstance`, or None if instance does not exist.
:rtype: :py:class:`bases.BaseInstance`
"""
url = self._url + instance_name + '/'
response = requests.get(url, **self._default_request_kwargs)
data = self._get_response_data(response)
return self._concrete_instance(data)
######################
# Private interface. #
######################
def _concrete_instance(self, instance_doc):
"""Concretize an instance document.
:param dict instance_doc: A document describing an instance. Should come from the API.
:returns: A subclass of :py:class:`bases.BaseInstance`, or None.
:rtype: :py:class:`bases.BaseInstance`
"""
if not isinstance(instance_doc, dict):
return None
# Attempt to instantiate the appropriate class for the given instance document.
try:
service = instance_doc['service']
cls = self._service_class_map[service]
return cls(instance_document=instance_doc, instances=self)
# If construction fails, log the exception and return None.
except Exception as ex:
logger.exception(ex)
logger.error(
'Instance construction failed. You probably need to upgrade to a more '
'recent version of the client. Instance document which generated this '
'warning: {}'.format(instance_doc)
)
return None
def _concrete_instance_list(self, instance_docs):
"""Concretize a list of instance documents.
:param list instance_docs: A list of instance documents. Should come from the API.
:returns: A list of :py:class:`bases.BaseInstance`s.
:rtype: list
"""
if not instance_docs:
return []
return list(
filter(None, [self._concrete_instance(instance_doc=doc) for doc in instance_docs])
)
@property
def _default_request_kwargs(self):
"""The default request keyword arguments to be passed to the requests library."""
defaults = copy.deepcopy(super(Instances, self)._default_request_kwargs)
defaults.setdefault('headers', {}).update({
'X-Auth-Token': self._client.auth._token
})
return defaults
@property
def _service_class_map(self):
"""A mapping of service names to service classes."""
service_map = {
'mongodb': MongodbInstance,
'elasticsearch': ESInstance,
'redis': RedisInstance,
}
return service_map
@property
def _url(self):
"""The base URL for instance operations."""
return self._client._url + 'instances/'
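# Illustrative usage sketch (not part of the original module). In normal use this layer
# is reached through an authenticated objectrocket client rather than constructed
# directly; `client` below is a placeholder for such a client object:
#     instances = Instances(base_client=client)
#     for instance in instances.all():            # every instance visible to the token
#         print(instance)
#     mongo = instances.get('my-mongo-instance')  # a concrete instance object, or None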
|