/NeodroidAgent-0.4.8-py36-none-any.whl/neodroidagent/utilities/misc/environment_model/environment_utilities.py

from itertools import product
import gym
import numpy
__all__ = [
"get_gym_environs",
"get_gym_stats",
"is_tuple",
"is_continuous",
"is_multidimensional",
"obs_stats",
"action_stats",
"env_stats",
]
def get_gym_environs():
""" List all valid OpenAI ``gym`` environment ids. """
return [e.id for e in gym.envs.registry.all()]
def get_gym_stats():
""" Return a pandas DataFrame of the environment IDs. """
try:
import pandas as pd
    except ImportError:
        raise ImportError("Cannot import `pandas`; unable to run `get_gym_stats`")
df = []
for e in gym.envs.registry.all():
print(e.id)
df.append(env_stats(gym.make(e.id)))
cols = [
"id",
"continuous_actions",
"continuous_observations",
"action_dim",
# "action_ids",
"deterministic",
"multidim_actions",
"multidim_observations",
"n_actions_per_dim",
"n_obs_per_dim",
"obs_dim",
# "obs_ids",
"seed",
"tuple_actions",
"tuple_observations",
]
return pd.DataFrame(df)[cols]
def is_tuple(env):
"""
Check if the action and observation spaces for `env` are instances of
``gym.spaces.Tuple`` or ``gym.spaces.Dict``.
Notes
-----
A tuple space is a tuple of *several* (possibly multidimensional)
action/observation spaces. For our purposes, a tuple space is necessarily
multidimensional.
Returns
-------
tuple_action : bool
Whether the `env`'s action space is an instance of ``gym.spaces.Tuple``
or ``gym.spaces.Dict``.
tuple_obs : bool
Whether the `env`'s observation space is an instance of
``gym.spaces.Tuple`` or ``gym.spaces.Dict``.
"""
tuple_space, dict_space = gym.spaces.Tuple, gym.spaces.dict.Dict
tuple_action = isinstance(env.action_space, (tuple_space, dict_space))
tuple_obs = isinstance(env.observation_space, (tuple_space, dict_space))
return tuple_action, tuple_obs
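
# Example (illustrative sketch, not part of the original module): for an
# environment such as ``Blackjack-v0``, whose observation space is a ``Tuple``
# of three ``Discrete`` spaces and whose action space is a single ``Discrete``,
# ``is_tuple(env)`` returns ``(False, True)``.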
def is_multidimensional(env):
"""
Check if the action and observation spaces for `env` are multidimensional
or ``Tuple`` spaces.
Notes
-----
A multidimensional space is any space whose actions / observations have
more than one element in them. This includes ``Tuple`` spaces, but also
includes single action/observation spaces with several dimensions.
Parameters
----------
env : ``gym.wrappers`` or ``gym.envs`` instance
The environment to evaluate.
Returns
-------
md_action : bool
Whether the `env`'s action space is multidimensional.
md_obs : bool
Whether the `env`'s observation space is multidimensional.
tuple_action : bool
Whether the `env`'s action space is a ``Tuple`` instance.
tuple_obs : bool
Whether the `env`'s observation space is a ``Tuple`` instance.
"""
md_action, md_obs = True, True
tuple_action, tuple_obs = is_tuple(env)
if not tuple_action:
act = env.action_space.sample()
md_action = isinstance(act, (list, tuple, numpy.ndarray)) and len(act) > 1
if not tuple_obs:
OS = env.observation_space
obs = OS.low if "low" in dir(OS) else OS.sample() # sample causes problems
md_obs = isinstance(obs, (list, tuple, numpy.ndarray)) and len(obs) > 1
return md_action, md_obs, tuple_action, tuple_obs
def is_continuous(env, tuple_action, tuple_obs):
"""
Check if an `env`'s observation and action spaces are continuous.
Parameters
----------
env : ``gym.wrappers`` or ``gym.envs`` instance
The environment to evaluate.
tuple_action : bool
Whether the `env`'s action space is an instance of `gym.spaces.Tuple`
or `gym.spaces.Dict`.
tuple_obs : bool
Whether the `env`'s observation space is an instance of `gym.spaces.Tuple`
or `gym.spaces.Dict`.
Returns
-------
cont_action : bool
Whether the `env`'s action space is continuous.
cont_obs : bool
Whether the `env`'s observation space is continuous.
"""
Continuous = gym.spaces.box.Box
if tuple_obs:
spaces = env.observation_space.spaces
cont_obs = all([isinstance(s, Continuous) for s in spaces])
else:
cont_obs = isinstance(env.observation_space, Continuous)
if tuple_action:
spaces = env.action_space.spaces
cont_action = all([isinstance(s, Continuous) for s in spaces])
else:
cont_action = isinstance(env.action_space, Continuous)
return cont_action, cont_obs
def action_stats(env, md_action, cont_action):
"""
Get information on `env`'s action space.
    Parameters
    ----------
    env : ``gym.wrappers`` or ``gym.envs`` instance
        The environment to evaluate.
    md_action : bool
        Whether the `env`'s action space is multidimensional.
    cont_action : bool
        Whether the `env`'s action space is continuous.
Returns
-------
n_actions_per_dim : list of length (action_dim,)
The number of possible actions for each dimension of the action space.
action_ids : list or None
A list of all valid actions within the space. If `cont_action` is
True, this value will be None.
action_dim : int or None
The number of dimensions in a single action.
"""
if cont_action:
action_dim = 1
action_ids = None
n_actions_per_dim = [numpy.inf]
if md_action:
action_dim = env.action_space.shape[0]
n_actions_per_dim = [numpy.inf for _ in range(action_dim)]
else:
if md_action:
n_actions_per_dim = [
space.n if hasattr(space, "n") else numpy.inf
for space in env.action_space.spaces
]
action_ids = (
None
if numpy.inf in n_actions_per_dim
else list(product(*[range(i) for i in n_actions_per_dim]))
)
action_dim = len(n_actions_per_dim)
else:
action_dim = 1
n_actions_per_dim = [env.action_space.n]
action_ids = list(range(n_actions_per_dim[0]))
return n_actions_per_dim, action_ids, action_dim
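
# Example (sketch): for a ``Discrete(2)`` action space (e.g. ``CartPole-v1``),
# ``action_stats`` returns ``([2], [0, 1], 1)``; for a one-dimensional
# continuous ``Box`` space it returns ``([inf], None, 1)``.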
def obs_stats(env, md_obs, cont_obs):
"""
Get information on the observation space for `env`.
Parameters
----------
env : ``gym.wrappers`` or ``gym.envs`` instance
The environment to evaluate.
    md_obs : bool
        Whether the `env`'s observation space is multidimensional.
    cont_obs : bool
        Whether the `env`'s observation space is continuous.
Returns
-------
n_obs_per_dim : list of length (obs_dim,)
The number of possible observation classes for each dimension of the
observation space.
obs_ids : list or None
A list of all valid observations within the space. If `cont_obs` is
True, this value will be None.
obs_dim : int or None
The number of dimensions in a single observation.
"""
if cont_obs:
obs_ids = None
obs_dim = env.observation_space.shape[0]
n_obs_per_dim = [numpy.inf for _ in range(obs_dim)]
else:
if md_obs:
n_obs_per_dim = [
space.n if hasattr(space, "n") else numpy.inf
for space in env.observation_space.spaces
]
obs_ids = (
None
if numpy.inf in n_obs_per_dim
else list(product(*[range(i) for i in n_obs_per_dim]))
)
obs_dim = len(n_obs_per_dim)
else:
obs_dim = 1
n_obs_per_dim = [env.observation_space.n]
obs_ids = list(range(n_obs_per_dim[0]))
return n_obs_per_dim, obs_ids, obs_dim
def env_stats(env):
"""
Compute statistics for the current environment.
Parameters
----------
env : ``gym.wrappers`` or ``gym.envs`` instance
The environment to evaluate.
Returns
-------
env_info : dict
A dictionary containing information about the action and observation
spaces of `env`.
"""
md_action, md_obs, tuple_action, tuple_obs = is_multidimensional(env)
cont_action, cont_obs = is_continuous(env, tuple_action, tuple_obs)
n_actions_per_dim, action_ids, action_dim = action_stats(
env, md_action, cont_action
)
n_obs_per_dim, obs_ids, obs_dim = obs_stats(env, md_obs, cont_obs)
env_info = {
"id": env.spec.id,
"seed": env.spec.seed if "seed" in dir(env.spec) else None,
"deterministic": bool(~env.spec.nondeterministic),
"tuple_actions": tuple_action,
"tuple_observations": tuple_obs,
"multidim_actions": md_action,
"multidim_observations": md_obs,
"continuous_actions": cont_action,
"continuous_observations": cont_obs,
"n_actions_per_dim": n_actions_per_dim,
"action_dim": action_dim,
"n_obs_per_dim": n_obs_per_dim,
"obs_dim": obs_dim,
"action_ids": action_ids,
"obs_ids": obs_ids,
}
    return env_info
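
# Illustrative usage sketch (not part of the original module). Assumes a
# locally registered classic-control id such as "CartPole-v1".
if __name__ == "__main__":
    demo_env = gym.make("CartPole-v1")
    info = env_stats(demo_env)
    # CartPole-v1 has a 4-dimensional continuous Box observation space and a
    # Discrete(2) action space, so expect obs_dim == 4 and n_actions_per_dim == [2].
    print(info["id"], info["obs_dim"], info["n_actions_per_dim"])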

/Cfg-Loader-0.2.2.tar.gz/Cfg-Loader-0.2.2/docs/api_reference.rst

.. _api_reference:
***
API
***
.. module:: cfg_loader
This part of the documentation covers all the interfaces of Cfg-Loader.
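
A minimal usage sketch (illustrative only; the field declarations and the
``load`` call below are assumptions based on the loader interface, so check
them against the reference that follows):

.. code-block:: python

    from cfg_loader.schema.base import ConfigSchema
    from cfg_loader.loader import YamlConfigLoader

    class MySchema(ConfigSchema):
        """Declare the expected configuration fields here."""

    loader = YamlConfigLoader(MySchema)
    config = loader.load('config.yml')  # assumed entry point, see the Loader section
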
Schema
======
.. py:currentmodule:: cfg_loader.schema.base
.. autoclass:: InterpolatingSchema
:members:
.. autoclass:: ExtraFieldsSchema
:members:
.. autoclass:: UnwrapNestedSchema
:members:
.. autoclass:: ConfigSchema
:members:
Loader
======
.. py:currentmodule:: cfg_loader.loader
.. autoclass:: BaseConfigLoader
:members:
.. autoclass:: YamlConfigLoader
:members:
Interpolator
============
.. py:currentmodule:: cfg_loader.interpolator
.. autoclass:: Interpolator
:members:
.. autoclass:: SubstitutionTemplate
:members:
Fields
======
.. py:currentmodule:: cfg_loader.fields
.. autoclass:: Path
:members:
.. autoclass:: UnwrapNested
:members:
Exceptions
==========
.. py:currentmodule:: cfg_loader.exceptions
.. autoclass:: ConfigLoaderError
:show-inheritance:
.. autoclass:: ConfigFileMissingError
:show-inheritance:
.. autoclass:: ConfigFileNotFoundError
:show-inheritance:
.. autoclass:: LoadingError
:show-inheritance:
.. autoclass:: ValidationError
:show-inheritance:
.. autoclass:: UnsetRequiredSubstitution
:show-inheritance:
.. autoclass:: InvalidSubstitution
:show-inheritance:

/Assimulo-3.0.tar.gz/Assimulo-3.0/assimulo/examples/ida_with_jac.py

# Copyright (C) 2010 Modelon AB
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, version 3 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import numpy as N
import pylab as P
from assimulo.solvers import IDA
from assimulo.problem import Implicit_Problem
import nose
def run_example(with_plots=True):
r"""
Example for demonstrating the use of a user supplied Jacobian
    DAE (in residual form):
.. math::
\dot y_1-y_3 &= 0 \\
\dot y_2-y_4 &= 0 \\
\dot y_3+y_5 y_1 &= 0 \\
\dot y_4+y_5 y_2+9.82&= 0 \\
y_3^2+y_4^2-y_5(y_1^2+y_2^2)-9.82 y_2&= 0
on return:
- :dfn:`imp_mod` problem instance
- :dfn:`imp_sim` solver instance
"""
#Defines the residual
def f(t,y,yd):
res_0 = yd[0]-y[2]
res_1 = yd[1]-y[3]
res_2 = yd[2]+y[4]*y[0]
res_3 = yd[3]+y[4]*y[1]+9.82
res_4 = y[2]**2+y[3]**2-y[4]*(y[0]**2+y[1]**2)-y[1]*9.82
return N.array([res_0,res_1,res_2,res_3,res_4])
#Defines the Jacobian
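    # IDA builds the iteration matrix J = dF/dy + c*dF/dyd, where c is a scalar
    # supplied by the solver: the "Derivative" block below contributes c*dF/dyd,
    # and the remaining entries form dF/dy.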
def jac(c,t,y,yd):
jacobian = N.zeros([len(y),len(y)])
#Derivative
jacobian[0,0] = 1*c
jacobian[1,1] = 1*c
jacobian[2,2] = 1*c
jacobian[3,3] = 1*c
#Differentiated
jacobian[0,2] = -1
jacobian[1,3] = -1
jacobian[2,0] = y[4]
jacobian[3,1] = y[4]
jacobian[4,0] = y[0]*2*y[4]*-1
jacobian[4,1] = y[1]*2*y[4]*-1-9.82
jacobian[4,2] = y[2]*2
jacobian[4,3] = y[3]*2
#Algebraic
jacobian[2,4] = y[0]
jacobian[3,4] = y[1]
jacobian[4,4] = -(y[0]**2+y[1]**2)
return jacobian
    #The initial conditions
y0 = [1.0,0.0,0.0,0.0,5] #Initial conditions
yd0 = [0.0,0.0,0.0,-9.82,0.0] #Initial conditions
#Create an Assimulo implicit problem
imp_mod = Implicit_Problem(f,y0,yd0,name = 'Example using an analytic Jacobian')
#Sets the options to the problem
imp_mod.jac = jac #Sets the jacobian
imp_mod.algvar = [1.0,1.0,1.0,1.0,0.0] #Set the algebraic components
#Create an Assimulo implicit solver (IDA)
imp_sim = IDA(imp_mod) #Create a IDA solver
    #Sets the parameters
    imp_sim.atol = 1e-6 #Default 1e-6
    imp_sim.rtol = 1e-6 #Default 1e-6
    imp_sim.suppress_alg = True #Suppress the algebraic variables on the error test
#Let Sundials find consistent initial conditions by use of 'IDA_YA_YDP_INIT'
imp_sim.make_consistent('IDA_YA_YDP_INIT')
#Simulate
t, y, yd = imp_sim.simulate(5,1000) #Simulate 5 seconds with 1000 communication points
#Basic tests
nose.tools.assert_almost_equal(y[-1][0],0.9401995, places=4)
nose.tools.assert_almost_equal(y[-1][1],-0.34095124, places=4)
nose.tools.assert_almost_equal(yd[-1][0], -0.88198927, places=4)
nose.tools.assert_almost_equal(yd[-1][1], -2.43227069, places=4)
#Plot
if with_plots:
P.plot(t,y,linestyle="dashed",marker="o") #Plot the solution
P.xlabel('Time')
P.ylabel('State')
P.title(imp_mod.name)
P.show()
return imp_mod, imp_sim
if __name__=='__main__':
    mod,sim = run_example()

/Makima-0.1.9.1-py3-none-any.whl/makima/windows/utils/mouse.py

import ctypes
import ctypes.wintypes
from time import sleep
from makima.windows.utils import common
from makima.windows.call_win_api.i_mouse import IMouse
class WinMouse(IMouse):
_MOUSEEVENTF_MOVE = 0x0001 # mouse move
_MOUSEEVENTF_LEFTDOWN = 0x0002 # left button down
_MOUSEEVENTF_LEFTUP = 0x0004 # left button up
_MOUSEEVENTF_RIGHTDOWN = 0x0008 # right button down
_MOUSEEVENTF_RIGHTUP = 0x0010 # right button up
_MOUSEEVENTF_MIDDLEDOWN = 0x0020 # middle button down
_MOUSEEVENTF_MIDDLEUP = 0x0040 # middle button up
_MOUSEEVENTF_ABSOLUTE = 0x8000 # absolute move
_MOUSEEVENTF_XDOWN = 0x0080 # X button down
_MOUSEEVENTF_XUP = 0x0100 # X button up
_MOUSEEVENTF_WHEEL = 0x0800 # wheel button is rotated
_MOUSEEVENTF_HWHEEL = 0x01000 # wheel button is tilted
_SM_CXSCREEN = 0
_SM_CYSCREEN = 1
LEFT_BUTTON = u'b1c'
RIGHT_BUTTON = u'b3c'
_SUPPORTED_BUTTON_NAMES = [LEFT_BUTTON, RIGHT_BUTTON]
def _compose_mouse_event(self, name, press=True, release=False):
"""
Composes mouse event based on button name and action flags.
:param str name: mouse button name. Should be one
of: 'b1c' - left button or 'b3c' - right button.
:param bool press: flag indicating whether event should indicate
button press.
:param bool release: flag indicating whether event should indicate
button release.
"""
mouse_event = 0
if name == self.LEFT_BUTTON:
if press:
mouse_event += self._MOUSEEVENTF_LEFTDOWN
if release:
mouse_event += self._MOUSEEVENTF_LEFTUP
if name == self.RIGHT_BUTTON:
if press:
mouse_event += self._MOUSEEVENTF_RIGHTDOWN
if release:
mouse_event += self._MOUSEEVENTF_RIGHTUP
return mouse_event
def _do_event(self, flags, x, y, data, extra_info):
"""
        Generates a mouse event at the specified coordinates.
:param int flags: integer value holding flags that describes mouse
events to trigger. Can be a combination of:
_MOUSEEVENTF_MOVE = 0x0001 # mouse move
_MOUSEEVENTF_LEFTDOWN = 0x0002 # left button down
_MOUSEEVENTF_LEFTUP = 0x0004 # left button up
_MOUSEEVENTF_RIGHTDOWN = 0x0008 # right button down
_MOUSEEVENTF_RIGHTUP = 0x0010 # right button up
_MOUSEEVENTF_MIDDLEDOWN = 0x0020 # middle button down
_MOUSEEVENTF_MIDDLEUP = 0x0040 # middle button up
_MOUSEEVENTF_ABSOLUTE = 0x8000 # absolute move
_MOUSEEVENTF_XDOWN = 0x0080 # X button down
_MOUSEEVENTF_XUP = 0x0100 # X button up
_MOUSEEVENTF_WHEEL = 0x0800 # wheel button is rotated
_MOUSEEVENTF_HWHEEL = 0x01000 # wheel button is tilted
:param int x: x coordinate.
:param int y: y coordinate.
:param int data: value holding additional event data, for ex.:
* If flags contains _MOUSEEVENTF_WHEEL, then data specifies the
amount of wheel movement. A positive value indicates that the
wheel was rotated forward, away from the user; a negative
value indicates that the wheel was rotated backward, toward
the user. One wheel click is defined as WHEEL_DELTA, which is
120.
* If flags contains _MOUSEEVENTF_HWHEEL, then data specifies the
amount of wheel movement. A positive value indicates that the
wheel was tilted to the right; a negative value indicates that
the wheel was tilted to the left.
* If flags contains _MOUSEEVENTF_XDOWN or _MOUSEEVENTF_XUP, then
data specifies which X buttons were pressed or released. This
value may be any combination of the following flags.
* If flags is not _MOUSEEVENTF_WHEEL, _MOUSEEVENTF_XDOWN, or
_MOUSEEVENTF_XUP, then data should be zero.
:param int extra_info: value with additional value associated with
the mouse event.
"""
x_metric = ctypes.windll.user32.GetSystemMetrics(self._SM_CXSCREEN)
y_metric = ctypes.windll.user32.GetSystemMetrics(self._SM_CYSCREEN)
x_calc = 65536 * x / x_metric + 1
y_calc = 65536 * y / y_metric + 1
ctypes.windll.user32.mouse_event(
flags, int(x_calc), int(y_calc), data, extra_info)
def move(self, x, y, smooth=True):
common.verify_xy_coordinates(x, y)
old_x, old_y = self.get_position()
for i in range(100):
intermediate_x = old_x + (x - old_x) * (i + 1) / 100.0
intermediate_y = old_y + (y - old_y) * (i + 1) / 100.0
smooth and sleep(.01)
self._do_event(self._MOUSEEVENTF_MOVE + self._MOUSEEVENTF_ABSOLUTE,
int(intermediate_x), int(intermediate_y), 0, 0)
def drag(self, x1, y1, x2, y2, smooth=True):
common.verify_xy_coordinates(x1, y1)
common.verify_xy_coordinates(x2, y2)
self.press_button(x1, y1, self.LEFT_BUTTON)
self.move(x2, y2, smooth=smooth)
self.release_button(self.LEFT_BUTTON)
def press_button(self, x, y, button_name=LEFT_BUTTON):
common.verify_xy_coordinates(x, y)
common.verify_mouse_button_name(button_name,
self._SUPPORTED_BUTTON_NAMES)
self.move(x, y)
self._do_event(
self._compose_mouse_event(button_name, press=True, release=False),
0, 0, 0, 0)
def release_button(self, button_name=LEFT_BUTTON):
common.verify_mouse_button_name(button_name,
self._SUPPORTED_BUTTON_NAMES)
self._do_event(
self._compose_mouse_event(button_name, press=False, release=True),
0, 0, 0, 0)
def click(self, x, y, button_name=LEFT_BUTTON):
common.verify_xy_coordinates(x, y)
common.verify_mouse_button_name(button_name,
self._SUPPORTED_BUTTON_NAMES)
self.move(x, y, False)
self._do_event(
self._compose_mouse_event(button_name, press=True, release=True),
0, 0, 0, 0)
def double_click(self, x, y, button_name=LEFT_BUTTON):
common.verify_xy_coordinates(x, y)
common.verify_mouse_button_name(button_name,
self._SUPPORTED_BUTTON_NAMES)
self.move(x, y)
self._do_event(
self._compose_mouse_event(button_name, press=True, release=True),
0, 0, 0, 0)
self._do_event(
self._compose_mouse_event(button_name, press=True, release=True),
0, 0, 0, 0)
def wheel(self, x, y, movement):
common.verify_xy_coordinates(x, y)
self.move(x, y, False)
self._do_event(self._MOUSEEVENTF_WHEEL, x, y, movement, 0)
def get_position(self):
obj_point = ctypes.wintypes.POINT()
ctypes.windll.user32.GetCursorPos(ctypes.byref(obj_point))
        return obj_point.x, obj_point.y
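
# Illustrative usage sketch (not part of the original module; Windows-only,
# since the class drives user32 via ctypes):
#
#     mouse = WinMouse()
#     mouse.move(200, 200)                         # glide the cursor to (200, 200)
#     mouse.click(200, 200, WinMouse.LEFT_BUTTON)  # left-click at that point
#     print(mouse.get_position())                  # -> (200, 200)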

/NovalIDE-1.1.8-py3-none-any.whl/noval/editor/text.py

import tkinter as tk
from tkinter import messagebox,filedialog
from noval import GetApp,_
import noval.core as core
import noval.filewatcher as filewatcher
import codecs
import noval.python.parser.utils as parserutils
import noval.consts as consts
import os
import noval.util.fileutils as fileutils
import sys
import noval.util.utils as utils
import noval.ui_base as ui_base
from tkinter import ttk
import time
import traceback
import noval.menu as tkmenu
import noval.syntax.lang as lang
import noval.util.strutils as strutils
import shutil
from noval.filewatcher import FileEventHandler
import noval.ui_common as ui_common
import noval.misc as misc
import tkinter.font as tkfont
import noval.docposition as docposition
import noval.constants as constants
import noval.python.pyutils as pyutils
import noval.ui_utils as ui_utils
import noval.syntax.syntax as syntax
import noval.find.findtext as findtext
import noval.editor.format as textformat
def classifyws(s, tabwidth):
raw = effective = 0
for ch in s:
if ch == " ":
raw = raw + 1
effective = effective + 1
elif ch == "\t":
raw = raw + 1
effective = (effective // tabwidth + 1) * tabwidth
else:
break
return raw, effective
def index2line(index):
return int(float(index))
def line2index(line):
return str(float(line))
class TextDocument(core.Document):
ASC_FILE_ENCODING = "ascii"
UTF_8_FILE_ENCODING = "utf-8"
ANSI_FILE_ENCODING = "cp936"
DEFAULT_FILE_ENCODING = ASC_FILE_ENCODING
def __init__(self):
core.Document .__init__(self)
self._inModify = False
self.file_watcher = filewatcher.FileAlarmWatcher()
self._is_watched = False
self.file_encoding = GetApp().GetConfig().Read(consts.DEFAULT_FILE_ENCODING_KEY,TextDocument.DEFAULT_FILE_ENCODING)
if self.file_encoding == "":
self.file_encoding = TextDocument.DEFAULT_FILE_ENCODING
self._is_new_doc = True
self._is_loading_doc = False
def GetSaveObject(self,filename):
return codecs.open(filename, 'w',self.file_encoding)
def DoSave(self):
if self._is_watched:
self.file_watcher.StopWatchFile(self)
        #Check the document's data encoding before saving it
self.file_encoding = self.DetectDocumentEncoding()
def GetOpenDocument(self,filepath,exclude_self=True):
'''
        Find an open document by its file path.
        exclude_self: whether to exclude this document itself from the search.
'''
if exclude_self and parserutils.ComparePath(self.GetFilename(),filepath):
return None
doc = GetApp().GetDocumentManager().GetDocument(filepath)
return doc
def SaveAs(self):
"""
Prompts the user for a file to save to, and then calls OnSaveDocument.
"""
docTemplate = self.GetDocumentTemplate()
if not docTemplate:
return False
if docTemplate.GetDocumentType() == TextDocument and docTemplate.GetFileFilter() != "*.*":
default_ext = ""
else:
default_ext = docTemplate.GetDefaultExtension()
descrs = strutils.gen_file_filters()
filename = filedialog.asksaveasfilename(
master = GetApp(),
filetypes=descrs,
defaultextension=default_ext,
initialdir=os.getcwd(),
initialfile=os.path.basename(self.GetFilename())
)
if filename == "":
return False
        #Convert the path to the platform's standard path format
        filename = fileutils.opj(filename)
        #Check whether the filename is already used by another open document; if so, the file cannot be saved
if self.GetOpenDocument(filename):
messagebox.showwarning(GetApp().GetAppName(),_("File has already been opened,could not overwrite it."))
return False
if not self.OnSaveDocument(filename):
return False
self.SetFilename(filename)
self.SetTitle(fileutils.get_filename_from_path(filename))
for view in self._documentViews:
view.OnChangeFilename()
if docTemplate.FileMatchesTemplate(filename):
self.GetDocumentManager().AddFileToHistory(filename)
return True
def OnSaveDocument(self, filename):
"""
Constructs an output file for the given filename (which must
not be empty), and calls SaveObject. If SaveObject returns true, the
document is set to unmodified; otherwise, an error message box is
displayed.
"""
if not filename:
return False
msgTitle = GetApp().GetAppName()
if not msgTitle:
msgTitle = _("File Error")
self._is_loading_doc = False
backupFilename = None
fileObject = None
copied = False
try:
self.DoSave()
# if current file exists, move it to a safe place temporarily
if os.path.exists(filename):
# Check if read-only.
if not os.access(filename, os.W_OK):
messagebox.showerror(msgTitle,"Could not save '%s'. No write permission to overwrite existing file." % \
fileutils.get_filename_from_path(filename))
return False
backupFilename = "%s.bk%s" % (filename, 1)
shutil.copy(filename, backupFilename)
copied = True
fileObject = self.GetSaveObject(filename)
self.SaveObject(fileObject)
fileObject.close()
fileObject = None
if backupFilename:
os.remove(backupFilename)
except Exception as e:
utils.get_logger().exception("")
if fileObject:
fileObject.close() # file is still open, close it, need to do this before removal
# save failed, remove copied file
if backupFilename and copied:
shutil.copy(backupFilename,filename)
os.remove(backupFilename)
messagebox.showerror(msgTitle,_("Could not save '%s': %s") % (fileutils.get_filename_from_path(filename), e))
if not self._is_new_doc:
self.SetDocumentModificationDate()
return False
self.SetFilename(filename, True)
self.Modify(False)
self.SetDocumentSaved(True)
self._is_watched = True
self._is_new_doc = False
self.file_watcher.StartWatchFile(self)
self.SetDocumentModificationDate()
return True
def DetectFileEncoding(self,filepath):
file_encoding = TextDocument.DEFAULT_FILE_ENCODING
try:
with open(filepath,"rb") as f:
data = f.read()
result = fileutils.detect(data)
file_encoding = result['encoding']
except:
utils.get_logger().exception("")
        #If the detected encoding is None, assume the file is ANSI-encoded and use cp936 instead
if None == file_encoding or file_encoding.lower().find('iso') != -1:
file_encoding = TextDocument.ANSI_FILE_ENCODING
return file_encoding
def DetectDocumentEncoding(self):
view = self.GetFirstView()
file_encoding = self.file_encoding
        #When the file encoding is ascii or the document is new, check whether the document contains Chinese
        #characters; if so, change the document encoding to utf-8 so the Chinese characters can be saved
if file_encoding == self.ASC_FILE_ENCODING or self.IsNewDocument:
guess_encoding = file_encoding.lower()
if guess_encoding == self.ASC_FILE_ENCODING:
guess_encoding = self.UTF_8_FILE_ENCODING
result = fileutils.detect(view.GetValue().encode(guess_encoding))
file_encoding = result['encoding']
if None == file_encoding:
file_encoding = TextDocument.ASC_FILE_ENCODING
return file_encoding
def OnOpenDocument(self, filename):
"""
Constructs an input file for the given filename (which must not
be empty), and calls LoadObject. If LoadObject returns true, the
document is set to unmodified; otherwise, an error message box is
displayed. The document's views are notified that the filename has
changed, to give windows an opportunity to update their titles. All of
the document's views are then updated.
"""
        #When reopening a file that changed externally, check whether the editor text has unsaved modifications
if not self.OnSaveModified():
return False
self._is_loading_doc = True
msgTitle = GetApp().GetAppName()
if not msgTitle:
msgTitle = _("File Error")
self.file_encoding = self.DetectFileEncoding(filename)
fileObject = None
try:
if self.file_encoding == 'binary':
fileObject = open(filename, 'rb')
is_bytes = True
else:
fileObject = codecs.open(filename, 'r',self.file_encoding)
is_bytes = False
self.LoadObject(fileObject,is_bytes)
fileObject.close()
fileObject = None
except Exception as e:
utils.get_logger().exception("")
if fileObject:
fileObject.close() # file is still open, close it
messagebox.showerror(msgTitle,_("Could not open '%s': %s") % (fileutils.get_filename_from_path(filename), e))
self._is_loading_doc = False
return False
self.SetDocumentModificationDate()
self.SetFilename(filename, True)
self.Modify(False)
self.SetDocumentSaved(True)
self.UpdateAllViews()
self.file_watcher.AddFileDoc(self)
self._is_watched = True
self._is_new_doc = False
rember_file_pos = GetApp().GetConfig().ReadInt(consts.REMBER_FILE_KEY, True)
if rember_file_pos:
pos = docposition.DocMgr.GetPos(filename)
if pos[0] != None:
self.GetFirstView().GetCtrl().GotoPos(*pos)
self._is_loading_doc = False
return True
@property
def IsWatched(self):
return self._is_watched
@property
def FileWatcher(self):
return self.file_watcher
def SaveObject(self, fileObject):
view = self.GetFirstView()
fileObject.write(view.GetValue())
view.SetModifyFalse()
return True
def LoadObject(self, fileObject,is_bytes=False):
view = self.GetFirstView()
data = fileObject.read()
if is_bytes:
view.SetBinaryValue(data)
else:
view.SetValue(data)
view.SetModifyFalse()
return True
def IsModified(self):
filename = self.GetFilename()
if filename and not os.path.exists(filename) and not self._is_new_doc:
return True
view = self.GetFirstView()
if view:
return view.IsModified()
return False
@property
def IsNewDocument(self):
return self._is_new_doc
def Modify(self, modify):
if self._inModify:
return
self._inModify = True
view = self.GetFirstView()
if not modify and view:
            #Mark the editor state as unmodified
view.SetModifyFalse()
core.Document.Modify(self, modify)
self._inModify = False
class TextView(misc.AlarmEventView):
def __init__(self):
misc.AlarmEventView.__init__(self)
self._textEditor = None
self._markerCount = 0
# Initialize the classes position manager for the first control
# that is created only.
if not docposition.DocMgr.IsInitialized():
docposition.DocMgr.InitPositionCache()
def GetCtrlClass(self):
""" Used in split window to instantiate new instances """
return SyntaxTextCtrl
def GetLangId(self):
return lang.ID_LANG_TXT
def GetCtrl(self):
return self._textEditor
def SetCtrl(self, ctrl):
self._textEditor = ctrl
def OnCreate(self, doc, flags):
frame = GetApp().CreateDocumentFrame(self, doc, flags)
        #wrap=tk.NONE disables automatic line wrapping; undo defaults to False (undo disabled), and True enables undo/redo for the text
        #use_edit_tester makes the popup menu use the main edit menu's tester functions; use_edit_image reuses the main edit menu's icons
self._text_frame = ui_base.TextviewFrame(frame,text_class=self.GetCtrlClass(),\
font="EditorFont",horizontal_scrollbar_class=ui_common.AutoScrollbar,wrap=tk.NONE,undo=True,use_edit_tester=True,use_edit_image=True)
self._textEditor = self._text_frame.text
        #Bind the view-activation event: releasing a mouse button or a key activates the text view
self._textEditor.bind("<<ActivateView>>", self.ActivateView)
self._textEditor.event_add("<<ActivateView>>","<KeyRelease>")
self._textEditor.bind("<ButtonRelease>", self.ButtonRelease)
        #If Esc is pressed in the text widget while in full-screen mode, exit full-screen mode
self._textEditor.bind("<Escape>", self.ToogleFullScreen)
self._text_frame.grid(row=0, column=0, sticky=tk.NSEW, in_=frame)
self._text_frame.home_widget = frame # don't forget home
frame.columnconfigure(0, weight=1)
frame.rowconfigure(0, weight=1)
self._text_frame.text.bind("<<Modified>>", self.OnModify, True)
self._text_frame.text.bind("<<TextChange>>", self.OnModify, True)
        #Ctrl + mouse wheel zooms the font size in and out
self._text_frame.text.bind("<Control-MouseWheel>", self._cmd_zoom_with_mouse, True)
self.update_appearance()
return True
def ToogleFullScreen(self,event):
if GetApp().IsFullScreen:
ui_utils.GetFullscreenDialog().CloseDialog()
def ButtonRelease(self,event):
self.ActivateView(event)
self.RecordTrack()
@misc.update_toolbar
@docposition.jumpto
def RecordTrack(self):
pass
def ActivateView(self,event):
self.SetFocus()
        #Make this view the active view
GetApp().GetDocumentManager().ActivateView(self)
        #Update the line and column numbers in the status bar
self.set_line_and_column()
def _cmd_zoom_with_mouse(self, event):
if event.delta > 0:
self.UpdateFontSize(1)
else:
self.UpdateFontSize(-1)
def set_line_and_column(self):
        #Get the current line and column position
line, column = self._textEditor.GetCurrentLineColumn()
GetApp().MainFrame.status_bar.SetColumnNumber(column+1)
GetApp().MainFrame.status_bar.SetLineNumber(line)
def OnModify(self, event):
self.GetDocument().Modify(self.IsModified())
def SetValue(self, value,keep_undo=False):
self.GetCtrl().set_content(value)
        #After loading a file, reset the undo stack so the load cannot be undone
if not keep_undo:
self.GetCtrl().edit_reset()
self._text_frame.update_gutter()
self.GetCtrl().after(100,self.CheckEol,value)
def CheckEol(self,value):
end_line = self.GetCtrl().get_line_col(self.GetCtrl().index('end'))[0]
check_mixed_eol = utils.profile_get_int("check_mixed_eol",False)
        #When checking for mixed line endings, scan every line; otherwise only the first 10 lines are used to determine the EOL style
if not check_mixed_eol:
end_line = min(end_line,10)
mixed = False
tmp = None
line_eol = None
for cur in range(1, end_line):
txt = self.GetCtrl().get('%i.0' % cur, '%i.end' % cur)
txt2 = self.GetCtrl().get('%i.0' % cur, '%i.end+1c' % cur)
if txt.endswith(consts.EOL_DIC[consts.EOL_CR]) and txt2.endswith(consts.EOL_DIC[consts.EOL_LF]):
self.GetCtrl().SetEol(consts.EOL_CRLF)
line_eol = consts.EOL_CRLF
            #The last line always gets a trailing \n appended, so exclude it
elif txt2.endswith(consts.EOL_DIC[consts.EOL_LF]) and cur != (end_line-1):
self.GetCtrl().SetEol(consts.EOL_LF)
line_eol = consts.EOL_LF
elif txt.endswith(consts.EOL_DIC[consts.EOL_CR]):
self.GetCtrl().SetEol(consts.EOL_CR)
line_eol = consts.EOL_CR
if check_mixed_eol:
if line_eol and not tmp:
tmp = line_eol
elif tmp:
if line_eol != tmp:
mixed = True
break
if mixed:
dlg = textformat.EOLFormatDlg(self.GetFrame(),self)
if dlg.ShowModal() == constants.ID_OK:
self.GetCtrl().SetEol(dlg.eol)
self.GetCtrl().edit_modified(True)
@docposition.jump
def GotoLine(self,line):
self.GetCtrl().GotoLine(line)
self.set_line_and_column()
@docposition.jump
def GotoPos(self,line,col):
self.GetCtrl().GotoPos(line,col)
def IsModified(self):
return self.GetCtrl().edit_modified()
def SetModifyFalse(self):
self.GetCtrl().edit_modified(False)
def SetFocus(self):
self._text_frame.focus_set()
def update_appearance(self):
self._text_frame.set_gutter_visibility(utils.profile_get_int("TextViewLineNumbers", True))
        #Set the length of the right-edge guide line
view_right_edge = utils.profile_get_int("TextViewRightEdge", True)
if view_right_edge:
line_length_margin = utils.profile_get_int("TextEditorEdgeGuideWidth", consts.DEFAULT_EDGE_GUIDE_WIDTH)
else:
line_length_margin = 0
self._text_frame.set_line_length_margin(line_length_margin)
tag_current_line = utils.profile_get_int("TextHighlightCaretLine", True)
self.GetCtrl().SetTagCurrentLine(tag_current_line)
        #Refresh the syntax highlighting
self.GetCtrl().event_generate("<<UpdateAppearance>>")
def GetValue(self):
return self.GetCtrl().GetValue()
    #Close the document and its tab page
def OnClose(self, deleteWindow = True):
if not core.View.OnClose(self, deleteWindow):
return False
document = self.GetDocument()
if document.IsWatched:
document.FileWatcher.RemoveFileDoc(document)
        #When closing the document, record the caret position so the file reopens at the same spot next time
if not document.IsNewDocument:
docposition.DocMgr.AddRecord([document.GetFilename(),
self.GetCtrl().GetCurrentLineColumn()])
self.Activate(False)
if deleteWindow and self.GetFrame():
self.GetFrame().Destroy()
return True
def check_for_external_changes(self):
if self._asking_about_external_change:
return
self._asking_about_external_change = True
if self._alarm_event == FileEventHandler.FILE_MODIFY_EVENT:
ret = messagebox.askyesno(_("Reload.."),_("File \"%s\" has already been modified outside,Do You Want to reload it?") % self.GetDocument().GetFilename(),
parent = self.GetFrame())
if ret == True:
document = self.GetDocument()
document.OnOpenDocument(document.GetFilename())
elif self._alarm_event == FileEventHandler.FILE_MOVED_EVENT or \
self._alarm_event == FileEventHandler.FILE_DELETED_EVENT:
ret = messagebox.askyesno( _("Keep Document.."),_("File \"%s\" has already been moved or deleted outside,Do You Want to keep it in Editor?") % self.GetDocument().GetFilename(),
parent=self.GetFrame())
document = self.GetDocument()
if ret == True:
document.Modify(True)
else:
document.DeleteAllViews()
self._asking_about_external_change = False
misc.AlarmEventView.check_for_external_changes(self)
def ZoomView(self,delta = 0):
self.UpdateFontSize(delta)
def UpdateFontSize(self, delta = 0):
editor_font = tkfont.nametofont("EditorFont")
default_editor_font_size = editor_font['size']
editor_font_size = default_editor_font_size
if delta != 0:
editor_font_size += delta
self.UpdateFont(editor_font_size)
# editor_font['size'] = editor_font_size
# self.GetCtrl().configure(font=editor_font)
def UpdateFont(self,size=-1,font=""):
font_list = ["EditorFont","BoldEditorFont","ItalicEditorFont","BoldItalicEditorFont"]
for font_name in font_list:
editor_font = tkfont.nametofont(font_name)
if size != -1:
editor_font['size'] = size
if font != "":
editor_font['family'] = font
if font_name == "EditorFont":
self.GetCtrl().configure(font=editor_font)
def AddText(self,txt,pos=None):
if pos == None:
line,col = self.GetCtrl().GetCurrentLineColumn()
else:
line,col = pos
self.GetCtrl().insert("insert", txt,"%d.%d" % (line,col))
def UpdateUI(self, command_id):
if command_id == constants.ID_SAVE:
return self.GetDocument().IsModified()
elif command_id in [constants.ID_INSERT_COMMENT_TEMPLATE,constants.ID_INSERT_DECLARE_ENCODING,constants.ID_UNITTEST,constants.ID_COMMENT_LINES,constants.ID_UNCOMMENT_LINES,\
constants.ID_RUN,constants.ID_DEBUG,constants.ID_GOTO_DEFINITION,constants.ID_SET_EXCEPTION_BREAKPOINT,constants.ID_STEP_INTO,constants.ID_STEP_NEXT,constants.ID_RUN_LAST,\
constants.ID_CHECK_SYNTAX,constants.ID_SET_PARAMETER_ENVIRONMENT,constants.ID_DEBUG_LAST,constants.ID_START_WITHOUT_DEBUG,constants.ID_AUTO_COMPLETE,constants.ID_TOGGLE_BREAKPOINT]:
return False
elif command_id == constants.ID_UNDO:
return self.GetCtrl().CanUndo()
elif command_id == constants.ID_REDO:
return self.GetCtrl().CanRedo()
elif command_id in[ constants.ID_CUT,constants.ID_COPY,constants.ID_CLEAR]:
return self.GetCtrl().CanCopy()
elif command_id == constants.ID_PASTE:
return True
elif command_id in [constants.ID_UPPERCASE,constants.ID_LOWERCASE,constants.ID_CLEAN_WHITESPACE,constants.ID_TAB_SPACE,constants.ID_SPACE_TAB]:
return self.GetCtrl().HasSelection()
elif command_id in [constants.ID_EOL_MAC,constants.ID_EOL_UNIX,constants.ID_EOL_WIN]:
GetApp().MainFrame.GetNotebook().eol_var.set(self.GetCtrl().eol)
return True
class TextCtrl(ui_base.TweakableText):
"""Text widget with extra navigation and editing aids.
Provides more comfortable deletion, indentation and deindentation,
and undo handling. Not specific to Python code.
Most of the code is adapted from idlelib.EditorWindow.
    use_edit_tester: whether the text widget's popup edit menu uses the main edit menu's tester functions; defaults to False.
    use_edit_image: whether to reuse the main edit menu's icons; defaults to False.
    tag_current_line: whether to highlight the current line.
"""
def __init__(self, master=None, style="Text", tag_current_line=True,
indent_with_tabs=False, replace_tabs=False, cnf={},use_edit_tester=False,use_edit_image=False, **kw):
# Parent class shouldn't autoseparate
# TODO: take client provided autoseparators value into account
kw["autoseparators"] = False
self._style = style
self._original_options = kw.copy()
self._use_edit_tester = use_edit_tester
self._use_edit_image = use_edit_image
self._popup_menu = None
ui_base.TweakableText.__init__(self,master=master, cnf=cnf, **kw)
self.tabwidth = 8 # See comments in idlelib.editor.EditorWindow
self.indent_width = 4
self.indent_with_tabs = indent_with_tabs
self.replace_tabs = replace_tabs
self.eol = GetApp().MainFrame.GetNotebook().eol_var.get()
self._last_event_kind = None
self._last_key_time = None
self._bind_editing_aids()
self._bind_movement_aids()
self._bind_selection_aids()
self._bind_undo_aids()
self._bind_mouse_aids()
self._ui_theme_change_binding = self.bind(
"<<ThemeChanged>>", self.reload_ui_theme, True
)
self._initial_configuration = self.configure()
self._regular_insertwidth = self["insertwidth"]
self._reload_theme_options()
self.SetTagCurrentLine(tag_current_line)
#是否高亮当前行
self.bind("<<CursorMove>>", self._tag_current_line, True)
self.bind("<<TextChange>>", self._tag_current_line, True)
if tag_current_line:
self._tag_current_line()
def GetEol(self):
return self.eol
def SetEol(self,eol):
self.eol = eol
def SetTagCurrentLine(self,tag_current_line=False):
self._should_tag_current_line = tag_current_line
def _bind_mouse_aids(self):
        #Right mouse button click event
self.bind("<Button-3>", self.on_secondary_click)
def _bind_editing_aids(self):
def if_not_readonly(fun):
def dispatch(event):
if not self.is_read_only():
return fun(event)
else:
return "break"
return dispatch
self.bind("<Control-BackSpace>", if_not_readonly(self.delete_word_left), True)
self.bind("<Control-Delete>", if_not_readonly(self.delete_word_right), True)
        #The text widget binds these shortcuts by default; rebind them to suppress the default behavior
self.bind("<Control-d>", self._redirect_ctrld, True)
self.bind("<Control-t>", self._redirect_ctrlt, True)
self.bind("<Control-k>", self._redirect_ctrlk, True)
self.bind("<Control-h>", self._redirect_ctrlh, True)
self.bind("<Control-a>", self._redirect_ctrla, True)
        #Tk 8.5.15 binds Control-f by default; if the Tk version is older than 8.6.6, the event must be rebound
if strutils.compare_version(pyutils.get_tk_version_str(),("8.6.6")) < 0:
self.bind("<Control-f>", self._redirect_ctrlf, True)
self.bind("<BackSpace>", if_not_readonly(self.perform_smart_backspace), True)
self.bind("<Return>", if_not_readonly(self.perform_return), True)
# self.bind("<KP_Enter>", if_not_readonly(self.perform_return), True)
self.bind("<Tab>", if_not_readonly(self.perform_tab), True)
try:
# Is needed on eg. Ubuntu with Estonian keyboard
self.bind("<ISO_Left_Tab>", if_not_readonly(self.perform_tab), True)
except Exception:
pass
if utils.is_windows():
self.bind("<KeyPress>", self._insert_untypable_characters_on_windows, True)
def _bind_movement_aids(self):
self.bind("<Home>", self.perform_smart_home, True)
self.bind("<Left>", self.move_to_edge_if_selection(0), True)
self.bind("<Right>", self.move_to_edge_if_selection(1), True)
self.bind("<Next>", self.perform_page_down, True)
self.bind("<Prior>", self.perform_page_up, True)
def _bind_selection_aids(self):
self.bind("<Control-a>", self.select_all, True)
def _bind_undo_aids(self):
self.bind("<<Undo>>", self._on_undo, True)
self.bind("<<Redo>>", self._on_redo, True)
self.bind("<<Cut>>", self._on_cut, True)
self.bind("<<Copy>>", self._on_copy, True)
self.bind("<<Paste>>", self._on_paste, True)
self.bind("<FocusIn>", self._on_get_focus, True)
self.bind("<FocusOut>", self._on_lose_focus, True)
self.bind("<Key>", self._on_key_press, True)
self.bind("<1>", self._on_mouse_click, True)
self.bind("<2>", self._on_mouse_click, True)
self.bind("<3>", self._on_mouse_click, True)
def _redirect_ctrld(self, event):
# I want to disable the deletion effect of Ctrl-D in the text but still
# keep the event for other purposes
self.event_generate("<<CtrlDInText>>")
return "break"
def _redirect_ctrlt(self, event):
# I want to disable the swap effect of Ctrl-T in the text but still
# keep the event for other purposes
self.event_generate("<<CtrlTInText>>")
return "break"
def _redirect_ctrlf(self,event):
self.event_generate("<<CtrlFInText>>")
return "break"
def _redirect_ctrla(self,event):
self.event_generate("<<CtrlAInText>>")
return "break"
def _redirect_ctrlk(self, event):
# I want to disable the swap effect of Ctrl-K in the text but still
# keep the event for other purposes
self.event_generate("<<CtrlKInText>>")
return "break"
def _redirect_ctrlh(self, event):
# I want to disable the swap effect of Ctrl-H in the text but still
# keep the event for other purposes
self.event_generate("<<CtrlHInText>>")
return "break"
def tag_reset(self, tag_name):
empty_conf = {key: "" for key in self.tag_configure(tag_name)}
        self.tag_configure(tag_name, **empty_conf)
def select_lines(self, first_line, last_line):
self.tag_remove("sel", "1.0", tk.END)
self.tag_add("sel", "%s.0" % first_line, "%s.end" % last_line)
def delete_word_left(self, event):
self.event_generate("<Meta-Delete>")
self.edit_separator()
return "break"
def delete_word_right(self, event):
self.event_generate("<Meta-d>")
self.edit_separator()
return "break"
def perform_smart_backspace(self, event):
self._log_keypress_for_undo(event)
text = self
first, last = self.get_selection_indices()
if first and last:
text.delete(first, last)
text.mark_set("insert", first)
return "break"
# Delete whitespace left, until hitting a real char or closest
# preceding virtual tab stop.
chars = text.get("insert linestart", "insert")
if chars == "":
if text.compare("insert", ">", "1.0"):
# easy: delete preceding newline
text.delete("insert-1c")
else:
text.bell() # at start of buffer
return "break"
if (
chars.strip() != ""
): # there are non-whitespace chars somewhere to the left of the cursor
# easy: delete preceding real char
text.delete("insert-1c")
self._log_keypress_for_undo(event)
return "break"
# Ick. It may require *inserting* spaces if we back up over a
# tab character! This is written to be clear, not fast.
have = len(chars.expandtabs(self.tabwidth))
assert have > 0
want = ((have - 1) // self.indent_width) * self.indent_width
# Debug prompt is multilined....
# if self.context_use_ps1:
# last_line_of_prompt = sys.ps1.split('\n')[-1]
# else:
last_line_of_prompt = ""
ncharsdeleted = 0
while 1:
if chars == last_line_of_prompt:
break
chars = chars[:-1]
ncharsdeleted = ncharsdeleted + 1
have = len(chars.expandtabs(self.tabwidth))
if have <= want or chars[-1] not in " \t":
break
text.delete("insert-%dc" % ncharsdeleted, "insert")
if have < want:
text.insert("insert", " " * (want - have))
return "break"
def perform_midline_tab(self, event=None):
"如果要实现tab键自动完成单词功能,请重写该方法"
#默认实现还是tab键缩进功能
return self.perform_smart_tab(event)
def perform_smart_tab(self, event=None):
self._log_keypress_for_undo(event)
# if intraline selection:
# delete it
# elif multiline selection:
# do indent-region
# else:
# indent one level
first, last = self.get_selection_indices()
if first and last:
if index2line(first) != index2line(last):
return self.indent_region(event)
self.delete(first, last)
self.mark_set("insert", first)
prefix = self.get("insert linestart", "insert")
raw, effective = classifyws(prefix, self.tabwidth)
# tab to the next 'stop' within or to right of line's text:
if self.indent_with_tabs:
pad = "\t"
else:
effective = len(prefix.expandtabs(self.tabwidth))
n = self.indent_width
pad = " " * (n - effective % n)
self.insert("insert", pad)
self.see("insert")
return "break"
def get_cursor_position(self):
return map(int, self.index("insert").split("."))
def get_line_count(self):
return list(map(int, self.index("end-1c").split(".")))[0]
def perform_return(self, event):
self.insert("insert", "\n")
return "break"
def GetIndent(self):
return self.indent_width
def perform_page_down(self, event):
# if last line is visible then go to last line
# (by default it doesn't move then)
try:
last_visible_idx = self.index("@0,%d" % self.winfo_height())
row, _ = map(int, last_visible_idx.split("."))
line_count = self.get_line_count()
if (
row == line_count or row == line_count - 1
): # otherwise tk doesn't show last line
self.mark_set("insert", "end")
except Exception:
traceback.print_exc()
def perform_page_up(self, event):
# if first line is visible then go there
# (by default it doesn't move then)
try:
first_visible_idx = self.index("@0,0")
row, _ = map(int, first_visible_idx.split("."))
if row == 1:
self.mark_set("insert", "1.0")
except Exception:
traceback.print_exc()
def compute_smart_home_destination_index(self):
"""Is overridden in shell"""
line = self.get("insert linestart", "insert lineend")
for insertpt in range(len(line)):
if line[insertpt] not in (" ", "\t"):
break
else:
insertpt = len(line)
lineat = int(self.index("insert").split(".")[1])
if insertpt == lineat:
insertpt = 0
return "insert linestart+" + str(insertpt) + "c"
def perform_smart_home(self, event):
if (event.state & 4) != 0 and event.keysym == "Home":
# state&4==Control. If <Control-Home>, use the Tk binding.
return None
dest = self.compute_smart_home_destination_index()
if (event.state & 1) == 0:
# shift was not pressed
self.tag_remove("sel", "1.0", "end")
else:
if not self.index_sel_first():
# there was no previous selection
self.mark_set("my_anchor", "insert")
else:
if self.compare(self.index_sel_first(), "<", self.index("insert")):
self.mark_set("my_anchor", "sel.first") # extend back
else:
self.mark_set("my_anchor", "sel.last") # extend forward
first = self.index(dest)
last = self.index("my_anchor")
if self.compare(first, ">", last):
first, last = last, first
self.tag_remove("sel", "1.0", "end")
self.tag_add("sel", first, last)
self.mark_set("insert", dest)
self.see("insert")
return "break"
def move_to_edge_if_selection(self, edge_index):
"""Cursor move begins at start or end of selection
When a left/right cursor key is pressed create and return to Tkinter a
function which causes a cursor move from the associated edge of the
selection.
"""
def move_at_edge(event):
if (
self.has_selection() and (event.state & 5) == 0
): # no shift(==1) or control(==4) pressed
try:
self.mark_set("insert", ("sel.first+1c", "sel.last-1c")[edge_index])
except tk.TclError:
pass
return move_at_edge
def perform_tab(self, event=None):
self._log_keypress_for_undo(event)
if (
event.state & 0x0001
): # shift is pressed (http://stackoverflow.com/q/32426250/261181)
return self.dedent_region(event)
else:
# check whether there are letters before cursor on this line
index = self.index("insert")
left_text = self.get(index + " linestart", index)
if left_text.strip() == "" or self.has_selection():
return self.perform_smart_tab(event)
else:
                #If there are non-whitespace characters to the left of the cursor, invoke autocompletion
return self.perform_midline_tab(event)
def indent_region(self, event=None):
return self._change_indentation(True)
def dedent_region(self, event=None):
return self._change_indentation(False)
def _change_indentation(self, increase=True):
head, tail, chars, lines = self._get_region()
# Text widget plays tricks if selection ends on last line
# and content doesn't end with empty line,
text_last_line = index2line(self.index("end-1c"))
sel_last_line = index2line(tail)
if sel_last_line >= text_last_line:
while not self.get(head, "end").endswith("\n\n"):
self.insert("end", "\n")
for pos in range(len(lines)):
line = lines[pos]
if line:
raw, effective = classifyws(line, self.tabwidth)
if increase:
effective = effective + self.indent_width
else:
effective = max(effective - self.indent_width, 0)
lines[pos] = self._make_blanks(effective) + line[raw:]
self._set_region(head, tail, chars, lines)
return "break"
def select_all(self, event):
self.tag_remove("sel", "1.0", tk.END)
self.tag_add("sel", "1.0", tk.END)
def set_read_only(self, value):
if value == self.is_read_only():
return
ui_base.TweakableText.set_read_only(self, value)
self._reload_theme_options()
if self._should_tag_current_line:
self._tag_current_line()
def _reindent_to(self, column):
# Delete from beginning of line to insert point, then reinsert
# column logical (meaning use tabs if appropriate) spaces.
if self.compare("insert linestart", "!=", "insert"):
self.delete("insert linestart", "insert")
if column:
self.insert("insert", self._make_blanks(column))
def _get_region(self):
first, last = self.get_selection_indices()
if first and last:
head = self.index(first + " linestart")
tail = self.index(last + "-1c lineend +1c")
else:
head = self.index("insert linestart")
tail = self.index("insert lineend +1c")
chars = self.get(head, tail)
lines = chars.split("\n")
return head, tail, chars, lines
def _set_region(self, head, tail, chars, lines):
newchars = "\n".join(lines)
if newchars == chars:
self.bell()
return
self.tag_remove("sel", "1.0", "end")
self.mark_set("insert", head)
self.delete(head, tail)
self.insert(head, newchars)
self.tag_add("sel", head, "insert")
def _log_keypress_for_undo(self, e):
if e is None:
return
# NB! this may not execute if the event is cancelled in another handler
event_kind = self._get_event_kind(e)
if (
event_kind != self._last_event_kind
or e.char in ("\r", "\n", " ", "\t")
or e.keysym in ["Return", "KP_Enter"]
or time.time() - self._last_key_time > 2
):
self.edit_separator()
self._last_event_kind = event_kind
self._last_key_time = time.time()
def _get_event_kind(self, event):
if event.keysym in ("BackSpace", "Delete"):
return "delete"
elif event.char:
return "insert"
else:
# eg. e.keysym in ("Left", "Up", "Right", "Down", "Home", "End", "Prior", "Next"):
return "other_key"
def _make_blanks(self, n):
# Make string that displays as n leading blanks.
if self.indent_with_tabs:
ntabs, nspaces = divmod(n, self.tabwidth)
return "\t" * ntabs + " " * nspaces
else:
return " " * n
def _on_undo(self, e):
self._last_event_kind = "undo"
def _on_redo(self, e):
self._last_event_kind = "redo"
def _on_cut(self, e):
self._last_event_kind = "cut"
self.edit_separator()
def _on_copy(self, e):
self._last_event_kind = "copy"
self.edit_separator()
def _on_paste(self, e):
if self.is_read_only():
return
try:
if self.has_selection():
self.direct_delete("sel.first", "sel.last")
except Exception:
pass
self._last_event_kind = "paste"
self.edit_separator()
self.see("insert")
self.after_idle(lambda: self.see("insert"))
def _on_get_focus(self, e):
self._last_event_kind = "get_focus"
self.edit_separator()
def _on_lose_focus(self, e):
self._last_event_kind = "lose_focus"
self.edit_separator()
def _on_key_press(self, e):
return self._log_keypress_for_undo(e)
def _on_mouse_click(self, event):
self.edit_separator()
def _tag_current_line(self, event=None):
self.tag_remove("current_line", "1.0", "end")
# Let's show current line only with readable text
# (this fits well with Thonny debugger,
# otherwise debugger focus box and current line interact in an ugly way)
if self._should_tag_current_line and not self.is_read_only():
# we may be on the same line as with prev event but tag needs extension
lineno = int(self.index("insert").split(".")[0])
self.tag_add("current_line", str(lineno) + ".0", str(lineno + 1) + ".0")
def on_secondary_click(self, event=None):
"Use this for invoking context menu"
self.focus_set()
        #If a popup menu already exists, destroy it first
if self._popup_menu is not None:
self._popup_menu.destroy()
self._popup_menu = None
self.CreatePopupMenu()
self._popup_menu.configure(misc.get_style_configuration("Menu"))
self._popup_menu.tk_popup(event.x_root, event.y_root)
def CreatePopupMenu(self):
def default_tester():
return False
common_kwargs = {}
        #Whether to use the main edit menu's tester callbacks
if not self._use_edit_tester:
common_kwargs.update({
'tester':default_tester,
})
        #Whether to use the main edit menu's icons
if not self._use_edit_image:
common_kwargs.update({
'image':None
})
self._popup_menu = tkmenu.PopupMenu(self,**misc.get_style_configuration("Menu"))
self._popup_menu.AppendMenuItem(GetApp().Menubar.GetEditMenu().FindMenuItem(consts.ID_UNDO),\
handler=GetApp().MainFrame.CreateEditCommandHandler("<<Undo>>"),**common_kwargs)
self._popup_menu.AppendMenuItem(GetApp().Menubar.GetEditMenu().FindMenuItem(consts.ID_REDO),\
handler=GetApp().MainFrame.CreateEditCommandHandler("<<Redo>>"),**common_kwargs)
self._popup_menu.add_separator()
args = {}
if not self._use_edit_image:
args.update({
'image':None
})
if not self._use_edit_tester:
args.update({
'tester':self.CanCut,
})
self._popup_menu.AppendMenuItem(GetApp().Menubar.GetEditMenu().FindMenuItem(consts.ID_CUT),\
handler=GetApp().MainFrame.CreateEditCommandHandler("<<Cut>>"),**args)
if not self._use_edit_tester:
args.update({
'tester':self.CanCopy,
})
self._popup_menu.AppendMenuItem(GetApp().Menubar.GetEditMenu().FindMenuItem(consts.ID_COPY),\
handler=GetApp().MainFrame.CreateEditCommandHandler("<<Copy>>"),**args)
if not self._use_edit_tester:
args.update({
'tester':self.CanPaste,
})
self._popup_menu.AppendMenuItem(GetApp().Menubar.GetEditMenu().FindMenuItem(consts.ID_PASTE),\
handler=GetApp().MainFrame.CreateEditCommandHandler("<<Paste>>"),**args)
if not self._use_edit_tester:
args.update({
'tester':self.CanDelete,
})
self._popup_menu.AppendMenuItem(GetApp().Menubar.GetEditMenu().FindMenuItem(consts.ID_CLEAR),\
handler=self.OnDelete,**args)
sel_args = {}
if not self._use_edit_tester:
sel_args['tester'] = None
self._popup_menu.AppendMenuItem(GetApp().Menubar.GetEditMenu().FindMenuItem(consts.ID_SELECTALL),\
handler=GetApp().MainFrame.SelectAll,**sel_args)
self._popup_menu["postcommand"] = lambda: self._popup_menu._update_menu()
def OnDelete(self):
'''
        Delete the selected text.
'''
start,end = self.get_selection()
self.delete(start, end)
def CanCut(self):
if self.is_read_only() or self.IsStateDisabled():
return False
return self.HasSelection()
def IsStateDisabled(self):
return self['state'] == tk.DISABLED
def CanCopy(self):
return self.HasSelection()
def CanDelete(self):
if self.is_read_only() or self.IsStateDisabled():
return False
return self.HasSelection()
def CanPaste(self):
if self.is_read_only() or self.IsStateDisabled():
return False
return True
def CanUndo(self):
if self.is_read_only() or self.IsStateDisabled():
return False
return True
def CanRedo(self):
if self.is_read_only() or self.IsStateDisabled():
return False
return True
def reload_ui_theme(self, event=None):
self._reload_theme_options(force=True)
def _reload_theme_options(self, force=False):
style = ttk.Style()
states = []
if self.is_read_only():
states.append("readonly")
# Following crashes when a combobox is focused
# if self.focus_get() == self:
# states.append("focus")
if "background" not in self._initial_configuration or force:
background = style.lookup(self._style, "background", states)
if background:
self.configure(background=background)
if "foreground" not in self._initial_configuration or force:
foreground = style.lookup(self._style, "foreground", states)
if foreground:
self.configure(foreground=foreground)
self.configure(insertbackground=foreground)
def _insert_untypable_characters_on_windows(self, event):
if event.state == 131084: # AltGr or Ctrl+Alt
lang_id = get_keyboard_language()
char = _windows_altgr_chars_by_lang_id_and_keycode.get(lang_id, {}).get(
event.keycode, None
)
if char is not None:
self.insert("insert", char)
def destroy(self):
self.unbind("<<ThemeChanged>>", self._ui_theme_change_binding)
ui_base.TweakableText.destroy(self)
def direct_insert(self, index, chars, tags=None, **kw):
try:
concrete_index = self.index(index)
chars = self.check_convert_tabs_to_spaces(chars)
ui_base.TweakableText.direct_insert(self,index, chars, tags, **kw)
finally:
GetApp().event_generate(
"TextInsert",
index=concrete_index,
text=chars,
tags=tags,
text_widget=self,
)
def direct_delete(self, index1, index2=None, **kw):
try:
# index1 may be eg "sel.first" and it doesn't make sense *after* deletion
concrete_index1 = self.index(index1)
if index2 is not None:
concrete_index2 = self.index(index2)
else:
concrete_index2 = None
return ui_base.TweakableText.direct_delete(
self, index1, index2=index2, **kw
)
finally:
GetApp().event_generate(
"TextDelete",
index1=concrete_index1,
index2=concrete_index2,
text_widget=self,
)
def check_convert_tabs_to_spaces(self, chars):
'''
        Check whether the text being inserted contains tab characters and, if so, prompt to convert them.
'''
tab_count = chars.count("\t")
if not self.replace_tabs or tab_count == 0:
return chars
else:
if messagebox.askyesno(_("Convert tabs to spaces?"),
_("NovalIDE (according to Python recommendation) uses spaces for indentation, ")
+ _("but the text you are about to insert/open contains %d tab characters. ") % tab_count
+ _("To avoid confusion, it's better to convert them into spaces (unless you know they should be kept as tabs).\n\n" )
+ _("Do you want me to replace each tab with %d spaces?\n\n") % self.indent_width,
parent=tk._default_root):
return chars.expandtabs(self.indent_width)
else:
return chars
def GetLineCount(self):
        #Don't use "end" because Text always appends a trailing newline; "end-1c" avoids counting that extra line
text_line_count = int(self.index("end-1c").split(".")[0])
return text_line_count
def GetCurrentLineColumn(self):
line, column = self.get_line_col(self.index(tk.INSERT))
return line,column
def get_line_col(self,index):
'''Return (line, col) tuple of ints from 'line.col' string.'''
line, col = map(int, index.split(".")) # Fails on invalid index
return line, col
def GetCurrentLine(self):
return self.GetCurrentLineColumn()[0]
def GetCurrentColumn(self):
return self.GetCurrentLineColumn()[1]
def GetCurrentPos(self):
return self.get_line_col(self.index(tk.INSERT))
def GetLineText(self,line):
return self.get("%d.0" % line,"%d.end"%line)
def GetValue(self):
value = self.get(
"1.0", "end-1c"
) # -1c because Text always adds a newline itself
#The tkinter Text widget only supports \n line endings, so all \r characters must be stripped when saving the file
chars = value.replace("\r", "")
#If the EOL mode is CRLF (Windows), convert to Windows line endings
if self.eol == consts.EOL_CRLF:
chars = chars.replace("\n", "\r\n")
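# e.g. "a\nb" becomes "a\r\nb" when the document EOL mode is CRLF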
return chars
def GotoLine(self,lineno):
assert(type(lineno) == int)
if lineno <=0:
lineno = 1
self.mark_set("insert", "%d.0" % lineno)
self.see("insert")
self.focus_set()
def ScrolltoEnd(self):
'''
Scroll to the last line of the text.
'''
self.mark_set("insert", "end")
self.see("insert")
def get_selection(self):
'''Return tuple of 'line.col' indexes from selection or insert mark.
'''
try:
first = self.index("sel.first")
last = self.index("sel.last")
except TclError:
first = last = None
if not first:
first = self.index("insert")
if not last:
last = first
return first, last
def GotoPos(self,lineno,colno):
self.mark_set("insert", "%d.%d" % (lineno,colno))
self.see("insert")
self.focus_set()
def HasSelection(self):
start,end = self.get_selection()
return start!=end
def GetTopLines(self,line_num):
lines = []
for i in range(line_num):
#Line numbers start at 1
lines.append(self.GetLineText(i+1))
return lines
def GetSelectionText(self):
first,last = self.get_selection()
if first == last:
return ''
return self.get(first,last)
def do_replace(self,text):
first,last = self.get_selection()
self.mark_set("insert", first)
self.delete(first, last)
if text:
self.insert(first, text)
def AddText(self,txt):
self.insert("insert", txt)
def tabify_region(self):
head, tail, chars, lines = self._get_region()
tabwidth = 4
if tabwidth is None: return
for pos in range(len(lines)):
line = lines[pos]
if line:
raw, effective = classifyws(line, tabwidth)
ntabs, nspaces = divmod(effective, tabwidth)
lines[pos] = '\t' * ntabs + ' ' * nspaces + line[raw:]
self._set_region(head, tail, chars, lines)
def untabify_region(self):
head, tail, chars, lines = self._get_region()
tabwidth = 4
if tabwidth is None: return
for pos in range(len(lines)):
lines[pos] = lines[pos].expandtabs(tabwidth)
self._set_region(head, tail, chars, lines)
class SyntaxTextCtrl(TextCtrl,findtext.FindTextEngine):
'''
Syntax-highlighting text control that also provides find/search functionality.
'''
def __init__(self, master=None, cnf={}, **kw):
TextCtrl.__init__(self, master, cnf=cnf, **kw)
self.replace_tabs = utils.profile_get_int("check_text_tabs",True)
findtext.FindTextEngine.__init__(self)
self.UpdateSyntaxTheme()
def UpdateSyntaxTheme(self):
self.SetSyntax(syntax.SyntaxThemeManager().SYNTAX_THEMES)
def SetSyntax(self,syntax_options):
# apply new options
for tag_name in syntax_options:
if tag_name == "TEXT":
self.configure(**syntax_options[tag_name])
break
self.SetOtherOptions(syntax_options)
def SetOtherOptions(self,syntax_options):
if "current_line" in syntax_options:
self.tag_lower("current_line")
self.tag_raise("sel")
def _reload_theme_options(self,force=False):
pass
class TextOptionsPanel(ui_utils.BaseConfigurationPanel):
def __init__(self, parent, hasWordWrap = False):
ui_utils.BaseConfigurationPanel.__init__(self, parent)
self._hasWordWrap = hasWordWrap
self._hasTabs = False
if self._hasWordWrap:
# Note: the original code here called leftover wxPython APIs (SetValue, wx.ConfigBase_Get)
# that do not exist in this tkinter port; a tk variable is assumed as the equivalent.
self._wordWrapVar = tk.IntVar(value=utils.profile_get_int("EditorWordWrap", False))
self._wordWrapCheckBox = ttk.Checkbutton(self, text=_("Wrap words inside text area"), variable=self._wordWrapVar)
# self._viewWhitespaceCheckBox = wx.CheckBox(self, -1, _("Show whitespace"))
# self._viewWhitespaceCheckBox.SetValue(config.ReadInt(self._configPrefix + "EditorViewWhitespace", False))
# self._viewEOLCheckBox = wx.CheckBox(self, -1, _("Show end of line markers"))
# self._viewEOLCheckBox.SetValue(config.ReadInt(self._configPrefix + "EditorViewEOL", False))
# self._viewIndentationGuideCheckBox = wx.CheckBox(self, -1, _("Show indentation guides"))
# self._viewIndentationGuideCheckBox.SetValue(config.ReadInt(self._configPrefix + "EditorViewIndentationGuides", False))
self._viewRightEdgeVar = tk.IntVar(value=utils.profile_get_int("TextViewRightEdge", True))
viewRightEdgeCheckBox = ttk.Checkbutton(self,text=_("Show right edge"),variable=self._viewRightEdgeVar,command=self.CheckViewRightEdge)
viewRightEdgeCheckBox.pack(padx=consts.DEFAUT_CONTRL_PAD_X,fill="x",pady=(consts.DEFAUT_CONTRL_PAD_Y,0))
self._viewLineNumbersVar = tk.IntVar(value=utils.profile_get_int("TextViewLineNumbers", True))
viewLineNumbersCheckBox = ttk.Checkbutton(self,text=_("Show line numbers"),variable=self._viewLineNumbersVar)
viewLineNumbersCheckBox.pack(padx=consts.DEFAUT_CONTRL_PAD_X,fill="x")
self._highlightCaretLineVar = tk.IntVar(value=utils.profile_get_int("TextHighlightCaretLine", True))
highlightCaretLineCheckBox = ttk.Checkbutton(self,text=_("Highlight Caret Line"),variable=self._highlightCaretLineVar)
highlightCaretLineCheckBox.pack(padx=consts.DEFAUT_CONTRL_PAD_X,fill="x")
self._highlightParenthesesVar = tk.IntVar(value=utils.profile_get_int("TextHighlightParentheses", True))
highlightParenthesesCheckBox = ttk.Checkbutton(self,text=_("Highlight parentheses"),variable=self._highlightParenthesesVar)
highlightParenthesesCheckBox.pack(padx=consts.DEFAUT_CONTRL_PAD_X,fill="x")
self._highlightSyntaxVar = tk.IntVar(value=utils.profile_get_int("TextHighlightSyntax", True))
highlightSyntaxCheckBox = ttk.Checkbutton(self,text=_("Highlight syntax elements"),variable=self._highlightSyntaxVar)
highlightSyntaxCheckBox.pack(padx=consts.DEFAUT_CONTRL_PAD_X,fill="x")
# self._hasTabsVar = tk.IntVar(value=utils.profile_get_int("TextEditorUseTabs", False))
# hasTabsCheckBox = ttk.Checkbutton(self,text=_("Use spaces instead of tabs"),variable=self._hasTabsVar)
# hasTabsCheckBox.pack(padx=consts.DEFAUT_CONTRL_PAD_X,fill="x")
self._tabCompletionVar = tk.IntVar(value=utils.profile_get_int("TextTabCompletion", True))
tabCompletionCheckBox = ttk.Checkbutton(self,text=_("Allow code completion with tab-key"),variable=self._tabCompletionVar)
tabCompletionCheckBox.pack(padx=consts.DEFAUT_CONTRL_PAD_X,fill="x")
## row = ttk.Frame(self)
## indentWidthLabel = ttk.Label(row,text=_("Indent Width:"))
## indentWidthLabel.pack(side=tk.LEFT)
## self._indentWidthVar = tk.IntVar(value = utils.profile_get_int("TextEditorIndentWidth", 4))
## indentWidthChoice = ttk.Combobox(row, values = ["2", "4", "6", "8", "10"],textvariable=self._indentWidthVar)
## indentWidthChoice.pack(side=tk.LEFT)
## row.pack(padx=consts.DEFAUT_CONTRL_PAD_X,fill="x",pady=(consts.DEFAUT_CONTRL_PAD_Y,0))
self.checkTabsVar = tk.IntVar(value=utils.profile_get_int("check_text_tabs", True))
chkTabBox = ttk.Checkbutton(self, text=_("Warn when text contains Tabs"),variable=self.checkTabsVar)
chkTabBox.pack(padx=consts.DEFAUT_CONTRL_PAD_X,fill="x")
row = ttk.Frame(self)
edgeWidthLabel = ttk.Label(row, text= _("Edge Guide Width:"))
edgeWidthLabel.pack(side=tk.LEFT)
self._edgeWidthVar = tk.IntVar(value = utils.profile_get_int("TextEditorEdgeGuideWidth", consts.DEFAULT_EDGE_GUIDE_WIDTH))
self.edge_spin_ctrl = tk.Spinbox(row, from_=0, to=160,textvariable=self._edgeWidthVar)
self.edge_spin_ctrl.pack(side=tk.LEFT)
row.pack(padx=consts.DEFAUT_CONTRL_PAD_X,fill="x",pady=(consts.DEFAUT_CONTRL_PAD_Y,0))
## defaultEOLModelLabel = wx.StaticText(self, -1, _("Default EOL Mode:"))
## self.eol_model_combox = wx.ComboBox(self, -1,choices=EOLFormat.EOLFormatDlg.EOL_CHOICES,style= wx.CB_READONLY)
## if sysutilslib.isWindows():
## eol_mode = config.ReadInt(self._configPrefix + "EditorEOLMode", wx.stc.STC_EOL_CRLF)
## else:
## eol_mode = config.ReadInt(self._configPrefix + "EditorEOLMode", wx.stc.STC_EOL_LF)
## idx = EOLFormat.EOLFormatDlg.EOL_ITEMS.index(eol_mode)
## self.eol_model_combox.SetSelection(idx)
self.CheckViewRightEdge()
def CheckViewRightEdge(self):
if self._viewRightEdgeVar.get():
self.edge_spin_ctrl['state'] = tk.NORMAL
else:
self.edge_spin_ctrl['state'] = tk.DISABLED
def OnOK(self, optionsDialog):
doViewStuffUpdate = False
# doViewStuffUpdate = config.ReadInt(self._configPrefix + "EditorViewWhitespace", False) != self._viewWhitespaceCheckBox.GetValue()
# config.WriteInt(self._configPrefix + "EditorViewWhitespace", self._viewWhitespaceCheckBox.GetValue())
# doViewStuffUpdate = doViewStuffUpdate or config.ReadInt(self._configPrefix + "EditorViewEOL", False) != self._viewEOLCheckBox.GetValue()
# config.WriteInt(self._configPrefix + "EditorViewEOL", self._viewEOLCheckBox.GetValue())
#doViewStuffUpdate = doViewStuffUpdate or config.ReadInt(self._configPrefix + "EditorViewIndentationGuides", False) != self._viewIndentationGuideCheckBox.GetValue()
#config.WriteInt(self._configPrefix + "EditorViewIndentationGuides", self._viewIndentationGuideCheckBox.GetValue())
doViewStuffUpdate = doViewStuffUpdate or utils.profile_get_int("TextViewRightEdge", False) != self._viewRightEdgeVar.get()
utils.profile_set( "TextViewRightEdge", self._viewRightEdgeVar.get())
doViewStuffUpdate = doViewStuffUpdate or utils.profile_get_int("TextViewLineNumbers", True) != self._viewLineNumbersVar.get()
utils.profile_set("TextViewLineNumbers", self._viewLineNumbersVar.get())
doViewStuffUpdate = doViewStuffUpdate or utils.profile_get_int("TextHighlightCaretLine", True) != self._highlightCaretLineVar.get()
utils.profile_set("TextHighlightCaretLine", self._highlightCaretLineVar.get())
doViewStuffUpdate = doViewStuffUpdate or utils.profile_get_int("TextHighlightParentheses", True) != self._highlightParenthesesVar.get()
utils.profile_set("TextHighlightParentheses", self._highlightParenthesesVar.get())
doViewStuffUpdate = doViewStuffUpdate or utils.profile_get_int("TextHighlightSyntax", True) != self._highlightSyntaxVar.get()
utils.profile_set("TextHighlightSyntax", self._highlightSyntaxVar.get())
# if sysutilslib.isWindows():
# default_eol_mode = wx.stc.STC_EOL_CRLF
#else:
# default_eol_mode = wx.stc.STC_EOL_LF
#eol_mode = EOLFormat.EOLFormatDlg.EOL_ITEMS[self.eol_model_combox.GetSelection()]
#doViewStuffUpdate = doViewStuffUpdate or config.ReadInt(self._configPrefix + "EditorEOLMode", default_eol_mode) != eol_mode
#config.WriteInt(self._configPrefix + "EditorEOLMode", eol_mode)
if self._viewRightEdgeVar.get():
doViewStuffUpdate = doViewStuffUpdate or utils.profile_get_int("TextEditorEdgeGuideWidth", consts.DEFAULT_EDGE_GUIDE_WIDTH) != self._edgeWidthVar.get()
utils.profile_set("TextEditorEdgeGuideWidth", self._edgeWidthVar.get())
# if self._hasFolding:
# doViewStuffUpdate = doViewStuffUpdate or config.ReadInt(self._configPrefix + "EditorViewFolding", True) != self._viewFoldingCheckBox.GetValue()
# config.WriteInt(self._configPrefix + "EditorViewFolding", self._viewFoldingCheckBox.GetValue())
#if self._hasWordWrap:
# doViewStuffUpdate = doViewStuffUpdate or config.ReadInt(self._configPrefix + "EditorWordWrap", False) != self._wordWrapCheckBox.GetValue()
# config.WriteInt(self._configPrefix + "EditorWordWrap", self._wordWrapCheckBox.GetValue())
if self._hasTabs:
doViewStuffUpdate = doViewStuffUpdate or (not config.ReadInt(self._configPrefix + "EditorUseTabs", True)) != self._hasTabsCheckBox.GetValue()  # parenthesized: comparison binds tighter than "not"
config.WriteInt(self._configPrefix + "EditorUseTabs", not self._hasTabsCheckBox.GetValue())
newIndentWidth = int(self._indentWidthChoice.GetStringSelection())
oldIndentWidth = config.ReadInt(self._configPrefix + "EditorIndentWidth", 4)
if newIndentWidth != oldIndentWidth:
doViewStuffUpdate = True
config.WriteInt(self._configPrefix + "EditorIndentWidth", newIndentWidth)
GetApp().MainFrame.GetNotebook().update_appearance()
utils.profile_set("TextTabCompletion",self._tabCompletionVar.get())
utils.profile_set("check_text_tabs",self.checkTabsVar.get())
return True
def GetIcon(self):
return getTextIcon() | PypiClean |
/FliKISS-0.1.tar.gz/FliKISS-0.1/flikiss/static/js/editor.js | * Editor class
* @param textarea_id : Textarea id to create CodeMirror instance
* @param upload_url : Url used to upload files with DnD
* @param convert_url : Url used to convert Markdown into html
**/
function Editor(textarea_id, upload_url, convert_url) {
var self = this;
// codemirror
self.codemirror = CodeMirror.fromTextArea(
document.getElementById(textarea_id), {
"theme": "xq-light",
"mode": "markdown",
"lineWrapping": "true"
});
// manage drag and drop
self.upload_url = upload_url;
self.codemirror.setOption('onDragEvent', function(data, event) {
if ( event.type === 'drop' ) {
event.stopPropagation();
event.preventDefault();
// test formdata
if ( !!window.FormData ) {
var formdata = new FormData();
// number of files
if ( event.dataTransfer.files.length === 1 ) {
// images
if ( event.dataTransfer.files[0].type.match(/image.*/) ) {
// add wait text on editor
var text = '![Please wait during upload...]()';
var cursor = self.codemirror.getCursor('start');
var line = self.codemirror.getLine(cursor.line);
var new_value = line.slice(0, cursor.ch) + text + line.slice(cursor.ch);
self.codemirror.setLine(cursor.line, new_value);
self.codemirror.focus();
// Ajax
formdata.append('file', event.dataTransfer.files[0]);
var httpRequest = new XMLHttpRequest();
httpRequest.onreadystatechange = function() {
if (httpRequest.readyState === 4) {
if (httpRequest.status === 200) {
var data = JSON.parse(httpRequest.responseText);
if (data.error) {
self.codemirror.setLine(cursor.line, new_value.replace(text, ''));
alert(data.error);
}
else {
self.codemirror.setLine(cursor.line, new_value.replace(text, ''));
}
self.codemirror.focus();
}
}
};
httpRequest.open('POST', self.upload_url);
httpRequest.send(formdata);
}
// plain text
else if ( event.dataTransfer.files[0].type.match(/text.*/) ) {
// read file
if ( typeof FileReader != 'undefined' ) {
var reader = new FileReader();
// paste its content
reader.onload = function() {
var text = reader.result;
var cursor = self.codemirror.getCursor('start');
var line = self.codemirror.getLine(cursor.line);
var new_value = line.slice(0, cursor.ch) + text + line.slice(cursor.ch);
self.codemirror.setLine(cursor.line, new_value);
self.codemirror.focus();
}
reader.readAsText(event.dataTransfer.files[0]);
}
else alert('FileReader not supported');
}
else alert('File format not supported');
}
else alert('You can upload only one file');
}
else alert('Your browser does not seem to support HTML5 Drag and drop API');
return true;
}
});
self.convert_url = convert_url;
/*
* Define if current line is in given state
*/
self._getState = function(state) {
var pos = self.codemirror.getCursor('start');
var stat = self.codemirror.getTokenAt(pos);
if (!stat.type) return false;
var types = stat.type.split(' ');
for (var ii = 0; ii < types.length; ii++) {
var data = types[ii];
if (data === 'strong' && state === 'bold') {
return true;
}
else if (data === 'em' && state === 'italic') {
return true;
}
}
return false;
}
/*
* Replace selection adding prefix and suffix
*/
self._replaceSelection = function(prefix, suffix) {
var start = self.codemirror.getCursor('start');
var stop = self.codemirror.getCursor('end');
var text = self.codemirror.getSelection();
self.codemirror.replaceSelection(prefix + text + suffix);
start.ch += prefix.length;
stop.ch += suffix.length;
self.codemirror.setSelection(start, stop);
self.codemirror.focus();
}
/*
* Toggle state for selection
*/
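// Example: toggling bold on a selected "word" wraps it as "**word**";
// toggling again with the cursor inside strips the surrounding markers.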
self._toggleSelection = function(prefix, suffix, state) {
var state = self._getState(state);
// already in state
if (state) {
// get cursor
var cursor = self.codemirror.getCursor('start');
// get cursor line
var line = self.codemirror.getLine(cursor.line);
// split to keep only value with cursor
var prefix_index = line.slice(0, cursor.ch).lastIndexOf(prefix);
var suffix_index = cursor.ch + line.slice(cursor.ch).indexOf(suffix);
// replace line
line = line.slice(0, prefix_index) + line.slice(prefix_index + prefix.length, suffix_index) + line.slice(suffix_index + suffix.length)
self.codemirror.setLine(cursor.line, line);
self.codemirror.focus();
}
else {
self._replaceSelection(prefix, suffix);
}
}
/**
* draw editor toolbar
*/
var container = document.createElement('div');
container.className = 'toolbar';
// bold
var bold = document.createElement('span');
bold.className = 'fa fa-bold';
bold.onclick = function(in_event) {
self._toggleSelection('**', '**', 'bold');
}
container.appendChild(bold);
// italic
var italic = document.createElement('span');
italic.className = 'fa fa-italic';
italic.onclick = function(in_event) {
self._toggleSelection('*', '*', 'italic');
}
container.appendChild(italic);
// img
var img = document.createElement('span');
img.className = 'fa fa-photo'
img.onclick = function(in_event) {
self._replaceSelection('');
}
container.appendChild(img);
// link
var link = document.createElement('span');
link.className = 'fa fa-link';
link.onclick = function(in_event) {
self._replaceSelection('[', '](http://)');
}
container.appendChild(link);
// undo
var undo = document.createElement('span');
undo.onclick = function() {
self.codemirror.undo();
}
undo.className = 'fa fa-undo';
container.appendChild(undo);
// preview
var preview = document.createElement('span');
preview.onclick = function(in_event) {
// create div if not exists
var div = document.getElementById('markdown_preview');
var wrapper = self.codemirror.getWrapperElement();
var btn = this;
if (div == null) {
div = document.createElement('div');
div.setAttribute('id', 'markdown_preview');
div.style.display = 'none';
wrapper.parentNode.insertBefore(div, wrapper.nextSibling);
}
// show div
if (wrapper.style.display != 'none') {
// send request to server with value
btn.className = btn.className.replace('fa-eye', 'fa-spin fa-spinner');
var text = self.codemirror.getValue();
var httpRequest = new XMLHttpRequest();
httpRequest.onreadystatechange = function() {
if (httpRequest.readyState === 4) {
if (httpRequest.status === 200) {
var data = JSON.parse(httpRequest.responseText);
div.innerHTML = data.value;
div.style.display = 'block';
wrapper.style.display = 'none';
btn.className = btn.className.replace('fa-spin fa-spinner', 'fa-eye-slash');
}
}
};
httpRequest.open('POST', self.convert_url);
httpRequest.setRequestHeader('Content-type', 'application/x-www-form-urlencoded');
httpRequest.send('content=' + text);
}
else {
div.style.display = 'none';
wrapper.style.display = 'block';
btn.className = btn.className.replace('fa-eye-slash', 'fa-eye');
}
}
preview.className = 'fa fa-eye';
container.appendChild(preview);
// append container
var wrapper = self.codemirror.getWrapperElement();
wrapper.parentNode.insertBefore(container, wrapper);
} | PypiClean |
/KratosMultilevelMonteCarloApplication-9.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl/KratosMultiphysics/MultilevelMonteCarloApplication/XMC/xmc/methodDefs_momentEstimator/powerSums.py | import warnings
import numpy as np
from exaqute import (
task,
Type,
Depth,
COLLECTION_IN,
INOUT,
)
from xmc.tools import flatten
from .types import (
PowerSumsDict,
SampleArray,
PowerSumsDictUL,
CombinedSampleArray,
)
from typing import Union
def updatedPowerSums(powerSums: PowerSumsDict, samples: SampleArray) -> PowerSumsDict:
"""
Increments power sums from an array of samples.
Supports multi-valued random variables, and Monte Carlo index sets of dimension up to 1.
Input arguments:
- powerSums: dictionary following the format of MomentEstimator._powerSums
- samples: array of samples (see below).
Output arguments:
- powerSums: same as input, with updated values.
The array of samples is expected to be an array of dimension at least three, viz.
1. random event;
2. Monte Carlo index (e.g. solver, model or level);
3. (and beyond) random variable component.
Any dimension beyond the third will be collapsed into this latter one.
"""
# Ensure proper format
# Convert to NumPy multi-dimensional arrays
samples = np.array(samples)
# Ensure that this is a tridimensional array
if len(samples.shape) > 3:
# Assumption: components of the multi-valued random variable were split into sub-lists.
# Unpacked these sublist by reshaping:
# all dimensions beyond the 2nd one are collapsed into one.
warnings.warn(
(
"The samples are nested too deep. I assume that you misused the split-update"
" mechanism, and will try to fix the data structure. However, this behaviour"
" is only supported until 2020-11. Consider using the normal update mechanism"
" or fixing your solver wrapper."
),
FutureWarning,
)
samples = samples.reshape((*samples.shape[:2], -1))
elif len(samples.shape) < 3:
# This should not happen
raise ValueError(
f"Input argument has {len(samples.shape)} dimensions, "
"whereas I expected at least 3."
)
# Proper format has been ensured.
# Now check the dimension of the Monte Carlo index set
if samples.shape[1] == 2:
# Operator of differences for an index set of dimension 1
diffOperator = np.array([[1, 1], [1, -1]])
# Apply the operator to the second dimension of the array of samples
# then restore the order of dimensions
samples = np.tensordot(diffOperator, samples, axes=([1], [1])).transpose(1, 0, 2)
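# e.g. for one event and one component, samples [[u], [l]] become
# [[u + l], [u - l]]: pairwise sums and differences across the two levels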
# Iterate over keys to compute each corresponding power sum
for key in powerSums.keys():
# Deduce array of powers from key (e.g. '21' -> [2,1]) then broadcast it
# along the second dimension of a NumPY array of dimension 3
powers = np.array(list(map(int, key))).reshape((1, -1, 1))
# Increment the power sum.
# Elementwise exponentiation and multiplication across dimension 2 (levels)
# then summation across dimension 1 (random events).
powerSums[key] += np.sum(np.prod(samples ** powers, axis=1), axis=0)
return powerSums
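# Illustrative usage (hypothetical values; the key format follows
# MomentEstimator._powerSums, here '1' and '2' for the first two power sums):
#   powerSums = {"1": np.zeros(1), "2": np.zeros(1)}
#   samples = [[[1.0]], [[2.0]]]  # 2 events, 1 MC index, 1 component
#   powerSums = updatedPowerSums(powerSums, samples)
#   # powerSums["1"] -> [3.], powerSums["2"] -> [5.]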
@task(keep=True, returns=1,
samples={Type: COLLECTION_IN, Depth: 3},
powerSums=INOUT,
)
def updatedPowerSums_Task(powerSums: PowerSumsDict, samples: SampleArray) -> PowerSumsDict:
return updatedPowerSums(powerSums, samples)
def addPowerSumsAndCounter(
psDict: Union[PowerSumsDict, PowerSumsDictUL],
counter: int,
psArray: CombinedSampleArray,
keyOrder: tuple = None,
) -> (Union[PowerSumsDict, PowerSumsDictUL], int):
"""
Increment an existing dictionary of power sums and a sample counter from an array of such
quantities. This function is meant as a definition for MultiCombinedPowerSums._powerSumsUpdater.
Input arguments:
- psDict: dictionary of power sums to be incremented; in the format of
MultiCombinedPowerSums._powerSums
- psArray: multidimensional array of power sums and sample count to add; in the format
expected by MultiCombinedMomentEstimator.update.
- counter: sample counter to be incremented.
- keyOrder: (optional) keys of entries of power sums in psDict, in their order in psArray.
Default value: ('1', '2', ..., 'p'), with 'p' the number of distinct power sums in psArray.
Output arguments:
- psDict: same format as input (depends on dimension of the MC index set)
- counter: same format as input
See the documentation of MultiCombinedPowerSums for more details.
"""
# First consider the case of samples from several MC levels
if "upper" in psDict.keys():
# If the index set dimension is 1, then psDict is of the form
# {'upper': psDictUpper, 'lower': psDictLower}
# So we recurse over each sub-dictionary
#
# First, check that we have the expected keys
if sorted(psDict.keys()) != sorted(("lower", "upper")):
raise ValueError(
"Expected the dictionary of power sums to have keys ('upper', 'lower'). "
f"Found {tuple(psDict.keys())} instead."
)
# Get samples for upper level only (first element along second axis)
psArrayUpper = [[oneEvent[0]] for oneEvent in psArray]
# Update power sums for upper level by recursion
psDict["upper"], counterUpper = addPowerSumsAndCounter(
psDict["upper"], counter, psArrayUpper, keyOrder
)
# Now do the same for lower level
psArrayLower = [[oneEvent[1]] for oneEvent in psArray]
psDict["lower"], counterLower = addPowerSumsAndCounter(
psDict["lower"], counter, psArrayLower, keyOrder
)
# Check that the new counters are equal and return updated arguments
# Special case: if the lower level is fictitious (dummy sample), then
# its counter is expected to be None and a warning has been issued (see below).
# This case will be removed once this workaround is no longer supported.
is_lower_dummy = counterLower is None
if not (counterUpper == counterLower or is_lower_dummy):
raise ValueError(
"Expected upper and lower levels to have equal sample count. "
f"Received {counterUpper-counter} and {counterLower-counter}, respectively."
)
return psDict, counterUpper
#
# Code below is for index set of dimension 0
# This is reached either because this is a single-level moment estimation,
# or because this is a recursion entered from the conditional statement above
#
# Try to detect if this is actually a dummy sample
is_at_least_4d = hasattr(psArray[0][0][0], "__iter__")
has_nonzero_value = any(flatten(psArray))
if not is_at_least_4d and not has_nonzero_value:
# This is a sample full of zeros, with the wrong shape.
# We are into a recursion. Warn about future deprecation
warnings.warn(
(
"Dummy samples simulating a solver level are not needed here. "
"They will be deprecated in the near future."
),
FutureWarning,
)
# Do nothing and return power sums as they are.
# Return counter as None, to let caller know that this is a dummy sample.
# Note: the caller is this function, from the conditional statement above.
return psDict, None
#
# Assign default value of keyOrder, if undefined
if keyOrder is None:
# Default key order
# Let us not assume the maximal order of power sums
# We subtract 1 because of the counter
keyOrder = tuple(str(i + 1) for i in range(len(psArray[0][0][0]) - 1))
# Check that all keys exist
if sorted(keyOrder) != sorted(psDict.keys()):
raise ValueError(
"Failed to match keys of new power sums and existing ones: "
f"{sorted(keyOrder)} versus {sorted(psDict.keys())}."
)
# Let's reformat psArray
# Collapse 'solver' (i.e. second) axis, since its length is 1
psArray = [oneEvent[0] for oneEvent in psArray]
# Increment counter and remove count from psArray
for oneEvent in psArray:
# We remove the counter from every component
for oneComponent in oneEvent:
increment = oneComponent.pop(-1)
# However, we increment the counter only once
counter += increment
# Now we expect to have a homogeneous tri-dimensional array: event, component, power
# There may be an extra axis, e.g. because of an unwanted split of solver output
# We will convert it to a NumPy array and collapse any axis beyond the third
psArray = np.array(psArray)
if len(psArray.shape) > 3:
psArray = psArray.reshape((*psArray.shape[:2], -1))
# Now we surely have a tri-dimensional array of axes: event, component, power
# Sum over 'events' (i.e. first) axis
psArray = np.sum(psArray, axis=0)
# We permute the axes to be (power, component) instead of (component, power)
psArray = psArray.transpose()
# Increment power sums for each power
for i, key in enumerate(keyOrder):
# This is actually a vector operation, using NumPy's vector addition
psDict[key] += psArray[i]
return psDict, counter
# The depth value is necessary here
@task(keep=True, returns=2, psDict=INOUT, psArray={Type: COLLECTION_IN, Depth: 4})
def addPowerSumsAndCounter_Task(
psDict: Union[PowerSumsDict, PowerSumsDictUL],
counter: int,
psArray: CombinedSampleArray,
keyOrder: tuple = None,
) -> (Union[PowerSumsDict, PowerSumsDictUL], int):
return addPowerSumsAndCounter(psDict, counter, psArray, keyOrder) | PypiClean |
/Banyan-0.1.5.tar.gz/Banyan-0.1.5/banyan/_min_gap_updator.py | from __future__ import print_function
import banyan_c
class MinGapUpdator(object):
"""
Updates nodes by the minimum, maximum, and min-gaps of their subtrees. Allows trees employing this to
efficiently answer what is the smallest gap in their keys.
Example:
>>> t = SortedSet([1, 3, 2], updator = MinGapUpdator)
>>>
>>> t
SortedSet([1, 2, 3])
>>> print(t.min_gap())
1
>>>
>>> t.remove(2)
>>> t
SortedSet([1, 3])
>>> print(t.min_gap())
2
>>> t.add(1.0001)
>>> t
SortedSet([1, 1.0001, 3])
>>> print(t.min_gap())
0.0001
.. Note::
The keys used with this updator must support the `number protocol`_ and be `rich comparable`_, otherwise an exception will be thrown.
.. _`number protocol`: http://docs.python.org/2/c-api/number.html
.. _`rich comparable`: http://docs.python.org/2/c-api/object.html?highlight=pyobject_richcomparebool#PyObject_RichCompareBool
Example:
>>> t = SortedSet(['1', '3', '2'], updator = MinGapUpdator)
Traceback (most recent call last):
...
TypeError: Failed to subtract
"""
# Metadata appended to each node.
class Metadata(banyan_c.MinGapMetadata):
# Overwritten by C implementation
pass
def min_gap(self):
"""
:returns: Smallest gap between the keys.
:raises: :py:exc:`RuntimeError` if there are fewer than two keys in the set.
Example:
>>> t = SortedSet([1, 3, 2], updator = MinGapUpdator)
>>>
>>> t.min_gap()
1
>>> t.remove(2)
>>> t.min_gap()
2
>>> t.remove(1)
>>> # The min gap is now undefined.
>>> t.min_gap()
Traceback (most recent call last):
...
RuntimeError: Min-gap undefined
"""
return self._min_gap_updator_min_gap() | PypiClean |
/Instrumental-lib-0.7.zip/Instrumental-lib-0.7/docs/faq.rst | FAQs
====
My instrument isn't showing up in ``list_instruments()``. What now?
-------------------------------------------------------------------
If you're using this particular driver for the first time, make sure you've followed the install directions fully. You should also check that the device works with any vendor-provided software (e.g. a camera viewer GUI). If the device still isn't showing up, you should import the driver module directly to reveal any errors (see :ref:`list_instruments-no-error`).
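For example, importing a driver module directly (the module name and error shown here
are purely illustrative) raises the underlying error instead of silently skipping it::
    >>> from instrumental.drivers.cameras import uc480
    Traceback (most recent call last):
    ...
    ImportError: No module named 'win32event'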
.. _list_instruments-no-error:
Why isn't ``list_instruments()`` producing any errors?
------------------------------------------------------
``list_instruments()`` is designed to check all Instrumental drivers that are available, importing each driver in turn. If a driver fails to import, this is often because you haven't installed its requirements (because you're not using it), so ``list_instruments()`` simply ignores the error and moves on.
Where is the ``instrumental.conf`` configuration file stored?
-------------------------------------------------------------
The location of ``instrumental.conf`` is platform-dependent. To find where the file is located on
your system, run::
>>> from instrumental.conf import user_conf_dir
>>> user_conf_dir
u'C:\\Users\\Lab\\AppData\\Local\\MabuchiLab\\Instrumental'
| PypiClean |
/GaiaAlertsPy-1.0.0.tar.gz/GaiaAlertsPy-1.0.0/licenses/LICENSE.rst | BSD 3-Clause License
====================
Copyright (c) 2023, Andy Tzanidakis
Redistribution and use in source and binary forms, with or without modification,
are permitted provided that the following conditions are met:
1. Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
3. Neither the name of the [organization] nor the names of its contributors
may be used to endorse or promote products derived from this software
without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
| PypiClean |
/FamcyDev-0.3.71-py3-none-any.whl/Famcy/bower_components/bootstrap-table/src/constants/index.js | const VERSION = '1.18.3'
let bootstrapVersion = 4
try {
const rawVersion = $.fn.dropdown.Constructor.VERSION
// Only try to parse VERSION if it is defined.
// It is undefined in older versions of Bootstrap (tested with 3.1.1).
if (rawVersion !== undefined) {
bootstrapVersion = parseInt(rawVersion, 10)
}
} catch (e) {
// ignore
}
try {
// eslint-disable-next-line no-undef
const rawVersion = bootstrap.Tooltip.VERSION
if (rawVersion !== undefined) {
bootstrapVersion = parseInt(rawVersion, 10)
}
} catch (e) {
// ignore
}
const CONSTANTS = {
3: {
iconsPrefix: 'glyphicon',
icons: {
paginationSwitchDown: 'glyphicon-collapse-down icon-chevron-down',
paginationSwitchUp: 'glyphicon-collapse-up icon-chevron-up',
refresh: 'glyphicon-refresh icon-refresh',
toggleOff: 'glyphicon-list-alt icon-list-alt',
toggleOn: 'glyphicon-list-alt icon-list-alt',
columns: 'glyphicon-th icon-th',
detailOpen: 'glyphicon-plus icon-plus',
detailClose: 'glyphicon-minus icon-minus',
fullscreen: 'glyphicon-fullscreen',
search: 'glyphicon-search',
clearSearch: 'glyphicon-trash'
},
classes: {
buttonsPrefix: 'btn',
buttons: 'default',
buttonsGroup: 'btn-group',
buttonsDropdown: 'btn-group',
pull: 'pull',
inputGroup: 'input-group',
inputPrefix: 'input-',
input: 'form-control',
paginationDropdown: 'btn-group dropdown',
dropup: 'dropup',
dropdownActive: 'active',
paginationActive: 'active',
buttonActive: 'active'
},
html: {
toolbarDropdown: ['<ul class="dropdown-menu" role="menu">', '</ul>'],
toolbarDropdownItem: '<li class="dropdown-item-marker" role="menuitem"><label>%s</label></li>',
toolbarDropdownSeparator: '<li class="divider"></li>',
pageDropdown: ['<ul class="dropdown-menu" role="menu">', '</ul>'],
pageDropdownItem: '<li role="menuitem" class="%s"><a href="#">%s</a></li>',
dropdownCaret: '<span class="caret"></span>',
pagination: ['<ul class="pagination%s">', '</ul>'],
paginationItem: '<li class="page-item%s"><a class="page-link" aria-label="%s" href="javascript:void(0)">%s</a></li>',
icon: '<i class="%s %s"></i>',
inputGroup: '<div class="input-group">%s<span class="input-group-btn">%s</span></div>',
searchInput: '<input class="%s%s" type="text" placeholder="%s">',
searchButton: '<button class="%s" type="button" name="search" title="%s">%s %s</button>',
searchClearButton: '<button class="%s" type="button" name="clearSearch" title="%s">%s %s</button>'
}
},
4: {
iconsPrefix: 'fa',
icons: {
paginationSwitchDown: 'fa-caret-square-down',
paginationSwitchUp: 'fa-caret-square-up',
refresh: 'fa-sync',
toggleOff: 'fa-toggle-off',
toggleOn: 'fa-toggle-on',
columns: 'fa-th-list',
detailOpen: 'fa-plus',
detailClose: 'fa-minus',
fullscreen: 'fa-arrows-alt',
search: 'fa-search',
clearSearch: 'fa-trash'
},
classes: {
buttonsPrefix: 'btn',
buttons: 'secondary',
buttonsGroup: 'btn-group',
buttonsDropdown: 'btn-group',
pull: 'float',
inputGroup: 'btn-group',
inputPrefix: 'form-control-',
input: 'form-control',
paginationDropdown: 'btn-group dropdown',
dropup: 'dropup',
dropdownActive: 'active',
paginationActive: 'active',
buttonActive: 'active'
},
html: {
toolbarDropdown: ['<div class="dropdown-menu dropdown-menu-right">', '</div>'],
toolbarDropdownItem: '<label class="dropdown-item dropdown-item-marker">%s</label>',
pageDropdown: ['<div class="dropdown-menu">', '</div>'],
pageDropdownItem: '<a class="dropdown-item %s" href="#">%s</a>',
toolbarDropdownSeparator: '<div class="dropdown-divider"></div>',
dropdownCaret: '<span class="caret"></span>',
pagination: ['<ul class="pagination%s">', '</ul>'],
paginationItem: '<li class="page-item%s"><a class="page-link" aria-label="%s" href="javascript:void(0)">%s</a></li>',
icon: '<i class="%s %s"></i>',
inputGroup: '<div class="input-group">%s<div class="input-group-append">%s</div></div>',
searchInput: '<input class="%s%s" type="text" placeholder="%s">',
searchButton: '<button class="%s" type="button" name="search" title="%s">%s %s</button>',
searchClearButton: '<button class="%s" type="button" name="clearSearch" title="%s">%s %s</button>'
}
},
5: {
iconsPrefix: 'fa',
icons: {
paginationSwitchDown: 'fa-caret-square-down',
paginationSwitchUp: 'fa-caret-square-up',
refresh: 'fa-sync',
toggleOff: 'fa-toggle-off',
toggleOn: 'fa-toggle-on',
columns: 'fa-th-list',
detailOpen: 'fa-plus',
detailClose: 'fa-minus',
fullscreen: 'fa-arrows-alt',
search: 'fa-search',
clearSearch: 'fa-trash'
},
classes: {
buttonsPrefix: 'btn',
buttons: 'secondary',
buttonsGroup: 'btn-group',
buttonsDropdown: 'btn-group',
pull: 'float',
inputGroup: 'btn-group',
inputPrefix: 'form-control-',
input: 'form-control',
paginationDropdown: 'btn-group dropdown',
dropup: 'dropup',
dropdownActive: 'active',
paginationActive: 'active',
buttonActive: 'active'
},
html: {
dataToggle: 'data-bs-toggle',
toolbarDropdown: ['<div class="dropdown-menu dropdown-menu-right">', '</div>'],
toolbarDropdownItem: '<label class="dropdown-item dropdown-item-marker">%s</label>',
pageDropdown: ['<div class="dropdown-menu">', '</div>'],
pageDropdownItem: '<a class="dropdown-item %s" href="#">%s</a>',
toolbarDropdownSeparator: '<div class="dropdown-divider"></div>',
dropdownCaret: '<span class="caret"></span>',
pagination: ['<ul class="pagination%s">', '</ul>'],
paginationItem: '<li class="page-item%s"><a class="page-link" aria-label="%s" href="javascript:void(0)">%s</a></li>',
icon: '<i class="%s %s"></i>',
inputGroup: '<div class="input-group">%s<div class="input-group-append">%s</div></div>',
searchInput: '<input class="%s%s" type="text" placeholder="%s">',
searchButton: '<button class="%s" type="button" name="search" title="%s">%s %s</button>',
searchClearButton: '<button class="%s" type="button" name="clearSearch" title="%s">%s %s</button>'
}
}
}[bootstrapVersion]
const DEFAULTS = {
height: undefined,
classes: 'table table-bordered table-hover',
buttons: {},
theadClasses: '',
headerStyle (column) {
return {}
},
rowStyle (row, index) {
return {}
},
rowAttributes (row, index) {
return {}
},
undefinedText: '-',
locale: undefined,
virtualScroll: false,
virtualScrollItemHeight: undefined,
sortable: true,
sortClass: undefined,
silentSort: true,
sortName: undefined,
sortOrder: undefined,
sortReset: false,
sortStable: false,
rememberOrder: false,
serverSort: true,
customSort: undefined,
columns: [
[]
],
data: [],
url: undefined,
method: 'get',
cache: true,
contentType: 'application/json',
dataType: 'json',
ajax: undefined,
ajaxOptions: {},
queryParams (params) {
return params
},
queryParamsType: 'limit', // 'limit', undefined
responseHandler (res) {
return res
},
totalField: 'total',
totalNotFilteredField: 'totalNotFiltered',
dataField: 'rows',
footerField: 'footer',
pagination: false,
paginationParts: ['pageInfo', 'pageSize', 'pageList'],
showExtendedPagination: false,
paginationLoop: true,
sidePagination: 'client', // client or server
totalRows: 0,
totalNotFiltered: 0,
pageNumber: 1,
pageSize: 10,
pageList: [10, 25, 50, 100],
paginationHAlign: 'right', // right, left
paginationVAlign: 'bottom', // bottom, top, both
paginationDetailHAlign: 'left', // right, left
paginationPreText: '‹',
paginationNextText: '›',
paginationSuccessivelySize: 5, // Maximum number of successive page links shown in a row
paginationPagesBySide: 1, // Number of pages on each side (right, left) of the current page.
paginationUseIntermediate: false, // Calculate intermediate pages for quick access
search: false,
searchHighlight: false,
searchOnEnterKey: false,
strictSearch: false,
searchSelector: false,
visibleSearch: false,
showButtonIcons: true,
showButtonText: false,
showSearchButton: false,
showSearchClearButton: false,
trimOnSearch: true,
searchAlign: 'right',
searchTimeOut: 500,
searchText: '',
customSearch: undefined,
showHeader: true,
showFooter: false,
footerStyle (column) {
return {}
},
searchAccentNeutralise: false,
showColumns: false,
showColumnsToggleAll: false,
showColumnsSearch: false,
minimumCountColumns: 1,
showPaginationSwitch: false,
showRefresh: false,
showToggle: false,
showFullscreen: false,
smartDisplay: true,
escape: false,
filterOptions: {
filterAlgorithm: 'and'
},
idField: undefined,
selectItemName: 'btSelectItem',
clickToSelect: false,
ignoreClickToSelectOn ({ tagName }) {
return ['A', 'BUTTON'].includes(tagName)
},
singleSelect: false,
checkboxHeader: true,
maintainMetaData: false,
multipleSelectRow: false,
uniqueId: undefined,
cardView: false,
detailView: false,
detailViewIcon: true,
detailViewByClick: false,
detailViewAlign: 'left',
detailFormatter (index, row) {
return ''
},
detailFilter (index, row) {
return true
},
toolbar: undefined,
toolbarAlign: 'left',
buttonsToolbar: undefined,
buttonsAlign: 'right',
buttonsOrder: ['paginationSwitch', 'refresh', 'toggle', 'fullscreen', 'columns'],
buttonsPrefix: CONSTANTS.classes.buttonsPrefix,
buttonsClass: CONSTANTS.classes.buttons,
icons: CONSTANTS.icons,
iconSize: undefined,
iconsPrefix: CONSTANTS.iconsPrefix, // glyphicon or fa(font-awesome)
loadingFontSize: 'auto',
loadingTemplate (loadingMessage) {
return `<span class="loading-wrap">
<span class="loading-text">${loadingMessage}</span>
<span class="animation-wrap"><span class="animation-dot"></span></span>
</span>
`
},
onAll (name, args) {
return false
},
onClickCell (field, value, row, $element) {
return false
},
onDblClickCell (field, value, row, $element) {
return false
},
onClickRow (item, $element) {
return false
},
onDblClickRow (item, $element) {
return false
},
onSort (name, order) {
return false
},
onCheck (row) {
return false
},
onUncheck (row) {
return false
},
onCheckAll (rows) {
return false
},
onUncheckAll (rows) {
return false
},
onCheckSome (rows) {
return false
},
onUncheckSome (rows) {
return false
},
onLoadSuccess (data) {
return false
},
onLoadError (status) {
return false
},
onColumnSwitch (field, checked) {
return false
},
onPageChange (number, size) {
return false
},
onSearch (text) {
return false
},
onToggle (cardView) {
return false
},
onPreBody (data) {
return false
},
onPostBody () {
return false
},
onPostHeader () {
return false
},
onPostFooter () {
return false
},
onExpandRow (index, row, $detail) {
return false
},
onCollapseRow (index, row) {
return false
},
onRefreshOptions (options) {
return false
},
onRefresh (params) {
return false
},
onResetView () {
return false
},
onScrollBody () {
return false
}
}
const EN = {
formatLoadingMessage () {
return 'Loading, please wait'
},
formatRecordsPerPage (pageNumber) {
return `${pageNumber} rows per page`
},
formatShowingRows (pageFrom, pageTo, totalRows, totalNotFiltered) {
if (totalNotFiltered !== undefined && totalNotFiltered > 0 && totalNotFiltered > totalRows) {
return `Showing ${pageFrom} to ${pageTo} of ${totalRows} rows (filtered from ${totalNotFiltered} total rows)`
}
return `Showing ${pageFrom} to ${pageTo} of ${totalRows} rows`
},
formatSRPaginationPreText () {
return 'previous page'
},
formatSRPaginationPageText (page) {
return `to page ${page}`
},
formatSRPaginationNextText () {
return 'next page'
},
formatDetailPagination (totalRows) {
return `Showing ${totalRows} rows`
},
formatSearch () {
return 'Search'
},
formatClearSearch () {
return 'Clear Search'
},
formatNoMatches () {
return 'No matching records found'
},
formatPaginationSwitch () {
return 'Hide/Show pagination'
},
formatPaginationSwitchDown () {
return 'Show pagination'
},
formatPaginationSwitchUp () {
return 'Hide pagination'
},
formatRefresh () {
return 'Refresh'
},
formatToggle () {
return 'Toggle'
},
formatToggleOn () {
return 'Show card view'
},
formatToggleOff () {
return 'Hide card view'
},
formatColumns () {
return 'Columns'
},
formatColumnsToggleAll () {
return 'Toggle all'
},
formatFullscreen () {
return 'Fullscreen'
},
formatAllRows () {
return 'All'
}
}
const COLUMN_DEFAULTS = {
field: undefined,
title: undefined,
titleTooltip: undefined,
class: undefined,
width: undefined,
widthUnit: 'px',
rowspan: undefined,
colspan: undefined,
align: undefined, // left, right, center
halign: undefined, // left, right, center
falign: undefined, // left, right, center
valign: undefined, // top, middle, bottom
cellStyle: undefined,
radio: false,
checkbox: false,
checkboxEnabled: true,
clickToSelect: true,
showSelectTitle: false,
sortable: false,
sortName: undefined,
order: 'asc', // asc, desc
sorter: undefined,
visible: true,
switchable: true,
cardVisible: true,
searchable: true,
formatter: undefined,
footerFormatter: undefined,
detailFormatter: undefined,
searchFormatter: true,
searchHighlightFormatter: false,
escape: false,
events: undefined
}
const METHODS = [
'getOptions',
'refreshOptions',
'getData',
'getSelections',
'load', 'append', 'prepend',
'remove', 'removeAll',
'insertRow', 'updateRow',
'getRowByUniqueId', 'updateByUniqueId', 'removeByUniqueId',
'updateCell', 'updateCellByUniqueId',
'showRow', 'hideRow', 'getHiddenRows',
'showColumn', 'hideColumn',
'getVisibleColumns', 'getHiddenColumns',
'showAllColumns', 'hideAllColumns',
'mergeCells',
'checkAll', 'uncheckAll', 'checkInvert',
'check', 'uncheck',
'checkBy', 'uncheckBy',
'refresh',
'destroy',
'resetView',
'showLoading', 'hideLoading',
'togglePagination', 'toggleFullscreen', 'toggleView',
'resetSearch',
'filterBy',
'scrollTo', 'getScrollPosition',
'selectPage', 'prevPage', 'nextPage',
'toggleDetailView',
'expandRow', 'collapseRow', 'expandRowByUniqueId', 'collapseRowByUniqueId',
'expandAllRows', 'collapseAllRows',
'updateColumnTitle', 'updateFormatText'
]
const EVENTS = {
'all.bs.table': 'onAll',
'click-row.bs.table': 'onClickRow',
'dbl-click-row.bs.table': 'onDblClickRow',
'click-cell.bs.table': 'onClickCell',
'dbl-click-cell.bs.table': 'onDblClickCell',
'sort.bs.table': 'onSort',
'check.bs.table': 'onCheck',
'uncheck.bs.table': 'onUncheck',
'check-all.bs.table': 'onCheckAll',
'uncheck-all.bs.table': 'onUncheckAll',
'check-some.bs.table': 'onCheckSome',
'uncheck-some.bs.table': 'onUncheckSome',
'load-success.bs.table': 'onLoadSuccess',
'load-error.bs.table': 'onLoadError',
'column-switch.bs.table': 'onColumnSwitch',
'page-change.bs.table': 'onPageChange',
'search.bs.table': 'onSearch',
'toggle.bs.table': 'onToggle',
'pre-body.bs.table': 'onPreBody',
'post-body.bs.table': 'onPostBody',
'post-header.bs.table': 'onPostHeader',
'post-footer.bs.table': 'onPostFooter',
'expand-row.bs.table': 'onExpandRow',
'collapse-row.bs.table': 'onCollapseRow',
'refresh-options.bs.table': 'onRefreshOptions',
'reset-view.bs.table': 'onResetView',
'refresh.bs.table': 'onRefresh',
'scroll-body.bs.table': 'onScrollBody'
}
Object.assign(DEFAULTS, EN)
export default {
VERSION,
THEME: `bootstrap${bootstrapVersion}`,
CONSTANTS,
DEFAULTS,
COLUMN_DEFAULTS,
METHODS,
EVENTS,
LOCALES: {
en: EN,
'en-US': EN
}
} | PypiClean |
/Flask-CKEditor-0.4.6.tar.gz/Flask-CKEditor-0.4.6/flask_ckeditor/static/full/plugins/specialchar/dialogs/lang/zh.js | /*
Copyright (c) 2003-2020, CKSource - Frederico Knabben. All rights reserved.
For licensing, see LICENSE.md or https://ckeditor.com/legal/ckeditor-oss-license
*/
CKEDITOR.plugins.setLang("specialchar","zh",{euro:"歐元符號",lsquo:"左單引號",rsquo:"右單引號",ldquo:"左雙引號",rdquo:"右雙引號",ndash:"短破折號",mdash:"長破折號",iexcl:"倒置的驚嘆號",cent:"美分符號",pound:"英鎊符號",curren:"貨幣符號",yen:"日圓符號",brvbar:"破折號",sect:"章節符號",uml:"分音符號",copy:"版權符號",ordf:"雌性符號",laquo:"左雙角括號",not:"Not 符號",reg:"註冊商標符號",macr:"長音符號",deg:"度數符號",sup2:"上標字 2",sup3:"上標字 3",acute:"尖音符號",micro:"微",para:"段落符號",middot:"中間點",cedil:"字母 C 下面的尾型符號 ",sup1:"上標",ordm:"雄性符號",raquo:"右雙角括號",frac14:"四分之一符號",frac12:"二分之一符號",frac34:"四分之三符號",
iquest:"倒置的問號",Agrave:"拉丁大寫字母 A 帶抑音符號",Aacute:"拉丁大寫字母 A 帶尖音符號",Acirc:"拉丁大寫字母 A 帶揚抑符",Atilde:"拉丁大寫字母 A 帶波浪號",Auml:"拉丁大寫字母 A 帶分音符號",Aring:"拉丁大寫字母 A 帶上圓圈",AElig:"拉丁大寫字母 Æ",Ccedil:"拉丁大寫字母 C 帶下尾符號",Egrave:"拉丁大寫字母 E 帶抑音符號",Eacute:"拉丁大寫字母 E 帶尖音符號",Ecirc:"拉丁大寫字母 E 帶揚抑符",Euml:"拉丁大寫字母 E 帶分音符號",Igrave:"拉丁大寫字母 I 帶抑音符號",Iacute:"拉丁大寫字母 I 帶尖音符號",Icirc:"拉丁大寫字母 I 帶揚抑符",Iuml:"拉丁大寫字母 I 帶分音符號",ETH:"拉丁大寫字母 Eth",Ntilde:"拉丁大寫字母 N 帶波浪號",Ograve:"拉丁大寫字母 O 帶抑音符號",Oacute:"拉丁大寫字母 O 帶尖音符號",Ocirc:"拉丁大寫字母 O 帶揚抑符",Otilde:"拉丁大寫字母 O 帶波浪號",
Ouml:"拉丁大寫字母 O 帶分音符號",times:"乘號",Oslash:"拉丁大寫字母 O 帶粗線符號",Ugrave:"拉丁大寫字母 U 帶抑音符號",Uacute:"拉丁大寫字母 U 帶尖音符號",Ucirc:"拉丁大寫字母 U 帶揚抑符",Uuml:"拉丁大寫字母 U 帶分音符號",Yacute:"拉丁大寫字母 Y 帶尖音符號",THORN:"拉丁大寫字母 Thorn",szlig:"拉丁小寫字母 s",agrave:"拉丁小寫字母 a 帶抑音符號",aacute:"拉丁小寫字母 a 帶尖音符號",acirc:"拉丁小寫字母 a 帶揚抑符",atilde:"拉丁小寫字母 a 帶波浪號",auml:"拉丁小寫字母 a 帶分音符號",aring:"拉丁小寫字母 a 帶上圓圈",aelig:"拉丁小寫字母 æ",ccedil:"拉丁小寫字母 c 帶下尾符號",egrave:"拉丁小寫字母 e 帶抑音符號",eacute:"拉丁小寫字母 e 帶尖音符號",ecirc:"拉丁小寫字母 e 帶揚抑符",euml:"拉丁小寫字母 e 帶分音符號",igrave:"拉丁小寫字母 i 帶抑音符號",
iacute:"拉丁小寫字母 i 帶尖音符號",icirc:"拉丁小寫字母 i 帶揚抑符",iuml:"拉丁小寫字母 i 帶分音符號",eth:"拉丁小寫字母 eth",ntilde:"拉丁小寫字母 n 帶波浪號",ograve:"拉丁小寫字母 o 帶抑音符號",oacute:"拉丁小寫字母 o 帶尖音符號",ocirc:"拉丁小寫字母 o 帶揚抑符",otilde:"拉丁小寫字母 o 帶波浪號",ouml:"拉丁小寫字母 o 帶分音符號",divide:"除號",oslash:"拉丁小寫字母 o 帶粗線符號",ugrave:"拉丁小寫字母 u 帶抑音符號",uacute:"拉丁小寫字母 u 帶尖音符號",ucirc:"拉丁小寫字母 u 帶揚抑符",uuml:"拉丁小寫字母 u 帶分音符號",yacute:"拉丁小寫字母 y 帶尖音符號",thorn:"拉丁小寫字母 thorn",yuml:"拉丁小寫字母 y 帶分音符號",OElig:"拉丁大寫字母 OE",oelig:"拉丁小寫字母 oe",372:"拉丁大寫字母 W 帶揚抑符",374:"拉丁大寫字母 Y 帶揚抑符",373:"拉丁小寫字母 w 帶揚抑符",
375:"拉丁小寫字母 y 帶揚抑符",sbquo:"低 9 單引號",8219:"高 9 反轉單引號",bdquo:"低 9 雙引號",hellip:"水平刪節號",trade:"商標符號",9658:"黑色向右指箭號",bull:"項目符號",rarr:"向右箭號",rArr:"向右雙箭號",hArr:"左右雙箭號",diams:"黑鑽套裝",asymp:"約等於"}); | PypiClean |
/GRR-M2Crypto-0.22.6.tar.gz/GRR-M2Crypto-0.22.6/M2Crypto/SSL/cb.py | __all__ = ['unknown_issuer', 'ssl_verify_callback_stub', 'ssl_verify_callback',
'ssl_verify_callback_allow_unknown_ca', 'ssl_info_callback']
# Python
import sys
# M2Crypto
import Context
from M2Crypto import m2
def ssl_verify_callback_stub(ssl_ctx_ptr, x509_ptr, errnum, errdepth, ok):
# Deprecated
return ok
unknown_issuer = [
m2.X509_V_ERR_DEPTH_ZERO_SELF_SIGNED_CERT,
m2.X509_V_ERR_UNABLE_TO_GET_ISSUER_CERT_LOCALLY,
m2.X509_V_ERR_UNABLE_TO_VERIFY_LEAF_SIGNATURE,
m2.X509_V_ERR_CERT_UNTRUSTED,
]
def ssl_verify_callback(ssl_ctx_ptr, x509_ptr, errnum, errdepth, ok):
# Deprecated
ssl_ctx = Context.map()[long(ssl_ctx_ptr)]
if errnum in unknown_issuer:
if ssl_ctx.get_allow_unknown_ca():
sys.stderr.write("policy: %s: permitted...\n" % (m2.x509_get_verify_error(errnum)))
sys.stderr.flush()
ok = 1
# CRL checking goes here...
if ok:
if ssl_ctx.get_verify_depth() >= errdepth:
ok = 1
else:
ok = 0
return ok
def ssl_verify_callback_allow_unknown_ca(ok, store):
errnum = store.get_error()
if errnum in unknown_issuer:
ok = 1
return ok
# Cribbed from OpenSSL's apps/s_cb.c.
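# Typically installed as a connection info callback (e.g. via Context.set_info_callback,
# assuming M2Crypto's usual wiring) to trace handshake progress on stderr.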
def ssl_info_callback(where, ret, ssl_ptr):
w = where & ~m2.SSL_ST_MASK
if (w & m2.SSL_ST_CONNECT):
state = "SSL connect"
elif (w & m2.SSL_ST_ACCEPT):
state = "SSL accept"
else:
state = "SSL state unknown"
if (where & m2.SSL_CB_LOOP):
sys.stderr.write("LOOP: %s: %s\n" % (state, m2.ssl_get_state_v(ssl_ptr)))
sys.stderr.flush()
return
if (where & m2.SSL_CB_EXIT):
if not ret:
sys.stderr.write("FAILED: %s: %s\n" % (state, m2.ssl_get_state_v(ssl_ptr)))
sys.stderr.flush()
else:
sys.stderr.write("INFO: %s: %s\n" % (state, m2.ssl_get_state_v(ssl_ptr)))
sys.stderr.flush()
return
if (where & m2.SSL_CB_ALERT):
if (where & m2.SSL_CB_READ):
w = 'read'
else:
w = 'write'
sys.stderr.write("ALERT: %s: %s: %s\n" % \
(w, m2.ssl_get_alert_type_v(ret), m2.ssl_get_alert_desc_v(ret)))
sys.stderr.flush()
return | PypiClean |
/ChIP_R-1.2.0-py3-none-any.whl/chipr/chipr.py |
import argparse
import sys
from chipr import rankprod, bed
class RankProdAnalysis(object):
def __init__(self):
parser = argparse.ArgumentParser(prog='chipr',
description="Combine multiple ChIP-seq files and return a union of all peak "
"locations and a set confident, reproducible peaks as determined by "
"rank product analysis")
parser.add_argument("-i", "--input",
help="ChIP-seq input files. These files must be in either narrowPeak, broadPeak, "
"or regionPeak format. Multiple inputs are separeted by a single space",
dest="input",
type=argparse.FileType('r'),
nargs='+',
required=True)
parser.add_argument("-o", "--output",
help="ChIP-seq output filename prefix",
dest="output",
type=str,
default="rankprod",
required=False)
parser.set_defaults(bigbed=False)
parser.add_argument("-m", "--minentries",
help="The minimum peaks between replicates required to form an "
"intersection of the peaks \n"
"Default: 1",
dest="minentries",
default=1,
type=int,
required=False)
parser.add_argument("--rankmethod",
help="The ranking method used to rank peaks within replicates. "
"Options: 'signalvalue', 'pvalue', 'qvalue'. \n"
"Default: pvalue",
dest="rankmethod",
default='pvalue',
type=str,
required=False)
parser.set_defaults(broadpeaks=False)
parser.add_argument("--fragment",
help="Specifies whether the input peaks will be subject to high levels of fragmentation",
dest="fragment",
action="store_true",
required=False)
parser.add_argument("--duphandling",
help="Specifies how to handle entries that are ranked equally within a replicate "
"Can either take the 'average' ranks or a 'random' rearrangement of the ordinal ranks \n"
"Options: 'average', 'random' \n"
"Default: 'average'",
dest="duphandling",
default='average',
type=str,
required=False)
parser.add_argument("--seed",
help="Specify a seed to be used in conjunction with the 'random' option for -duphandling "
"Must be between 0 and 1 \n"
"Default: 0.5",
dest="random_seed",
default=0.5,
type=float,
required=False)
parser.add_argument("-a","--alpha",
help="Alpha specifies the user cut-off value for set of reproducible peaks "
"The analysis will still produce results including peaks within the threshold calculated"
"using the binomial method \n"
"Default: 0.05",
dest="alpha",
default=0.05,
type=float,
required=False)
parser.add_argument("-s", "--size",
help="Sets the default minimum peak size when peaks are reconnected after fragmentation. \n"
"Usually the minimum peak size is determined by the size of surrounding peaks, \n "
"but in the case that there are no surrounding peaks this value will be used \n"
"Default: 20",
dest="size",
default=20,
type=int,
required=False)
args = parser.parse_args(sys.argv[1:])
self.run_1(args)
def run_1(self, args):
for i in args.input:
print(str(i.name))
print('Processing Input...')
bedfs = [bed.BedFile(str(i.name), 'Peaks') for i in args.input]
rankprod.performrankprod(bedfs,
args.minentries,
args.rankmethod,
'all',
args.duphandling,
args.random_seed,
args.alpha,
args.output,
args.size,
False,
args.fragment)
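# Example invocation (file names are illustrative):
#   chipr -i rep1.narrowPeak rep2.narrowPeak rep3.narrowPeak -m 2 -o experiment1
# reads three replicate peak files, requires peaks to overlap in at least two
# replicates, and writes output files prefixed with "experiment1".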
def main():
RankProdAnalysis()
if __name__ == "__main__":
main() | PypiClean |
/Nuitka_fixed-1.1.2-cp310-cp310-win_amd64.whl/nuitka/build/inline_copy/lib/scons-3.1.2/SCons/Tool/clang.py | #
# Copyright (c) 2001 - 2019 The SCons Foundation
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
#
# __revision__ = "src/engine/SCons/Tool/clang.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan"
# Based on SCons/Tool/gcc.py by Paweł Tomulik 2014 as a separate tool.
# Brought into the SCons mainline by Russel Winder 2017.
import os
import re
import subprocess
import sys
import SCons.Util
import SCons.Tool.cc
from SCons.Tool.clangCommon import get_clang_install_dirs
compilers = ['clang']
def generate(env):
"""Add Builders and construction variables for clang to an Environment."""
SCons.Tool.cc.generate(env)
if env['PLATFORM'] == 'win32':
# Ensure that we have a proper path for clang
clang = SCons.Tool.find_program_path(env, compilers[0],
default_paths=get_clang_install_dirs(env['PLATFORM']))
if clang:
clang_bin_dir = os.path.dirname(clang)
env.AppendENVPath('PATH', clang_bin_dir)
env['CC'] = env.Detect(compilers) or 'clang'
if env['PLATFORM'] in ['cygwin', 'win32']:
env['SHCCFLAGS'] = SCons.Util.CLVar('$CCFLAGS')
else:
env['SHCCFLAGS'] = SCons.Util.CLVar('$CCFLAGS -fPIC')
# determine compiler version
if env['CC']:
#pipe = SCons.Action._subproc(env, [env['CC'], '-dumpversion'],
pipe = SCons.Action._subproc(env, [env['CC'], '--version'],
stdin='devnull',
stderr='devnull',
stdout=subprocess.PIPE)
if pipe.wait() != 0: return
# clang -dumpversion is of no use
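# Typical first line of `clang --version` output (illustrative):
#   clang version 10.0.0-4ubuntu1
# The regex below extracts the dotted version number from such a line.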
with pipe.stdout:
line = pipe.stdout.readline()
if sys.version_info[0] > 2:
line = line.decode()
match = re.search(r'clang +version +([0-9]+(?:\.[0-9]+)+)', line)
if match:
env['CCVERSION'] = match.group(1)
def exists(env):
return env.Detect(compilers)
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4: | PypiClean |
/Encrypt_Lab-0.1.6-py3-none-any.whl/Encrypt_Lab/Input.py | from PyQt5 import QtCore, QtGui
from PyQt5.QtWidgets import (QFileDialog, QComboBox,QRadioButton, QApplication, QLabel, QLineEdit,QDoubleSpinBox,QSlider,
QCheckBox,
QPushButton, QWidget,QGroupBox,QSpinBox, QColorDialog, QFrame,QVBoxLayout)
from PyQt5.QtCore import Qt
import numpy as np
from keyword import kwlist
import builtins
import os
import re
# TODO create also oo hash that saves all needed data, so [o,oo] contains all need to save an open anything
# from TextEditor import TextEditor as HTMLEDITOR
try:
from AWidget import AWidget
except:
from Encrypt_Lab.AWidget import AWidget
# from codeeditor import QCodeEditor
# from code_editor.d import CodeEditor
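# Hypothetical usage sketch (assumption: the 'o' dict passed in opts is the
# shared state that AWidget exposes as self.o):
#   state = {}
#   Input(parent_widget, 'float', 'threshold', {'o': state, 'def': 0.5})
#   # after the user edits the spin box, state['threshold'] holds the value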
class Input(AWidget):
def __init__(self, parent, tp, name='', opts={}):
super().__init__(parent, opts)
self.real_parent = self.get_real_parent()
self.float_to_int = 100
        if hasattr(self.real_parent, 'inputs_list'):
            # note: list.append() returns None, so the old assignment to
            # self.input_list stored nothing useful
            self.real_parent.inputs_list.append(self)
else:
self.real_parent.inputs_list = [self]
self.var_name = 'var_name' in opts and opts['var_name']
if tp == 'name':
self.var_name = True
self.valid = True
self.error_box = None
self.opts = opts
self.tp = tp
self.my_parent = parent # try self.parent()
value = name in self.o and self.o[name]
if not value and 'def' in opts:
value = opts['def']
self.name = self.force_name(name)
l = None
if not ('hide name' in opts and opts['hide name']):
label = self.name
if 'title' in opts:
label = opts['title']
l = QLabel(label)
if 'center_title' in opts and opts['center_title']:
self.layout().addWidget(l, alignment=Qt.AlignCenter)
            else:
                self.layout().addWidget(l)  # was: addWidget(QLabel(label)), which ignored the styled label
if 'font' in opts and opts['font']:
l.setStyleSheet(f"font-size: {opts['font']}px")
if tp in ['string', 'python', 'code', 'varName', 'name', 'coded text']:
value = value or ''
func_name, w = ['textChanged', QLineEdit(value)]
if 'center_text' in opts and opts['center_text']:
w.setAlignment(Qt.AlignCenter)
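        # NOTE: CodeEditor is referenced below, but both candidate imports are
        # commented out at the top of this module; one of them must be restored
        # for the 'texteditor'/'code'/'coded text' types to work.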
if tp in ['texteditor','code','coded text']:
value = value or ''
func_name, w = ['textChanged', CodeEditor(value)]
# w.setStyleSheet("font: 12pt")
if tp == 'integer':
value = value or 0
func_name, w = ['valueChanged', QSpinBox()]
w.setRange(-2147483648, 2147483647) # TODO add expression for max value
w.setValue(value)
if tp == 'float':
value = value or 0.0
func_name, w = ['valueChanged', QDoubleSpinBox()]
w.setDecimals('decimals' in opts and opts['decimals'] or 8)
w.setSingleStep(0.1)
w.setRange(-2147483648, 2147483647)
w.setValue(value)
if tp == 'color':
value = value or '#aaaaaa'
func_name, w = ['textChanged', QLineEdit(value)]
b = QPushButton('Pick Color')
self.layout().addWidget(b)
b.setStyleSheet(f'background-color: {value}')
b.clicked.connect(lambda: w.setText(QColorDialog().getColor().name()))
b.clicked.connect(lambda: b.setStyleSheet(f'background-color: {w.text()}'))
if tp == 'HTML':
            # NOTE: apparent placeholder - this branch currently duplicates the
            # 'color' branch above rather than providing an HTML editor.
value = value or ''
func_name, w = ['textChanged', QLineEdit(value)]
b = QPushButton('Pick Color')
self.layout().addWidget(b)
b.setStyleSheet(f'background-color: {value}')
b.clicked.connect(lambda: w.setText(QColorDialog().getColor().name()))
b.clicked.connect(lambda: b.setStyleSheet(f'background-color: {w.text()}'))
if tp == 'slider':
if 'slide_minmax' in opts:
minmax = opts['slide_minmax']
else:
minmax = (1, 20)
            is_float = False  # renamed from 'float' to avoid shadowing the builtin
            if 'float' in opts and opts['float']:
                is_float = True
                minmax = (self.float_to_int*minmax[0], self.float_to_int*minmax[1])
                value *= self.float_to_int
            if not value:
                if minmax:
                    value = int((minmax[1] + minmax[0])/2)
                else:
                    value = 10
            func_name, w = ['valueChanged', QSlider()]
            if is_float:
                l = QLabel(str(np.round(value/self.float_to_int, 2)))
            else:
                l = QLabel(str(np.round(value, 2)))
            self.label = l
            self.layout().addWidget(l)
            w = QSlider(Qt.Horizontal)  # replaces the placeholder slider created above
            w.setMinimum(minmax[0])
            w.setMaximum(minmax[1])
            w.setValue(value)  # identical in both the float and int cases
w.setSingleStep(1)
if tp == 'radio':
self.radios = []
for a in opts['radios']:
if not value:
if 'def_index' in opts:
value = value or opts['radios'][opts['def_index']]
elif 'def' in opts:
value = value or opts['def']
radiobutton = QRadioButton(a)
radiobutton.val = a
radiobutton.setChecked(a == value)
self.radios.append(a)
radiobutton.toggled.connect(lambda v: self.update_dic(self.sender().val))
self.layout().addWidget(radiobutton)
w = None
if tp == 'single_radio':
value = value or 11
func_name, w = ['toggled', QRadioButton(name)]
if tp == 'bool':
value = value or 0
func_name, w = ['stateChanged', QCheckBox()]
w.setChecked(value)
if tp in ['file', 'new_file', 'folder', 'server files', 'files']:
value = value or ''
func_name, w = ['textChanged', QLineEdit(value)]
b = QPushButton(f"Open {tp}")
if tp != 'server files':
self.layout().addWidget(b)
        def safe_relpath(path):  # parameter renamed from 'str' to avoid shadowing the builtin
            if path == '':
                return ''
            return os.path.relpath(path)
if tp in ['file', 'server files', 'files']:
b.clicked.connect(lambda: w.setText(safe_relpath(QFileDialog.getOpenFileName(self)[0])))
b2 = QPushButton(f"Open from server")
self.layout().addWidget(b2)
from server.FileTree import FileTree
def lambd():
x = FileTree(w, uniq = tp != 'server files')
x.show()
self.addg = x
b2.clicked.connect(lambda: lambd())
if tp == 'new_file':
b.clicked.connect(lambda: w.setText(safe_relpath(QFileDialog.getSaveFileName(self)[0])))
if tp == 'files':
b.clicked.connect(lambda: w.setText(safe_relpath(QFileDialog.getOpenFileNames(self)[0])))
if tp == 'folder':
b.clicked.connect(lambda: w.setText(safe_relpath(QFileDialog.getExistingDirectory(None, 'Select Directory'))))
if tp in ['select', 'type']:
if tp == 'type':
opts['group'] = ('group' in opts and opts['group']) or 'general input'
groups = {
'general input': ['string', 'file','server files', 'folder', 'coded text', 'integer', 'float', 'color','bool']#'files',
}
func_name, w = ['currentTextChanged', QComboBox()]
if 'allowed types' in opts:
at = opts['allowed types']
if type(at) == str:
types = [at]
else:
types = at
else:
types = groups[opts['group']]
            for t in types:
                w.addItem(t)
value = value or w.itemText(0)
index = w.findText(value, QtCore.Qt.MatchFixedString)
w.setCurrentIndex(index)
if tp == 'group':
groups_types = {
'view_definer': ['title', 'type', 'view data'],
'input_definer': ['name', 'type', 'value'],
'h': ['type', 'value']
}
hide_name = opts['group_type'] == 'h'
titles = groups_types[opts['group_type']]
w = False
if not value:
value = {}
self.o[self.name] = value
if 'input_definer_values' in opts:
idv = opts['input_definer_values']
if type(idv) == list:
for i in range(0, len(idv)):
value[titles[i]] = idv[i]
else: # hash
for k, v in idv.items():
value[k] = v
save_until_next = None
for i in range(0, len(titles)):
title = titles[i]
            if title == 'type':  # 'is' compared string identity, not equality
temp_opts = {'o': value, 'group': 'general input', 'hide name': hide_name}
if 'allowed types' in opts:
temp_opts['allowed types'] = opts['allowed types']
save_until_next = Input(self, 'type', title, temp_opts)
else:
if not save_until_next:
Input(self, 'string', title, {'o': value})
else:
if 'type' in value:
generic_name = value['type']
else:
generic_name = 'string'
input = Input(self, generic_name, title, {'o': value, 'hide name': hide_name})
save_until_next.opts['call_on_update'] = input.transform
save_until_next = None
if w:
self.w = w
w.setMinimumHeight(23)
if tp == 'texteditor':
w.setMinimumHeight(200)
                try:
                    if parent.parent().parent().__class__.__name__ == 'InputGroup':
                        w.setMinimumHeight(78)
                except Exception:
                    pass
if tp == 'bool':
wid = QWidget()
wid_lay = QVBoxLayout()
wid.setLayout(wid_lay)
wid_lay.addWidget(w)
self.layout().addWidget(wid)
else:
self.layout().addWidget(w)
getattr(w, func_name).connect(self.update_dic)
if w or tp == 'radio':
if self.tp == 'slider' and 'float' in self.opts and self.opts['float']:
value /= self.float_to_int
self.o[self.name] = value
if 'add_delete' in opts:
btn = QPushButton('clear')
btn.clicked.connect(self.clear)
btn.setIcon(QtGui.QIcon('assets/icons/delete.png'))
self.layout().addWidget(btn)
if 'update_on_init' in opts and opts['update_on_init']:
self.update_dic(value)
if tp == 'code':
self.setMinimumSize(500,200)
self.present()
def clipboardChanged(self):
text = QApplication.clipboard().text()
QApplication.clipboard().setText('banana') # TODO
def update_dic(self, value=None):
old_val = self.o[self.name]
        try:
            value = value or self.w.toPlainText()
        except Exception:
            pass  # TODO: only text-editor widgets expose toPlainText()
if self.tp == 'slider' and 'float' in self.opts and self.opts['float']:
value/=self.float_to_int
        try:
            self.label.setText(str(np.round(value, 2)))
        except Exception:
            pass  # TODO: only slider inputs create self.label
if self.tp == 'float':
value = float(value)
self.o[self.name] = value
        if self.tp == 'text':
            1/0  # deliberately raises ZeroDivisionError: the 'text' type is unsupported
            self.o[self.name] = '"""{0}"""'.format(value)  # unreachable
if 'call_on_update' in self.opts:
self.opts['call_on_update'](self)
if 'on_update' in self.opts:
self.opts['on_update']()
if 'connect' in self.opts:
try:
self.opts['connect'](value)
except:
self.opts['connect'](value, self.name)
if 'filter' in self.opts:
filtered_value = self.opts['filter'](value)
if filtered_value != value:
self.w.setText(filtered_value)
value = filtered_value
        if 'connect' in self.opts:  # TODO: should be a separate "extra connect" hook
self.opts['connect'](value)
self.value = value
self.is_valid(value)
def is_valid(self, value):
if not self.var_name and 'regex' not in self.opts:
return True
        regex = r"\A[a-zA-Z][a-zA-Z_0-9]*\Z"  # raw string avoids invalid-escape warnings
if 'regex' in self.opts:
regex = self.opts['regex']
self.valid = re.search(regex, value) and (value not in self.reserved_words())
self.update_style()
def update_style(self):
if self.valid:
self.w.setStyleSheet('')
else:
self.w.setStyleSheet('border-color:red; border-width: 1px;border-style: solid')
def clear(self):
self.setStyleSheet("background-color:{0};".format('blue'))
if self.name in self.o:
self.o.pop(self.name)
        try:
            self.my_parent.layout().removeWidget(self)
        except Exception:
            pass  # never mind
self.real_parent.inputs_list.remove(self)
self.setParent(None)
self.hide()
def set_disable(self, flag):
self.w.setDisabled(flag)
def input_value(self):
return self.o[self.name]
def transform(self, input):
my_index = self.my_parent.layout().indexOf(self)
self.clear()
# self.setStyleSheet("background-color:{0};".format('blue'))
self.setStyleSheet("background-color:{0};".format('blue'))
new_input = Input(self.my_parent, input.input_value(), self.name, {'o': self.o, 'index': my_index, 'hide name': (
'hide name' in self.opts and self.opts['hide name'])})
input.opts['call_on_update'] = new_input.transform
def reserved_words(self):
generals = kwlist + dir(builtins)
project_files_names = [file for file in os.listdir('../') if '.py' in file]
return generals + project_files_names
def get_real_parent(self):
prnt = self.parent()
        while prnt.__class__.__name__ in ['InputGroup', 'Input', 'QWidget', 'QScrollArea', 'QStackedWidget', 'QTabWidget']:
prnt = prnt.parent()
return prnt | PypiClean |
/JacksonQuery-0.0.1-py3-none-any.whl/jacksonquery/tenure_extractor.py | import datetime
import re
import pandas as pd
import numpy as np
def extract_tenures(sublists):
"""
Extracts the portfolio manager names, start dates, and titles from the sublists.
:param sublists: (list) List of sublists from the Jackson Holdings website.
:return: (dict) Dictionary of portfolio manager names, start dates, and titles.
"""
avg_tenures = {}
for sublist in sublists:
# Extract the subaccount name from the sublist
subaccount_name = sublist[0] # Update this depending on where the name is in your sublist
# Check if 'Portfolio Manager' or 'Portfolio Managers' is in the sublist
if not any(s in sublist for s in ['Portfolio Manager: ', 'Portfolio Managers: ']):
print(f"No 'Portfolio Manager' section found in subaccount: {subaccount_name}")
avg_tenures[subaccount_name] = None
continue
# Default values
start_index = None
end_index = None
# Find the indices of 'Portfolio Managers: ' or 'Portfolio Manager: ' and 'Purchase and Redemption of Fund
# Shares '
try:
if 'Portfolio Managers: ' in sublist:
start_index = sublist.index('Portfolio Managers: ') + 1
else:
start_index = sublist.index('Portfolio Manager: ') + 1
end_index = sublist.index('Purchase and Redemption of Fund Shares ')
except ValueError:
print("Couldn't find the start or end of the table.")
# Get the lines that form the table, joining lines that end with a comma
raw_table_lines = sublist[start_index:end_index]
table_lines = []
for line in raw_table_lines:
if table_lines and line.endswith(','):
table_lines[-1] += ' ' + line
else:
table_lines.append(line)
# Initialize empty lists to store the data
names = []
joined_dates = []
titles = []
# Initialize a regex pattern for matching
pattern = r"(?P<name>[\w\s\.]+)\s*(?P<date>[A-Za-z]*\s*\d{4}\*?)\s+(?P<title>.+)"
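        # Example line the pattern is meant to match (assumed input format):
        #   "John Q. Smith March 2015* Portfolio Manager"
        # Since [\w\s\.]+ also matches the month, the raw 'name' group may end in
        # "March"; the month is detached from the name and re-joined to the date below.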
for line in table_lines:
match = re.search(pattern, line.strip())
if match:
full_name = match.group('name').strip()
if full_name: # Only proceed if full_name is not empty
date_str = match.group('date').strip().replace('*', '') # Remove '*' character
title = match.group('title').strip()
# Check if a month is present in the name field
name_parts = full_name.split()
if name_parts[-1] in ['January', 'February', 'March', 'April', 'May', 'June', 'July', 'August',
'September', 'October', 'November', 'December']:
# If the last part of the name is a month, join it with the date
date_str = name_parts[-1] + ' ' + date_str
name = ' '.join(name_parts[:-1]) # Remove the month from the name
else:
name = full_name
# Check if a month is present in the date string
if len(date_str.split()) > 1: # If there's a month and a year
joined_dates.append(datetime.datetime.strptime(date_str, '%B %Y'))
else: # If only year is present
joined_dates.append(datetime.datetime.strptime('July ' + date_str, '%B %Y'))
names.append(name)
titles.append(title)
# Create a dataframe from the data
df = pd.DataFrame({
'Name': names,
'Joined Fund Management Team In': joined_dates,
'Title': titles
})
# Calculate the tenure in years including partial years and get the average
today = datetime.datetime.now()
df['Tenure'] = (today - df['Joined Fund Management Team In']).dt.days / 365.25
avg_tenure = df['Tenure'].mean()
# Store the average tenure in the dictionary
avg_tenures[subaccount_name] = avg_tenure
# Convert the dictionary into a DataFrame
tenure_df = pd.DataFrame.from_dict(avg_tenures, orient='index', columns=['Manager Tenure (Years)'])
    # Fill missing tenures (NaN) with the mean tenure across all subaccounts
    tenure_df.replace({np.nan: tenure_df.squeeze().mean()}, inplace=True)
tenure_df.index = tenure_df.index.map(lambda x: x[:-5] if x.endswith(" Fund") else x)
tenure_df.index = tenure_df.index.str.replace(
'/', '_', regex=False).str.replace(
'®', '', regex=False).str.replace(
'.', '', regex=False).str.replace(
' ', '_', regex=False).str.replace(
'-', '_', regex=False)
tenure_df.index.name = 'Subaccount Name'
tenure_df = round(tenure_df, 2)
# Save the dataframe to a CSV file
tenure_df.to_csv('../data/manager_tenures.csv')
return tenure_df | PypiClean |
/HydPy-5.0.1-cp38-cp38-win_amd64.whl/hydpy/auxs/validtools.py | """This module implements features for the validation of (numerical) input data."""
# import...
# ...from site-packages
import numpy
# ...from hydpy
from hydpy.core import objecttools
def test_equal_shape(**kwargs) -> None:
"""Raise a ValueError if the shapes of the objects given as keywords are not equal.
If all shapes are equal, nothing happens:
>>> from hydpy.auxs.validtools import test_equal_shape
>>> test_equal_shape(arr1=numpy.array([1.0, 2.0]),
... arr2=numpy.array([3.0, 4.0]),
... arr3=numpy.array([5.0, 6.0]))
If at least one shape differs, the following error is raised:
>>> test_equal_shape(arr1=numpy.array([1.0, 2.0]),
... arr2=numpy.array([3.0]),
... arr3=numpy.array([5.0, 6.0]))
Traceback (most recent call last):
...
ValueError: The shapes of the following objects are not equal: \
arr1 (2,), arr2 (1,), and arr3 (2,).
    For flexibility in the function's application, it is allowed to pass only one array
or no arrays at all:
>>> test_equal_shape(arr1=numpy.array([1.0, 2.0]))
>>> test_equal_shape()
"""
names = list(kwargs.keys())
shapes = numpy.array([numpy.array(array).shape for array in kwargs.values()])
if any(shapes[:-1] != shapes[1:]):
string = objecttools.enumeration(
f"{name} {tuple(shape)}" for (name, shape) in sorted(zip(names, shapes))
)
raise ValueError(
f"The shapes of the following objects are not equal: {string}."
)
def test_non_negative(**kwargs) -> None:
"""Raise a ValueError if at least one value of the objects given as keywords is
negative.
    If all values are non-negative, nothing happens:
>>> from hydpy.auxs.validtools import test_non_negative
>>> test_non_negative(arr1=numpy.array([1.0, 2.0]),
... arr2=numpy.array([3.0, 4.0]),
... arr3=numpy.array([5.0, 6.0]))
If at least one value is negative, the following error is raised:
>>> test_non_negative(arr1=numpy.array([1.0, 2.0]),
... arr2=numpy.array([-3.0, 4.0]),
... arr3=numpy.array([5.0, 6.0]))
Traceback (most recent call last):
...
ValueError: For the following objects, at least one value is negative: arr2.
    For flexibility in the function's application, it is allowed to pass no array at all:
>>> test_non_negative()
"""
names = list(kwargs.keys())
negs = [numpy.nanmin(array) < 0.0 for array in kwargs.values()]
if any(negs):
string = objecttools.enumeration(
name for name, neg in sorted(zip(names, negs)) if neg
)
raise ValueError(
f"For the following objects, at least one value is negative: {string}."
) | PypiClean |
/Marmir-0.1.3.tar.gz/Marmir-0.1.3/mm/lib/xlwt_0_7_2/Worksheet.py | import BIFFRecords
import Bitmap
import Formatting
import Style
import tempfile
class Worksheet(object):
from Workbook import Workbook
#################################################################
## Constructor
#################################################################
def __init__(self, sheetname, parent_book, cell_overwrite_ok=False):
import Row
self.Row = Row.Row
import Column
self.Column = Column.Column
self.__name = sheetname
self.__parent = parent_book
self._cell_overwrite_ok = cell_overwrite_ok
self.__rows = {}
self.__cols = {}
self.__merged_ranges = []
self.__bmp_rec = ''
self.__show_formulas = 0
self.__show_grid = 1
self.__show_headers = 1
self.__panes_frozen = 0
### self.__show_empty_as_zero = 1 ### deprecated with extreme prejudice 2009-05-19
self.show_zero_values = 1
self.__auto_colour_grid = 1
self.__cols_right_to_left = 0
self.__show_outline = 1
self.__remove_splits = 0
self.__selected = 0
# RED HERRING ALERT: "sheet_visible" is a clone of the "selected" attribute.
# Typically a workbook created by the Excel UI will have one sheet
# (the sheet that was selected when the user saved it)
# with both bits set to 1, and all other sheets will have both
# bits set to 0. The true visibility of the sheet is found in the "visibility"
# attribute obtained from the BOUNDSHEET record.
self.__sheet_visible = 0
self.__page_preview = 0
self.__first_visible_row = 0
self.__first_visible_col = 0
self.__grid_colour = 0x40
self.__preview_magn = 60 # percent
self.__normal_magn = 100 # percent
self.visibility = 0 # from/to BOUNDSHEET record.
self.__vert_split_pos = None
self.__horz_split_pos = None
self.__vert_split_first_visible = None
self.__horz_split_first_visible = None
self.__split_active_pane = None
self.__row_gut_width = 0
self.__col_gut_height = 0
self.__show_auto_page_breaks = 1
self.__dialogue_sheet = 0
self.__auto_style_outline = 0
self.__outline_below = 0
self.__outline_right = 0
self.__fit_num_pages = 0
self.__show_row_outline = 1
self.__show_col_outline = 1
self.__alt_expr_eval = 0
self.__alt_formula_entries = 0
self.__row_default_height = 0x00FF
self.row_default_height_mismatch = 0
self.row_default_hidden = 0
self.row_default_space_above = 0
self.row_default_space_below = 0
self.__col_default_width = 0x0008
self.__calc_mode = 1
self.__calc_count = 0x0064
self.__RC_ref_mode = 1
self.__iterations_on = 0
self.__delta = 0.001
self.__save_recalc = 0
self.__print_headers = 0
self.__print_grid = 0
self.__grid_set = 1
self.__vert_page_breaks = []
self.__horz_page_breaks = []
self.__header_str = '&P'
self.__footer_str = '&F'
self.__print_centered_vert = 0
self.__print_centered_horz = 1
self.__left_margin = 0.3 #0.5
self.__right_margin = 0.3 #0.5
self.__top_margin = 0.61 #1.0
self.__bottom_margin = 0.37 #1.0
self.__paper_size_code = 9 # A4
self.__print_scaling = 100
self.__start_page_number = 1
self.__fit_width_to_pages = 1
self.__fit_height_to_pages = 1
self.__print_in_rows = 1
self.__portrait = 1
self.__print_not_colour = 0
self.__print_draft = 0
self.__print_notes = 0
self.__print_notes_at_end = 0
self.__print_omit_errors = 0
self.__print_hres = 0x012C # 300 dpi
self.__print_vres = 0x012C # 300 dpi
self.__header_margin = 0.1
self.__footer_margin = 0.1
self.__copies_num = 1
self.__wnd_protect = 0
self.__obj_protect = 0
self.__protect = 0
self.__scen_protect = 0
self.__password = ''
self.last_used_row = 0
self.first_used_row = 65535
self.last_used_col = 0
self.first_used_col = 255
self.row_tempfile = None
self.__flushed_rows = {}
self.__row_visible_levels = 0
#################################################################
## Properties, "getters", "setters"
#################################################################
def set_name(self, value):
self.__name = value
def get_name(self):
return self.__name
name = property(get_name, set_name)
#################################################################
def get_parent(self):
return self.__parent
parent = property(get_parent)
#################################################################
def get_rows(self):
return self.__rows
rows = property(get_rows)
#################################################################
def get_cols(self):
return self.__cols
cols = property(get_cols)
#################################################################
def get_merged_ranges(self):
return self.__merged_ranges
merged_ranges = property(get_merged_ranges)
#################################################################
def get_bmp_rec(self):
return self.__bmp_rec
bmp_rec = property(get_bmp_rec)
#################################################################
def set_show_formulas(self, value):
self.__show_formulas = int(value)
def get_show_formulas(self):
return bool(self.__show_formulas)
show_formulas = property(get_show_formulas, set_show_formulas)
#################################################################
def set_show_grid(self, value):
self.__show_grid = int(value)
def get_show_grid(self):
return bool(self.__show_grid)
show_grid = property(get_show_grid, set_show_grid)
#################################################################
def set_show_headers(self, value):
self.__show_headers = int(value)
def get_show_headers(self):
return bool(self.__show_headers)
show_headers = property(get_show_headers, set_show_headers)
#################################################################
def set_panes_frozen(self, value):
self.__panes_frozen = int(value)
def get_panes_frozen(self):
return bool(self.__panes_frozen)
panes_frozen = property(get_panes_frozen, set_panes_frozen)
#################################################################
### def set_show_empty_as_zero(self, value):
### self.__show_empty_as_zero = int(value)
### def get_show_empty_as_zero(self):
### return bool(self.__show_empty_as_zero)
### show_empty_as_zero = property(get_show_empty_as_zero, set_show_empty_as_zero)
#################################################################
def set_auto_colour_grid(self, value):
self.__auto_colour_grid = int(value)
def get_auto_colour_grid(self):
return bool(self.__auto_colour_grid)
auto_colour_grid = property(get_auto_colour_grid, set_auto_colour_grid)
#################################################################
def set_cols_right_to_left(self, value):
self.__cols_right_to_left = int(value)
def get_cols_right_to_left(self):
return bool(self.__cols_right_to_left)
cols_right_to_left = property(get_cols_right_to_left, set_cols_right_to_left)
#################################################################
def set_show_outline(self, value):
self.__show_outline = int(value)
def get_show_outline(self):
return bool(self.__show_outline)
show_outline = property(get_show_outline, set_show_outline)
#################################################################
def set_remove_splits(self, value):
self.__remove_splits = int(value)
def get_remove_splits(self):
return bool(self.__remove_splits)
remove_splits = property(get_remove_splits, set_remove_splits)
#################################################################
def set_selected(self, value):
self.__selected = int(value)
def get_selected(self):
return bool(self.__selected)
selected = property(get_selected, set_selected)
#################################################################
def set_sheet_visible(self, value):
self.__sheet_visible = int(value)
def get_sheet_visible(self):
return bool(self.__sheet_visible)
sheet_visible = property(get_sheet_visible, set_sheet_visible)
#################################################################
def set_page_preview(self, value):
self.__page_preview = int(value)
def get_page_preview(self):
return bool(self.__page_preview)
page_preview = property(get_page_preview, set_page_preview)
#################################################################
def set_first_visible_row(self, value):
self.__first_visible_row = value
def get_first_visible_row(self):
return self.__first_visible_row
first_visible_row = property(get_first_visible_row, set_first_visible_row)
#################################################################
def set_first_visible_col(self, value):
self.__first_visible_col = value
def get_first_visible_col(self):
return self.__first_visible_col
first_visible_col = property(get_first_visible_col, set_first_visible_col)
#################################################################
def set_grid_colour(self, value):
self.__grid_colour = value
def get_grid_colour(self):
return self.__grid_colour
grid_colour = property(get_grid_colour, set_grid_colour)
#################################################################
def set_preview_magn(self, value):
self.__preview_magn = value
def get_preview_magn(self):
return self.__preview_magn
preview_magn = property(get_preview_magn, set_preview_magn)
#################################################################
def set_normal_magn(self, value):
self.__normal_magn = value
def get_normal_magn(self):
return self.__normal_magn
normal_magn = property(get_normal_magn, set_normal_magn)
#################################################################
def set_vert_split_pos(self, value):
self.__vert_split_pos = abs(value)
def get_vert_split_pos(self):
return self.__vert_split_pos
vert_split_pos = property(get_vert_split_pos, set_vert_split_pos)
#################################################################
def set_horz_split_pos(self, value):
self.__horz_split_pos = abs(value)
def get_horz_split_pos(self):
return self.__horz_split_pos
horz_split_pos = property(get_horz_split_pos, set_horz_split_pos)
#################################################################
def set_vert_split_first_visible(self, value):
self.__vert_split_first_visible = abs(value)
def get_vert_split_first_visible(self):
return self.__vert_split_first_visible
vert_split_first_visible = property(get_vert_split_first_visible, set_vert_split_first_visible)
#################################################################
def set_horz_split_first_visible(self, value):
self.__horz_split_first_visible = abs(value)
def get_horz_split_first_visible(self):
return self.__horz_split_first_visible
horz_split_first_visible = property(get_horz_split_first_visible, set_horz_split_first_visible)
#################################################################
#def set_split_active_pane(self, value):
# self.__split_active_pane = abs(value) & 0x03
#
#def get_split_active_pane(self):
# return self.__split_active_pane
#
#split_active_pane = property(get_split_active_pane, set_split_active_pane)
#################################################################
#def set_row_gut_width(self, value):
# self.__row_gut_width = value
#
#def get_row_gut_width(self):
# return self.__row_gut_width
#
#row_gut_width = property(get_row_gut_width, set_row_gut_width)
#
#################################################################
#
#def set_col_gut_height(self, value):
# self.__col_gut_height = value
#
#def get_col_gut_height(self):
# return self.__col_gut_height
#
#col_gut_height = property(get_col_gut_height, set_col_gut_height)
#
#################################################################
def set_show_auto_page_breaks(self, value):
self.__show_auto_page_breaks = int(value)
def get_show_auto_page_breaks(self):
return bool(self.__show_auto_page_breaks)
show_auto_page_breaks = property(get_show_auto_page_breaks, set_show_auto_page_breaks)
#################################################################
def set_dialogue_sheet(self, value):
self.__dialogue_sheet = int(value)
def get_dialogue_sheet(self):
return bool(self.__dialogue_sheet)
dialogue_sheet = property(get_dialogue_sheet, set_dialogue_sheet)
#################################################################
def set_auto_style_outline(self, value):
self.__auto_style_outline = int(value)
def get_auto_style_outline(self):
return bool(self.__auto_style_outline)
auto_style_outline = property(get_auto_style_outline, set_auto_style_outline)
#################################################################
def set_outline_below(self, value):
self.__outline_below = int(value)
def get_outline_below(self):
return bool(self.__outline_below)
outline_below = property(get_outline_below, set_outline_below)
#################################################################
def set_outline_right(self, value):
self.__outline_right = int(value)
def get_outline_right(self):
return bool(self.__outline_right)
outline_right = property(get_outline_right, set_outline_right)
#################################################################
def set_fit_num_pages(self, value):
self.__fit_num_pages = value
def get_fit_num_pages(self):
return self.__fit_num_pages
fit_num_pages = property(get_fit_num_pages, set_fit_num_pages)
#################################################################
def set_show_row_outline(self, value):
self.__show_row_outline = int(value)
def get_show_row_outline(self):
return bool(self.__show_row_outline)
show_row_outline = property(get_show_row_outline, set_show_row_outline)
#################################################################
def set_show_col_outline(self, value):
self.__show_col_outline = int(value)
def get_show_col_outline(self):
return bool(self.__show_col_outline)
show_col_outline = property(get_show_col_outline, set_show_col_outline)
#################################################################
def set_alt_expr_eval(self, value):
self.__alt_expr_eval = int(value)
def get_alt_expr_eval(self):
return bool(self.__alt_expr_eval)
alt_expr_eval = property(get_alt_expr_eval, set_alt_expr_eval)
#################################################################
def set_alt_formula_entries(self, value):
self.__alt_formula_entries = int(value)
def get_alt_formula_entries(self):
return bool(self.__alt_formula_entries)
alt_formula_entries = property(get_alt_formula_entries, set_alt_formula_entries)
#################################################################
def set_row_default_height(self, value):
self.__row_default_height = value
def get_row_default_height(self):
return self.__row_default_height
row_default_height = property(get_row_default_height, set_row_default_height)
#################################################################
def set_col_default_width(self, value):
self.__col_default_width = value
def get_col_default_width(self):
return self.__col_default_width
col_default_width = property(get_col_default_width, set_col_default_width)
#################################################################
def set_calc_mode(self, value):
self.__calc_mode = value & 0x03
def get_calc_mode(self):
return self.__calc_mode
calc_mode = property(get_calc_mode, set_calc_mode)
#################################################################
def set_calc_count(self, value):
self.__calc_count = value
def get_calc_count(self):
return self.__calc_count
calc_count = property(get_calc_count, set_calc_count)
#################################################################
def set_RC_ref_mode(self, value):
self.__RC_ref_mode = int(value)
def get_RC_ref_mode(self):
return bool(self.__RC_ref_mode)
RC_ref_mode = property(get_RC_ref_mode, set_RC_ref_mode)
#################################################################
def set_iterations_on(self, value):
self.__iterations_on = int(value)
def get_iterations_on(self):
return bool(self.__iterations_on)
iterations_on = property(get_iterations_on, set_iterations_on)
#################################################################
def set_delta(self, value):
self.__delta = value
def get_delta(self):
return self.__delta
delta = property(get_delta, set_delta)
#################################################################
def set_save_recalc(self, value):
self.__save_recalc = int(value)
def get_save_recalc(self):
return bool(self.__save_recalc)
save_recalc = property(get_save_recalc, set_save_recalc)
#################################################################
def set_print_headers(self, value):
self.__print_headers = int(value)
def get_print_headers(self):
return bool(self.__print_headers)
print_headers = property(get_print_headers, set_print_headers)
#################################################################
def set_print_grid(self, value):
self.__print_grid = int(value)
def get_print_grid(self):
return bool(self.__print_grid)
print_grid = property(get_print_grid, set_print_grid)
#################################################################
#
#def set_grid_set(self, value):
# self.__grid_set = int(value)
#
#def get_grid_set(self):
# return bool(self.__grid_set)
#
#grid_set = property(get_grid_set, set_grid_set)
#
#################################################################
def set_vert_page_breaks(self, value):
self.__vert_page_breaks = value
def get_vert_page_breaks(self):
return self.__vert_page_breaks
vert_page_breaks = property(get_vert_page_breaks, set_vert_page_breaks)
#################################################################
def set_horz_page_breaks(self, value):
self.__horz_page_breaks = value
def get_horz_page_breaks(self):
return self.__horz_page_breaks
horz_page_breaks = property(get_horz_page_breaks, set_horz_page_breaks)
#################################################################
def set_header_str(self, value):
if isinstance(value, str):
value = unicode(value, self.__parent.encoding)
self.__header_str = value
def get_header_str(self):
return self.__header_str
header_str = property(get_header_str, set_header_str)
#################################################################
def set_footer_str(self, value):
if isinstance(value, str):
value = unicode(value, self.__parent.encoding)
self.__footer_str = value
def get_footer_str(self):
return self.__footer_str
footer_str = property(get_footer_str, set_footer_str)
#################################################################
def set_print_centered_vert(self, value):
self.__print_centered_vert = int(value)
def get_print_centered_vert(self):
return bool(self.__print_centered_vert)
print_centered_vert = property(get_print_centered_vert, set_print_centered_vert)
#################################################################
def set_print_centered_horz(self, value):
self.__print_centered_horz = int(value)
def get_print_centered_horz(self):
return bool(self.__print_centered_horz)
print_centered_horz = property(get_print_centered_horz, set_print_centered_horz)
#################################################################
def set_left_margin(self, value):
self.__left_margin = value
def get_left_margin(self):
return self.__left_margin
left_margin = property(get_left_margin, set_left_margin)
#################################################################
def set_right_margin(self, value):
self.__right_margin = value
def get_right_margin(self):
return self.__right_margin
right_margin = property(get_right_margin, set_right_margin)
#################################################################
def set_top_margin(self, value):
self.__top_margin = value
def get_top_margin(self):
return self.__top_margin
top_margin = property(get_top_margin, set_top_margin)
#################################################################
def set_bottom_margin(self, value):
self.__bottom_margin = value
def get_bottom_margin(self):
return self.__bottom_margin
bottom_margin = property(get_bottom_margin, set_bottom_margin)
#################################################################
def set_paper_size_code(self, value):
self.__paper_size_code = value
def get_paper_size_code(self):
return self.__paper_size_code
paper_size_code = property(get_paper_size_code, set_paper_size_code)
#################################################################
def set_print_scaling(self, value):
self.__print_scaling = value
def get_print_scaling(self):
return self.__print_scaling
print_scaling = property(get_print_scaling, set_print_scaling)
#################################################################
def set_start_page_number(self, value):
self.__start_page_number = value
def get_start_page_number(self):
return self.__start_page_number
start_page_number = property(get_start_page_number, set_start_page_number)
#################################################################
def set_fit_width_to_pages(self, value):
self.__fit_width_to_pages = value
def get_fit_width_to_pages(self):
return self.__fit_width_to_pages
fit_width_to_pages = property(get_fit_width_to_pages, set_fit_width_to_pages)
#################################################################
def set_fit_height_to_pages(self, value):
self.__fit_height_to_pages = value
def get_fit_height_to_pages(self):
return self.__fit_height_to_pages
fit_height_to_pages = property(get_fit_height_to_pages, set_fit_height_to_pages)
#################################################################
def set_print_in_rows(self, value):
self.__print_in_rows = int(value)
def get_print_in_rows(self):
return bool(self.__print_in_rows)
print_in_rows = property(get_print_in_rows, set_print_in_rows)
#################################################################
def set_portrait(self, value):
self.__portrait = int(value)
def get_portrait(self):
return bool(self.__portrait)
portrait = property(get_portrait, set_portrait)
#################################################################
def set_print_colour(self, value):
self.__print_not_colour = int(not value)
def get_print_colour(self):
return not bool(self.__print_not_colour)
print_colour = property(get_print_colour, set_print_colour)
#################################################################
def set_print_draft(self, value):
self.__print_draft = int(value)
def get_print_draft(self):
return bool(self.__print_draft)
print_draft = property(get_print_draft, set_print_draft)
#################################################################
def set_print_notes(self, value):
self.__print_notes = int(value)
def get_print_notes(self):
return bool(self.__print_notes)
print_notes = property(get_print_notes, set_print_notes)
#################################################################
def set_print_notes_at_end(self, value):
self.__print_notes_at_end = int(value)
def get_print_notes_at_end(self):
return bool(self.__print_notes_at_end)
print_notes_at_end = property(get_print_notes_at_end, set_print_notes_at_end)
#################################################################
def set_print_omit_errors(self, value):
self.__print_omit_errors = int(value)
def get_print_omit_errors(self):
return bool(self.__print_omit_errors)
print_omit_errors = property(get_print_omit_errors, set_print_omit_errors)
#################################################################
def set_print_hres(self, value):
self.__print_hres = value
def get_print_hres(self):
return self.__print_hres
print_hres = property(get_print_hres, set_print_hres)
#################################################################
def set_print_vres(self, value):
self.__print_vres = value
def get_print_vres(self):
return self.__print_vres
print_vres = property(get_print_vres, set_print_vres)
#################################################################
def set_header_margin(self, value):
self.__header_margin = value
def get_header_margin(self):
return self.__header_margin
header_margin = property(get_header_margin, set_header_margin)
#################################################################
def set_footer_margin(self, value):
self.__footer_margin = value
def get_footer_margin(self):
return self.__footer_margin
footer_margin = property(get_footer_margin, set_footer_margin)
#################################################################
def set_copies_num(self, value):
self.__copies_num = value
def get_copies_num(self):
return self.__copies_num
copies_num = property(get_copies_num, set_copies_num)
##################################################################
def set_wnd_protect(self, value):
self.__wnd_protect = int(value)
def get_wnd_protect(self):
return bool(self.__wnd_protect)
wnd_protect = property(get_wnd_protect, set_wnd_protect)
#################################################################
def set_obj_protect(self, value):
self.__obj_protect = int(value)
def get_obj_protect(self):
return bool(self.__obj_protect)
obj_protect = property(get_obj_protect, set_obj_protect)
#################################################################
def set_protect(self, value):
self.__protect = int(value)
def get_protect(self):
return bool(self.__protect)
protect = property(get_protect, set_protect)
#################################################################
def set_scen_protect(self, value):
self.__scen_protect = int(value)
def get_scen_protect(self):
return bool(self.__scen_protect)
scen_protect = property(get_scen_protect, set_scen_protect)
#################################################################
def set_password(self, value):
self.__password = value
def get_password(self):
return self.__password
password = property(get_password, set_password)
##################################################################
## Methods
##################################################################
    def get_parent(self): # NOTE: duplicate of the getter defined earlier in this class
        return self.__parent
def write(self, r, c, label="", style=Style.default_style):
self.row(r).write(c, label, style)
def merge(self, r1, r2, c1, c2, style=Style.default_style):
# Stand-alone merge of previously written cells.
# Problems: (1) style to be used should be existing style of
# the top-left cell, not an arg.
# (2) should ensure that any previous data value in
# non-top-left cells is nobbled.
# Note: if a cell is set by a data record then later
# is referenced by a [MUL]BLANK record, Excel will blank
# out the cell on the screen, but OOo & Gnu will not
# blank it out. Need to do something better than writing
# multiple records. In the meantime, avoid this method and use
# write_merge() instead.
if c2 > c1:
self.row(r1).write_blanks(c1 + 1, c2, style)
for r in range(r1+1, r2+1):
self.row(r).write_blanks(c1, c2, style)
self.__merged_ranges.append((r1, r2, c1, c2))
def write_merge(self, r1, r2, c1, c2, label="", style=Style.default_style):
assert 0 <= c1 <= c2 <= 255
assert 0 <= r1 <= r2 <= 65535
self.write(r1, c1, label, style)
if c2 > c1:
self.row(r1).write_blanks(c1 + 1, c2, style) # skip (r1, c1)
for r in range(r1+1, r2+1):
self.row(r).write_blanks(c1, c2, style)
self.__merged_ranges.append((r1, r2, c1, c2))
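    # Sketch of typical usage of write_merge(): merge A1:D1 into a single
    # labelled cell (heading_style assumed to be an XFStyle instance).
    #   ws.write_merge(0, 0, 0, 3, 'Quarterly report', heading_style)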
def insert_bitmap(self, filename, row, col, x = 0, y = 0, scale_x = 1, scale_y = 1):
bmp = Bitmap.ImDataBmpRecord(filename)
obj = Bitmap.ObjBmpRecord(row, col, self, bmp, x, y, scale_x, scale_y)
self.__bmp_rec += obj.get() + bmp.get()
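    # Sketch: ws.insert_bitmap('logo.bmp', 2, 1) places the image with its
    # top-left corner at row 2, column 1 (bitmap format constraints are
    # enforced by the Bitmap module).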
def col(self, indx):
if indx not in self.__cols:
self.__cols[indx] = self.Column(indx, self)
return self.__cols[indx]
def row(self, indx):
if indx not in self.__rows:
if indx in self.__flushed_rows:
raise Exception("Attempt to reuse row index %d of sheet %r after flushing" % (indx, self.__name))
self.__rows[indx] = self.Row(indx, self)
if indx > self.last_used_row:
self.last_used_row = indx
if indx < self.first_used_row:
self.first_used_row = indx
return self.__rows[indx]
def row_height(self, row): # in pixels
if row in self.__rows:
return self.__rows[row].get_height_in_pixels()
else:
return 17
def col_width(self, col): # in pixels
if col in self.__cols:
return self.__cols[col].width_in_pixels()
else:
return 64
##################################################################
## BIFF records generation
##################################################################
def __bof_rec(self):
return BIFFRecords.Biff8BOFRecord(BIFFRecords.Biff8BOFRecord.WORKSHEET).get()
def __update_row_visible_levels(self):
if self.__rows:
temp = max([self.__rows[r].level for r in self.__rows]) + 1
self.__row_visible_levels = max(temp, self.__row_visible_levels)
def __guts_rec(self):
self.__update_row_visible_levels()
col_visible_levels = 0
if len(self.__cols) != 0:
col_visible_levels = max([self.__cols[c].level for c in self.__cols]) + 1
return BIFFRecords.GutsRecord(
self.__row_gut_width, self.__col_gut_height, self.__row_visible_levels, col_visible_levels).get()
def __defaultrowheight_rec(self):
options = 0x0000
options |= (self.row_default_height_mismatch & 1) << 0
options |= (self.row_default_hidden & 1) << 1
options |= (self.row_default_space_above & 1) << 2
options |= (self.row_default_space_below & 1) << 3
defht = self.__row_default_height
return BIFFRecords.DefaultRowHeightRecord(options, defht).get()
def __wsbool_rec(self):
options = 0x00
options |= (self.__show_auto_page_breaks & 0x01) << 0
options |= (self.__dialogue_sheet & 0x01) << 4
options |= (self.__auto_style_outline & 0x01) << 5
options |= (self.__outline_below & 0x01) << 6
options |= (self.__outline_right & 0x01) << 7
options |= (self.__fit_num_pages & 0x01) << 8
options |= (self.__show_row_outline & 0x01) << 10
options |= (self.__show_col_outline & 0x01) << 11
options |= (self.__alt_expr_eval & 0x01) << 14
options |= (self.__alt_formula_entries & 0x01) << 15
return BIFFRecords.WSBoolRecord(options).get()
def __eof_rec(self):
return BIFFRecords.EOFRecord().get()
def __colinfo_rec(self):
result = ''
for col in self.__cols:
result += self.__cols[col].get_biff_record()
return result
def __dimensions_rec(self):
return BIFFRecords.DimensionsRecord(
self.first_used_row, self.last_used_row,
self.first_used_col, self.last_used_col
).get()
def __window2_rec(self):
# Appends SCL record.
options = 0
options |= (self.__show_formulas & 0x01) << 0
options |= (self.__show_grid & 0x01) << 1
options |= (self.__show_headers & 0x01) << 2
options |= (self.__panes_frozen & 0x01) << 3
options |= (self.show_zero_values & 0x01) << 4
options |= (self.__auto_colour_grid & 0x01) << 5
options |= (self.__cols_right_to_left & 0x01) << 6
options |= (self.__show_outline & 0x01) << 7
options |= (self.__remove_splits & 0x01) << 8
options |= (self.__selected & 0x01) << 9
options |= (self.__sheet_visible & 0x01) << 10
options |= (self.__page_preview & 0x01) << 11
if self.__page_preview:
scl_magn = self.__preview_magn
else:
scl_magn = self.__normal_magn
return BIFFRecords.Window2Record(
options, self.__first_visible_row, self.__first_visible_col,
self.__grid_colour,
self.__preview_magn, self.__normal_magn, scl_magn).get()
def __panes_rec(self):
if self.__vert_split_pos is None and self.__horz_split_pos is None:
return ""
if self.__vert_split_pos is None:
self.__vert_split_pos = 0
if self.__horz_split_pos is None:
self.__horz_split_pos = 0
if self.__panes_frozen:
if self.__vert_split_first_visible is None:
self.__vert_split_first_visible = self.__vert_split_pos
if self.__horz_split_first_visible is None:
self.__horz_split_first_visible = self.__horz_split_pos
else:
if self.__vert_split_first_visible is None:
self.__vert_split_first_visible = 0
if self.__horz_split_first_visible is None:
self.__horz_split_first_visible = 0
# inspired by pyXLWriter
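        # The magic numbers below (20*rows + 255 and 113.879*cols + 390) convert
        # row/column counts into the window coordinate units the PANE record
        # expects for non-frozen splits; they are taken from pyXLWriter as-is.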
self.__horz_split_pos = 20*self.__horz_split_pos + 255
self.__vert_split_pos = 113.879*self.__vert_split_pos + 390
if self.__vert_split_pos > 0 and self.__horz_split_pos > 0:
self.__split_active_pane = 0
elif self.__vert_split_pos > 0 and self.__horz_split_pos == 0:
self.__split_active_pane = 1
elif self.__vert_split_pos == 0 and self.__horz_split_pos > 0:
self.__split_active_pane = 2
else:
self.__split_active_pane = 3
result = BIFFRecords.PanesRecord(self.__vert_split_pos,
self.__horz_split_pos,
self.__horz_split_first_visible,
self.__vert_split_first_visible,
self.__split_active_pane).get()
return result
def __row_blocks_rec(self):
result = []
for row in self.__rows.itervalues():
result.append(row.get_row_biff_data())
result.append(row.get_cells_biff_data())
return ''.join(result)
def __merged_rec(self):
return BIFFRecords.MergedCellsRecord(self.__merged_ranges).get()
def __bitmaps_rec(self):
return self.__bmp_rec
def __calc_settings_rec(self):
result = ''
result += BIFFRecords.CalcModeRecord(self.__calc_mode & 0x01).get()
result += BIFFRecords.CalcCountRecord(self.__calc_count & 0xFFFF).get()
result += BIFFRecords.RefModeRecord(self.__RC_ref_mode & 0x01).get()
result += BIFFRecords.IterationRecord(self.__iterations_on & 0x01).get()
result += BIFFRecords.DeltaRecord(self.__delta).get()
result += BIFFRecords.SaveRecalcRecord(self.__save_recalc & 0x01).get()
return result
def __print_settings_rec(self):
result = ''
result += BIFFRecords.PrintHeadersRecord(self.__print_headers).get()
result += BIFFRecords.PrintGridLinesRecord(self.__print_grid).get()
result += BIFFRecords.GridSetRecord(self.__grid_set).get()
result += BIFFRecords.HorizontalPageBreaksRecord(self.__horz_page_breaks).get()
result += BIFFRecords.VerticalPageBreaksRecord(self.__vert_page_breaks).get()
result += BIFFRecords.HeaderRecord(self.__header_str).get()
result += BIFFRecords.FooterRecord(self.__footer_str).get()
result += BIFFRecords.HCenterRecord(self.__print_centered_horz).get()
result += BIFFRecords.VCenterRecord(self.__print_centered_vert).get()
result += BIFFRecords.LeftMarginRecord(self.__left_margin).get()
result += BIFFRecords.RightMarginRecord(self.__right_margin).get()
result += BIFFRecords.TopMarginRecord(self.__top_margin).get()
result += BIFFRecords.BottomMarginRecord(self.__bottom_margin).get()
setup_page_options = (self.__print_in_rows & 0x01) << 0
setup_page_options |= (self.__portrait & 0x01) << 1
setup_page_options |= (0x00 & 0x01) << 2
setup_page_options |= (self.__print_not_colour & 0x01) << 3
setup_page_options |= (self.__print_draft & 0x01) << 4
setup_page_options |= (self.__print_notes & 0x01) << 5
setup_page_options |= (0x00 & 0x01) << 6
setup_page_options |= (0x01 & 0x01) << 7
setup_page_options |= (self.__print_notes_at_end & 0x01) << 9
setup_page_options |= (self.__print_omit_errors & 0x03) << 10
result += BIFFRecords.SetupPageRecord(self.__paper_size_code,
self.__print_scaling,
self.__start_page_number,
self.__fit_width_to_pages,
self.__fit_height_to_pages,
setup_page_options,
self.__print_hres,
self.__print_vres,
self.__header_margin,
self.__footer_margin,
self.__copies_num).get()
return result
def __protection_rec(self):
result = ''
result += BIFFRecords.ProtectRecord(self.__protect).get()
result += BIFFRecords.ScenProtectRecord(self.__scen_protect).get()
result += BIFFRecords.WindowProtectRecord(self.__wnd_protect).get()
result += BIFFRecords.ObjectProtectRecord(self.__obj_protect).get()
result += BIFFRecords.PasswordRecord(self.__password).get()
return result
def get_biff_data(self):
result = [
self.__bof_rec(),
self.__calc_settings_rec(),
self.__guts_rec(),
self.__defaultrowheight_rec(),
self.__wsbool_rec(),
self.__colinfo_rec(),
self.__dimensions_rec(),
self.__print_settings_rec(),
self.__protection_rec(),
]
if self.row_tempfile:
self.row_tempfile.flush()
self.row_tempfile.seek(0)
result.append(self.row_tempfile.read())
result.extend([
self.__row_blocks_rec(),
self.__merged_rec(),
self.__bitmaps_rec(),
self.__window2_rec(),
self.__panes_rec(),
self.__eof_rec(),
])
return ''.join(result)
def flush_row_data(self):
if self.row_tempfile is None:
self.row_tempfile = tempfile.TemporaryFile()
self.row_tempfile.write(self.__row_blocks_rec())
for rowx in self.__rows:
self.__flushed_rows[rowx] = 1
self.__update_row_visible_levels()
self.__rows = {} | PypiClean |
/Dolly-0.4.0.tar.gz/Dolly-0.4.0/README.md | # Dolly
[](https://travis-ci.org/xp-forge/dolly)
```
_ ,--. | |
_-(_)- | |,---.| | , .
`(___) | || || | | |
// \\ `--' `---'`---'`---'`---|
`---'
```
Dolly manages multiple Git and SVN repos.
## Usage
```
dolly [-h] [-v] [-r ROOTDIR] [-c CONFIG] command [project]
```
Dolly can be run in the command line with `dolly` or `dly`.
It takes a command argument and an optional project argument.
Valid commands are:
* `help` to print the help menu
* `list` to list the repositories of the specified project (and all included projects)
* `status` to get uncommitted changes and unpushed commits
* `update` to pull and clone
* `install` to clone repositories that aren't yet on disk
* `list-dirs` to print all local repo paths (useful in scripts)
The command will run for every repository in the specified project's tree and in all included projects.
If no project parameter is given, Dolly will look for a `default` project.
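For example, to update every repository in the `games` project defined in the configuration below, or to loop over its checkout paths in a shell script:
```bash
dolly update games
# list-dirs prints one local repository path per line
for dir in $(dly list-dirs games); do
    echo "$dir"
done
```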
## Configuration
The config file can be specified with the `-c` parameter.
If no file is specified it looks in `~/.dolly.yml` and `/etc/dolly/dolly.yml`
(in that order).
### Example
```
games:
description: "HTML5 / JS games"
post_update: "echo $(date) > /tmp/games"
tree:
games/html/js:
- '2048': https://github.com/gabrielecirulli/2048.git
design-essentials:
description: "GitHub featured"
tree:
design/html/js:
- flint: https://github.com/pengwynn/flint
- normalize: https://github.com/necolas/normalize.css
html:
description: "Stuff"
tree:
design/basic:
      - modernizr: https://github.com/Modernizr/Modernizr.git
gameshtml:
description: "TL;DW"
includes:
- html
- games
default:
description: "TL;DW"
includes:
- design-essentials
- gameshtml
tree:
foo/bar:
- testproj: [email protected]:lbentrup/automation-tools.git
bar/foo:
- clumsybird: https://github.com/ellisonleao/clumsy-bird.git
```
### Extended repository syntax
At the moment, the properties "tag" and "branch" are only supported by git.
```
...
tree:
- name: 'modernizer'
repository: https://github.com/Modernizr/Modernizr.git
tag: v2.8.1
post_update: echo done
- name: 'modernizerback'
repository: https://github.com/Modernizr/Modernizr.git
branch: back
post_update: echo done
...
```
In the example, the repo '2048' will be placed in `$ROOT_DIR/games/html/js`.
The `default` project also includes the `design-essentials` and `gameshtml` projects, so when the `default` project is processed, all of the repos in this config file will be processed.
If a project is included multiple times, it is only processed once.
## Installation
Dolly can be installed using pip with
```bash
pip install dolly # Add --upgrade to update dolly
```
This will also install the following dependencies:
* argparse
* pyyaml
To install from this git repo:
```
git clone https://github.com/xp-forge/dolly.git
cd dolly
sudo python setup.py install
```
| PypiClean |
/OctoBot-Trading-2.4.23.tar.gz/OctoBot-Trading-2.4.23/octobot_trading/storage/trades_storage.py | import octobot_commons.channels_name as channels_name
import octobot_commons.enums as commons_enums
import octobot_commons.authentication as authentication
import octobot_commons.databases as commons_databases
import octobot_trading.enums as enums
import octobot_trading.constants as constants
import octobot_trading.storage.abstract_storage as abstract_storage
import octobot_trading.storage.util as storage_util
class TradesStorage(abstract_storage.AbstractStorage):
LIVE_CHANNEL = channels_name.OctoBotTradingChannelsName.TRADES_CHANNEL.value
HISTORY_TABLE = commons_enums.DBTables.TRADES.value
@abstract_storage.AbstractStorage.hard_reset_and_retry_if_necessary
async def _live_callback(
self,
exchange: str,
exchange_id: str,
cryptocurrency: str,
symbol: str,
trade: dict,
old_trade: bool
):
if trade[enums.ExchangeConstantsOrderColumns.STATUS.value] != enums.OrderStatus.CANCELED.value:
await self._get_db().log(
self.HISTORY_TABLE,
_format_trade(
trade,
self.exchange_manager,
self.plot_settings.chart,
self.plot_settings.x_multiplier,
self.plot_settings.kind,
self.plot_settings.mode
)
)
await self.trigger_debounced_flush()
self._to_update_auth_data_ids_buffer.add(trade[enums.ExchangeConstantsOrderColumns.ID.value])
await self.trigger_debounced_update_auth_data(False)
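    # Note: the flush and auth-data updates triggered above are debounced
    # (presumably in abstract_storage.AbstractStorage), so a burst of trade
    # events results in a single database write / upload.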
async def _update_auth_data(self, reset):
authenticator = authentication.Authenticator.instance()
history = [
trade.to_dict()
for trade in self.exchange_manager.exchange_personal_data.trades_manager.trades.values()
if trade.status is not enums.OrderStatus.CANCELED and trade.trade_id in self._to_update_auth_data_ids_buffer
]
if (history or reset) and authenticator.is_initialized():
# also update when history is empty to reset trade history
await authenticator.update_trades(history, self.exchange_manager.exchange_name, reset)
self._to_update_auth_data_ids_buffer.clear()
@abstract_storage.AbstractStorage.hard_reset_and_retry_if_necessary
async def _store_history(self):
database = self._get_db()
await database.replace_all(
self.HISTORY_TABLE,
[
_format_trade(
trade.to_dict(),
self.exchange_manager,
self.plot_settings.chart,
self.plot_settings.x_multiplier,
self.plot_settings.kind,
self.plot_settings.mode
)
for trade in self.exchange_manager.exchange_personal_data.trades_manager.trades.values()
if trade.status is not enums.OrderStatus.CANCELED
],
cache=False,
)
await database.flush()
def _get_db(self):
return commons_databases.RunDatabasesProvider.instance().get_trades_db(
self.exchange_manager.bot_id,
storage_util.get_account_type_suffix_from_exchange_manager(self.exchange_manager),
self.exchange_manager.exchange_name,
)
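# Plot legend implemented by _format_trade below (derived from its branches):
#   with futures positions: stop loss -> orange "x"; long take-profit -> magenta
#   and short take-profit -> blue "arrow-bar-left"; long entry -> green and
#   short entry -> red "arrow-bar-right".
#   without positions: buy -> blue "arrow-bar-right", stop loss -> orange "x",
#   anything else -> magenta "arrow-bar-left".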
def _format_trade(trade_dict, exchange_manager, chart, x_multiplier, kind, mode):
tag = f"{trade_dict[enums.ExchangeConstantsOrderColumns.TAG.value]} " \
if trade_dict[enums.ExchangeConstantsOrderColumns.TAG.value] else ""
symbol = trade_dict[enums.ExchangeConstantsOrderColumns.SYMBOL.value]
trade_side = trade_dict[enums.ExchangeConstantsOrderColumns.SIDE.value]
is_using_positions = False
color = shape = None
if exchange_manager.is_future:
positions = exchange_manager.exchange_personal_data.positions_manager.get_symbol_positions(symbol=symbol)
if positions:
is_using_positions = True
# trading_side = next(iter(positions)).side
# if trading_side is enums.PositionSide.LONG:
if "stop_loss" in trade_dict[enums.ExchangeConstantsOrderColumns.TYPE.value]:
shape = "x"
color = "orange"
elif trade_dict[enums.ExchangeConstantsOrderColumns.REDUCE_ONLY.value] is True:
if trade_side == enums.TradeOrderSide.SELL.value:
# long tp
color = "magenta"
shape = "arrow-bar-left"
else:
# short tp
color = "blue"
shape = "arrow-bar-left"
else:
if trade_side == enums.TradeOrderSide.BUY.value:
# long entry
color = "green"
shape = "arrow-bar-right"
else:
# short entry
color = "red"
shape = "arrow-bar-right"
if not is_using_positions:
if trade_side == enums.TradeOrderSide.BUY.value:
color = "blue"
shape = "arrow-bar-right"
elif "stop_loss" in trade_dict[enums.ExchangeConstantsOrderColumns.TYPE.value]:
color = "orange"
shape = "x"
else:
color = "magenta"
shape = "arrow-bar-left"
fee = trade_dict[enums.ExchangeConstantsOrderColumns.FEE.value]
fee_cost = float(fee[enums.FeePropertyColumns.COST.value] if
fee and fee[enums.FeePropertyColumns.COST.value] else 0)
return {
constants.STORAGE_ORIGIN_VALUE: TradesStorage.sanitize_for_storage(trade_dict),
commons_enums.DisplayedElementTypes.CHART.value: chart,
commons_enums.DBRows.SYMBOL.value: trade_dict[enums.ExchangeConstantsOrderColumns.SYMBOL.value],
commons_enums.DBRows.FEES_AMOUNT.value: fee_cost,
commons_enums.DBRows.FEES_CURRENCY.value: fee[enums.FeePropertyColumns.CURRENCY.value]
if trade_dict[enums.ExchangeConstantsOrderColumns.FEE.value] else "",
commons_enums.DBRows.ID.value: trade_dict[enums.ExchangeConstantsOrderColumns.ID.value],
commons_enums.DBRows.TRADING_MODE.value: exchange_manager.trading_modes[0].get_name(),
commons_enums.PlotAttributes.X.value: trade_dict[enums.ExchangeConstantsOrderColumns.TIMESTAMP.value] * x_multiplier,
commons_enums.PlotAttributes.TEXT.value: f"{tag}{trade_dict[enums.ExchangeConstantsOrderColumns.TYPE.value]} "
f"{trade_dict[enums.ExchangeConstantsOrderColumns.SIDE.value]} "
f"{trade_dict[enums.ExchangeConstantsOrderColumns.AMOUNT.value]} "
f"{trade_dict[enums.ExchangeConstantsOrderColumns.QUANTITY_CURRENCY.value]} "
f"at {trade_dict[enums.ExchangeConstantsOrderColumns.PRICE.value]}",
commons_enums.PlotAttributes.TYPE.value: trade_dict[enums.ExchangeConstantsOrderColumns.TYPE.value],
commons_enums.PlotAttributes.VOLUME.value: float(trade_dict[enums.ExchangeConstantsOrderColumns.AMOUNT.value]),
commons_enums.PlotAttributes.Y.value: float(trade_dict[enums.ExchangeConstantsOrderColumns.PRICE.value]),
commons_enums.PlotAttributes.KIND.value: kind,
commons_enums.PlotAttributes.SIDE.value: trade_dict[enums.ExchangeConstantsOrderColumns.SIDE.value],
commons_enums.PlotAttributes.MODE.value: mode,
commons_enums.PlotAttributes.SHAPE.value: shape,
commons_enums.PlotAttributes.COLOR.value: color,
commons_enums.PlotAttributes.SIZE.value: "10",
"cost": float(trade_dict[enums.ExchangeConstantsOrderColumns.COST.value]),
"state": trade_dict[enums.ExchangeConstantsOrderColumns.STATUS.value],
    }
# /GxAutoTestManager-1.0.1.tar.gz/GxAutoTestManager-1.0.1/AutoTestManager/client/ai_client.py
import socket
import time
import sys
import threading
import json
import os
import multiprocessing
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
sys.path.append(BASE_DIR)
sys.path.append(os.path.join(BASE_DIR,"../server/modules"))
import log
import ftp
import msg
import sendmsg
class AiClient(object):
def __init__(self):
self.rl = log.RunningLog()
self.local_host = '192.168.111.101'
self.local_port = '29999'
self.sm = sendmsg.SendMsg()
self.gm = msg.GenMsg()
        self.test_entrance = 'Test_Entrance'
        # assumed additions: heartbeat() references these attributes, but the
        # original source never defined them (which is presumably why heartbeat
        # is disabled in start()); set real values before enabling it
        self.remote_ip = None
        self.remote_heartbeat_port = None
def tcp_bind(self):
ip = ''
port = int(self.local_port)
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.bind((ip, port))
s.listen(30)
return s
def recv_msg(self, sock, addr):
msg = ''.encode()
start = 0
end = 0
buffer_size = 4096
while True:
            data = sock.recv(buffer_size)
            if not data:
                break  # the peer closed the connection; avoid looping forever
if data.find('§§START§§'.encode()) >= 0:
start = 1
if start:
msg += data
if msg.find('§§END§§'.encode()) >= 0:
start = 0
end = 1
if end:
msg = self.sm.unpack(msg)
break
sock.close()
return msg
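    # Wire format implied above: every message is wrapped between the byte
    # markers "§§START§§" and "§§END§§"; recv_msg accumulates recv() chunks
    # until the end marker arrives, then hands the whole buffer to
    # SendMsg.unpack(), whose exact payload encoding lives in the sendmsg
    # module (not shown here).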
def process_msg(self):
tfd = self.tcp_bind()
print(tfd)
while True:
sock, addr = tfd.accept()
print(sock, addr)
msg = self.recv_msg(sock, addr)
self.msg_handler(msg)
def _uploadfile(self, local_bin = '', bin_url = '', jenkins_name = '', jenkins_build = '',\
board = '', dvb = 'dvb', now = ''):
if local_bin:
fl = ftp.FtpFunctionLib(\
Host = '192.168.111.101', UserName = 'guoxin', Password = 'Guoxin88156088',\
FtpLoginBasicPath= './' + dvb, LoginPort = 2121)
ret = fl.FtpLogin()
print(ret)
if not jenkins_name:
jenkins_name = 'no_jenkins'
if not jenkins_build:
jenkins_build = 'no_build'
if bin_url:
remote_path = os.path.dirname(bin_url)
remote_bin = os.path.dirname(bin_url) + '/' + now + '_' + \
os.path.basename(bin_url)
else:
#remote_path = jenkins_name + '/' + jenkins_build + '/' + board
#remote_bin = remote_path + '/' + now + '_' + os.path.basename(local_bin)
remote_path = 'Test_Entrance'
remote_bin = remote_path + '/' + os.path.basename(local_bin)
fl.Mkdirs(remote_path)
fl.UploadFile(local_bin, remote_bin)
fl.FtpQuit()
bin_url = dvb + '/' + remote_bin
return bin_url
def _movefile(self, bin_url = '', move_url = ''):
fl = ftp.FtpFunctionLib(\
Host = '192.168.111.101', UserName = 'guoxin', Password = 'Guoxin88156088',\
FtpLoginBasicPath= './', LoginPort = 2121)
ret = fl.FtpLogin()
print(ret)
new_name = os.path.dirname(bin_url) + '/' + os.path.basename(bin_url).split('@@')[-1]
fl.RenameFile(bin_url, new_name)
fl.MoveFile(new_name, move_url)
#fl.RenameFile(new_name, bin_url)
fl.FtpQuit()
def _renamefile(self):
fl = ftp.FtpFunctionLib(\
Host = '192.168.111.101', UserName = 'guoxin', Password = 'Guoxin88156088',\
FtpLoginBasicPath= './', LoginPort = 2121)
ret = fl.FtpLogin()
print(ret)
        local_file = '[STOP]'
os.system('touch a.txt')
remote_path = 'Test_Entrance'
remote_bin = remote_path + '/' + local_file
fl.Mkdirs(remote_path)
fl.UploadFile('a.txt', remote_bin)
fl.FtpQuit()
        os.system('rm a.txt')  # remove the local temp file; the original removed '[STOP]', which never exists locally
def _download(self, remote_path):
fl = ftp.FtpFunctionLib(\
Host = '192.168.111.101', UserName = 'guoxin', Password = 'Guoxin88156088',\
FtpLoginBasicPath= './', LoginPort = 2121)
ret = fl.FtpLogin()
local_path = os.path.basename(remote_path)
fl.DownloadFile(remote_path, local_path)
fl.FtpQuit()
return local_path
def start_test(self, msg):
bin_url = msg['message_content']['ai_info']['task_info']['bin_url']
#move_url = self.test_entrance + '/' + os.path.basename(bin_url).split('@@')[-1]
#self._movefile(bin_url, move_url)
self._movefile(bin_url, self.test_entrance)
def stop_test(self, msg):
self._renamefile()
def _find_result(self, task_bin):
fl = ftp.FtpFunctionLib(\
Host = '192.168.111.101', UserName = 'guoxin', Password = 'Guoxin88156088',\
FtpLoginBasicPath= './', LoginPort = 2121)
ret = fl.FtpLogin()
file_list = fl.QueryRemoteFileList('./Test_Entrance/history')
print(file_list)
result_path = None
excel_list = []
for i in file_list:
if i.endswith(task_bin):
result_path = './Test_Entrance/history/' + i + '/' + i
print('>>>>>>>>>>>>>>>')
print(result_path)
excel_list = fl.QueryRemoteFileList(result_path)
print(excel_list)
print('>>>>>>>>>>>>>>>')
break
for i in excel_list:
if i.endswith('xlsx'):
result_path += '/' + i
fl.FtpQuit()
return result_path
def get_test_result(self, msg):
print('>>>>>>>>>>>>>')
print(msg)
print('>>>>>>>>>>>>>')
task_bin = msg['message_content']['task_bin']
result_path = self._find_result(task_bin)
print(result_path)
filename = self._download(result_path)
filesize_bytes = os.path.getsize(filename)
info = {
'filename': filename,
'filesize_bytes': filesize_bytes,
}
msg['answer'] = info
if msg['sync'] == True:
print(11111111111)
self.sm.answer_msg(msg)
with open(filename, 'rb') as f:
data = f.read()
self.sm.sendall(msg, data)
#self._download()
def msg_handler(self, msg):
if msg['message_name'] == 'start':
self.start_test(msg)
elif msg['message_name'] == 'stop':
self.stop_test(msg)
elif msg['message_name'] == 'get_test_result':
self.get_test_result(msg)
def heartbeat(self):
while True:
u = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
data = {
'client_ip':self.local_host,
'client_port':self.local_port,
}
data_dumped = json.dumps(data)
            u.sendto(data_dumped.encode(), (self.remote_ip, self.remote_heartbeat_port))
u.close()
time.sleep(5)
def start(self):
#p_heartbeat = multiprocessing.Process(target = self.heartbeat)
#p_heartbeat.start()
self.process_msg()
def main():
ac = AiClient()
ac.start()
if __name__ == "__main__":
    main()
# /JoUtil-1.3.3-py3-none-any.whl/JoTools/operateDeteRes.py
import os
import cv2
import random
import collections
from collections import Counter
import numpy as np
import prettytable
from .txkjRes.deteRes import DeteRes
from .txkjRes.deteAngleObj import DeteAngleObj
from .txkjRes.deteObj import DeteObj
from .utils.DecoratorUtil import DecoratorUtil
from .utils.FileOperationUtil import FileOperationUtil
from .txkjRes.deteXml import parse_xml, parse_xml_as_txt
from .utils.NumberUtil import NumberUtil
from .txkjRes.resTools import ResTools
from .txkjRes.deteObj import DeteObj
from .utils.StrUtil import StrUtil
import prettytable as pt
from multiprocessing import Pool
from functools import partial
from .txkj.imageAugmentation import ImageAugmentation
# todo: rewrite the functions in OperateDeteRes; much of their functionality has been moved into the DeteRes class, so calling the methods there is preferable
class DeteAcc(object):
    """Functions for validating detection results."""
    def __init__(self):
        self.label_list = ["Fnormal", "fzc_broken"]  # classes in the xml files
        self.iou_threshold = 0.4  # IOU threshold for judging that two boxes overlap
        self.color_dict = {"extra":(0,0,255), "correct":(0,255,0), "mistake":(203,192,255), "miss":(0,255,255)}  # color table
@staticmethod
def _update_check_res(res, each_res):
"""更新字典"""
for each in each_res:
if each in res:
res[each] += each_res[each]
else:
res[each] = each_res[each]
def compare_customer_and_standard(self, dete_res_standard, dete_res_customized, assign_img_path=None, save_path=None, save_xml=False, save_img=False):
"""对比 两个 DeteRes 实例 之间的差异, 自己算出来的和标准数据集之间的差异"""
check_res = []
check_dict = collections.defaultdict(lambda: 0)
# 对比标准数据集和找到的结果
for obj_s in dete_res_standard.alarms:
# 增加是否被检查出来,新属性
if not hasattr(obj_s, "be_detect"):
obj_s.be_detect = False
for obj_c in dete_res_customized.alarms:
            # add a new attribute
if not hasattr(obj_c, "is_correct"):
obj_c.is_correct = None
            # when the IOU of the two boxes exceeds the threshold, treat the detection as a match and mark the customized dete_obj as detected
if obj_c.is_correct is None:
each_iou = ResTools.cal_iou(obj_s, obj_c, ignore_tag=True)
                if each_iou >= self.iou_threshold:
if obj_s.tag == obj_c.tag:
obj_c.is_correct = True
obj_s.be_detect = True
else:
obj_c.is_correct = False
obj_c.correct_tag = obj_s.tag
obj_s.be_detect = True
        # extra detections, correct detections, misclassifications
for obj_c in dete_res_customized.alarms:
if not hasattr(obj_c, "is_correct") or obj_c.is_correct is None:
new_tag = "extra_{0}".format(obj_c.tag)
check_dict[new_tag] += 1
if new_tag not in self.color_dict:
self.color_dict[new_tag] = self.color_dict["extra"]
obj_c.tag = new_tag
elif obj_c.is_correct is True:
new_tag = "correct_{0}".format(obj_c.tag)
check_dict[new_tag] += 1
if new_tag not in self.color_dict:
self.color_dict[new_tag] = self.color_dict["correct"]
obj_c.tag = new_tag
elif obj_c.is_correct is False:
new_tag = "mistake_{0}-{1}".format(obj_c.correct_tag, obj_c.tag)
check_dict[new_tag] += 1
                # each new mistake type keeps the same color as "mistake"
if new_tag not in self.color_dict:
self.color_dict[new_tag] = self.color_dict["mistake"]
obj_c.tag = new_tag
else:
                raise ValueError("unexpected result")
check_res.append(obj_c)
        # missed detections
for obj_s in dete_res_standard.alarms:
if obj_s.be_detect is False:
new_tag = "miss_{0}".format(obj_s.tag)
check_dict[new_tag] += 1
if new_tag not in self.color_dict:
self.color_dict[new_tag] = self.color_dict["miss"]
obj_s.tag = new_tag
check_res.append(obj_s)
        # when not drawing, return the comparison statistics directly
        if save_path is False or assign_img_path is None:
            return check_dict
        # reset the detection boxes
        dete_res_standard.reset_alarms(check_res)
        # save the image
        if save_img:
            dete_res_standard.imgPath = assign_img_path
            dete_res_standard.draw_dete_res(save_path, color_dict=self.color_dict)
        # save the xml
        if save_xml:
            save_xml_path = save_path[:-4] + '.xml'
            dete_res_standard.save_to_xml(save_xml_path)
return check_dict
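    # Shape of the returned counter (hypothetical tags): {"correct_cat": 3,
    # "extra_dog": 1, "mistake_cat-dog": 2, "miss_cat": 1} -- keys are built
    # from the prefixes correct_/extra_/miss_ and mistake_<true tag>-<predicted tag>.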
def cal_model_acc(self, standard_xml_dir, customized_xml_dir, assign_img_dir, save_dir=None, assign_conf=None, save_xml=False, save_img=False):
"""计算模型的性能,通过对比标准结果和跑出来的结果,save_dir 不为 None 就保存结果"""
standard_xml_path_set = set(FileOperationUtil.re_all_file(standard_xml_dir, lambda x:str(x).endswith('.xml')))
customized_xml_path_set = set(FileOperationUtil.re_all_file(customized_xml_dir, lambda x:str(x).endswith('.xml')))
        check_res = {}  # evaluation results
        # compare
index = 0
for xml_path_s in standard_xml_path_set:
index += 1
print(index, xml_path_s)
xml_name = os.path.split(xml_path_s)[1]
xml_path_c = os.path.join(customized_xml_dir, xml_name)
assign_img_path = os.path.join(assign_img_dir, xml_name[:-3] + 'jpg')
save_img_path = os.path.join(save_dir, xml_name[:-3] + 'jpg')
            # skip drawing when the jpg file does not exist
            if not os.path.isfile(assign_img_path):
                # fixme: support both the jpg and JPG extensions
assign_img_path = os.path.join(assign_img_dir, xml_name[:-3] + 'JPG')
if not os.path.isfile(assign_img_path):
assign_img_path = None
save_img_path = None
#
if xml_path_c in customized_xml_path_set:
                # compare the differences between the two results
c_dete_res = DeteRes(xml_path_c)
if assign_conf:
c_dete_res.filter_by_conf(assign_conf)
each_check_res = self.compare_customer_and_standard(DeteRes(xml_path_s), c_dete_res, assign_img_path=assign_img_path, save_path=save_img_path, save_img=save_img, save_xml=save_xml)
                # after comparison, remove this xml path from customized_xml_path_set
customized_xml_path_set.remove(xml_path_c)
            else:
                # count as missed detections: compare against a newly created, empty DeteRes
each_check_res = self.compare_customer_and_standard(DeteRes(xml_path_s), DeteRes(), assign_img_path=assign_img_path, save_path=save_img_path, save_img=save_img, save_xml=save_xml)
            # update the statistics dict
self._update_check_res(check_res, each_check_res)
        # everything left counts as extra detections
for xml_path_c in customized_xml_path_set:
xml_name = os.path.split(xml_path_c)[1]
xml_path_c = os.path.join(customized_xml_dir, xml_name)
assign_img_path = os.path.join(assign_img_dir, xml_name[:-3] + 'jpg')
save_img_path = os.path.join(save_dir, xml_name[:-3] + 'jpg')
            # skip drawing
if not os.path.isfile(assign_img_path):
assign_img_path = None
save_img_path = None
each_check_res = self.compare_customer_and_standard(DeteRes(), DeteRes(xml_path_c), assign_img_path=assign_img_path, save_path=save_img_path, save_xml=save_xml, save_img=save_img)
self._update_check_res(check_res, each_check_res)
return check_res
# return self.cal_acc_rec(check_res)
@staticmethod
def cal_acc_rec(check_res, tag_list=None):
"""根据结果得到正确率和召回率"""
# todo 返回总体的召回率和精确率,而不是某一个标签的
res = {}
extra_dict, miss_dict, correct_dict, mistake_dict = {}, {}, {}, {}
        # build the per-category dicts
for each_key in check_res:
if str(each_key).startswith('extra_'):
new_key = each_key[len('extra_'):]
extra_dict[new_key] = check_res[each_key]
elif str(each_key).startswith('correct_'):
new_key = each_key[len('correct_'):]
correct_dict[new_key] = check_res[each_key]
elif str(each_key).startswith('miss_'):
new_key = each_key[len('miss_'):]
miss_dict[new_key] = check_res[each_key]
elif str(each_key).startswith('mistake_'):
new_key = each_key[len('mistake_'):]
mistake_dict[new_key] = check_res[each_key]
        # compute precision and recall
        # precision: of the boxes predicted as a tag, how many are true positives -> correct_a / (correct_a + mistake_x_a + extra_a)
        # recall: of the ground-truth boxes of a tag, how many were predicted correctly
if tag_list is None:
tag_list = list(correct_dict.keys())
#
for each_tag in tag_list:
each_mistake_num_to = 0
each_mistake_num_from = 0
each_correct_num = 0
each_extra_num = 0
each_miss_num = 0
#
if each_tag in correct_dict:
each_correct_num = correct_dict[each_tag]
if each_tag in extra_dict:
each_extra_num = extra_dict[each_tag]
if each_tag in miss_dict:
each_miss_num = miss_dict[each_tag]
            # count the misclassifications
for each_mistake_tag in mistake_dict:
each_from, each_to = each_mistake_tag.split('-')
if each_to == each_tag:
each_mistake_num_to += mistake_dict[each_mistake_tag]
if each_from == each_tag:
each_mistake_num_from += mistake_dict[each_mistake_tag]
            # compute this tag's precision and recall
if float(sum([each_correct_num, each_mistake_num_to, each_extra_num])) != 0:
each_acc = each_correct_num / float(sum([each_correct_num, each_mistake_num_to, each_extra_num]))
else:
each_acc = -1
if float(sum([each_correct_num, each_miss_num])) != 0:
each_rec = each_correct_num / float(sum([each_correct_num, each_miss_num, each_mistake_num_from]))
else:
each_rec = -1
#
res[each_tag] = {'acc': each_acc, 'rec': each_rec}
return res
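    # Worked example with hypothetical counts for tag "a": correct_a=8,
    # mistake_b-a=1, extra_a=1, miss_a=2, mistake_a-b=0 gives
    # acc = 8 / (8 + 1 + 1) = 0.8 and rec = 8 / (8 + 2 + 0) = 0.8.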
@staticmethod
def cal_acc_classify(standard_img_dir, customized_img_dir):
""""对比两个分类结果文件夹,分类就是将原图进行了重新的排列"""
# 拿到标签
return_res = []
standard_dict = {}
stand_label_count = {}
res_dict = {}
for each_img_path in FileOperationUtil.re_all_file(standard_img_dir, lambda x:str(x).endswith(('.jpg', '.JPG', '.png'))):
            # use the first-level folder name as the label
img_label = each_img_path[len(standard_img_dir):].strip(os.sep).split(os.sep)[0]
img_name = os.path.split(each_img_path)[1]
standard_dict[img_name] = img_label
if img_label in stand_label_count:
stand_label_count[img_label] += 1
else:
stand_label_count[img_label] = 1
#
for each_img_path in FileOperationUtil.re_all_file(customized_img_dir, lambda x:str(x).endswith(('.jpg', '.JPG', '.png'))):
            # use the first-level folder name as the label
img_label = each_img_path[len(customized_img_dir):].strip(os.sep).split(os.sep)[0]
img_name = os.path.split(each_img_path)[1]
#
standard_img_label = standard_dict[img_name]
#
if standard_img_label == img_label:
correct_str = "correct_{0}".format(standard_img_label)
if correct_str in res_dict:
res_dict[correct_str].append(each_img_path)
else:
res_dict[correct_str] = [each_img_path]
else:
mistake_str = "mistake_{0}_{1}".format(standard_img_label, img_label)
if mistake_str in res_dict:
res_dict[mistake_str].append(each_img_path)
else:
res_dict[mistake_str] = [each_img_path]
stand_label_list = list(stand_label_count.keys())
tb = prettytable.PrettyTable()
tb.field_names = [" ", "class", "num", "per"]
        # compute the recall of each class
for each in stand_label_list:
correct_str = "correct_{0}".format(each)
if correct_str in res_dict:
# print(correct_str, len(res_dict[correct_str]), NumberUtil.format_float(len(res_dict[correct_str])/stand_label_count[each], 2))
rec = NumberUtil.format_float(len(res_dict[correct_str])/stand_label_count[each], 2)
one_row = ['rec', each, "{0} | {1}".format(len(res_dict[correct_str]), stand_label_count[each]), rec]
tb.add_row(one_row)
return_res.append(one_row)
        # compute the precision of each class
for i in stand_label_list:
correct_str = "correct_{0}".format(i)
            # skip classes that were never detected
if correct_str not in res_dict:
continue
#
correct_num = len(res_dict[correct_str])
all_num = correct_num
for j in stand_label_list:
mistake_str = "mistake_{0}_{1}".format(j, i)
if mistake_str in res_dict:
all_num += len(res_dict[mistake_str])
# print("rec {0} : {1}".format(i, NumberUtil.format_float(correct_num/all_num), 2))
acc = NumberUtil.format_float(correct_num/all_num, 2)
one_row = ['acc', i, "{0} | {1}".format(correct_num, all_num), acc]
tb.add_row(one_row)
return_res.append(one_row)
mistake_tb = prettytable.PrettyTable()
mistake_tb.field_names = ["correct", "mistake", "num"]
for i in stand_label_list:
for j in stand_label_list:
mistake_str = "mistake_{0}_{1}".format(i, j)
if mistake_str in res_dict:
# print(mistake_str, len(res_dict[mistake_str]))
mistake_tb.add_row([i, j, len(res_dict[mistake_str])])
print(tb)
print(mistake_tb)
return return_res
@staticmethod
def compare_customer_and_standard_mul_classify(standard, customized):
"""对比一张多标签的标准结果和测试结果"""
dete_res_s = DeteRes(xml_path=standard)
dete_res_c = DeteRes(xml_path=customized)
standard_tags = set()
customized_tags = set()
for each_obj in dete_res_s:
standard_tags.add(each_obj.tag)
for each_obj in dete_res_c:
customized_tags.add(each_obj.tag)
miss_list = []
extra_list = []
correct_list = []
for each_tag in standard_tags:
if each_tag in customized_tags:
correct_list.append(each_tag)
else:
miss_list.append(each_tag)
for each_tag in customized_tags:
if each_tag not in standard_tags:
extra_list.append(each_tag)
check_dict = {'correct': correct_list, 'extra': extra_list, 'miss': miss_list}
return check_dict
@staticmethod
def cal_model_acc_mul_classify(standard_dir, customized_dir):
check_res = {'correct':[], 'miss':[], 'extra':[]}
for each_xml_path in FileOperationUtil.re_all_file(standard_dir, endswitch=['.xml']):
xml_name = os.path.split(each_xml_path)[1]
customized_xml_path = os.path.join(customized_dir, xml_name)
if os.path.exists(customized_xml_path):
each_check_res = DeteAcc.compare_customer_and_standard_mul_classify(each_xml_path, customized_xml_path)
for each in each_check_res:
check_res[each].extend(each_check_res[each])
check_res['correct'] = Counter(check_res['correct'])
check_res['miss'] = Counter(check_res['miss'])
check_res['extra'] = Counter(check_res['extra'])
return check_res
@staticmethod
def cal_acc_rec_mul_classify(check_res, tag_list):
"""计算多标签的精确率和召回率"""
res = {}
correct = check_res['correct']
miss = check_res['miss']
extra = check_res['extra']
for each_tag in tag_list:
if (correct[each_tag] + extra[each_tag]) == 0:
res[f'acc_{each_tag}'] = -1
else:
res[f'acc_{each_tag}'] = correct[each_tag] / (correct[each_tag] + extra[each_tag])
if (correct[each_tag] + miss[each_tag]) == 0:
res[f'rec_{each_tag}'] = -1
else:
res[f'rec_{each_tag}'] = correct[each_tag] / (correct[each_tag] + miss[each_tag])
correct_sum = sum(correct.values())
miss_sum = sum(miss.values())
extra_sum = sum(extra.values())
if (correct_sum + extra_sum) == 0:
res['acc_all'] = -1
else:
res['acc_all'] = correct_sum / (correct_sum + extra_sum)
if (correct_sum + miss_sum) == 0:
res['res_all'] = -1
else:
res['res_all'] = correct_sum / (correct_sum + miss_sum)
return res
class OperateDeteRes(object):
"""基于 DeteRes 的批量数据操作"""
@staticmethod
def filter_by_area_ratio(xml_dir, area_ratio_threshold=0.0006, save_dir=None):
"""根据面积比例阈值进行筛选"""
for each_xml_path in FileOperationUtil.re_all_file(xml_dir, lambda x: str(x).endswith(".xml")):
a = DeteRes(each_xml_path)
a.filter_by_area_ratio(area_ratio_threshold)
if save_dir is None:
os.remove(each_xml_path)
a.save_to_xml(each_xml_path)
else:
new_save_xml = os.path.join(save_dir, os.path.split(each_xml_path)[1])
a.save_to_xml(new_save_xml)
# ------------------------------------------------------------------------------------------------------------------
@staticmethod
def _get_loc_list_angle(img_name):
"""提取截图中的图片位置"""
loc_str = ""
start = False
#
for each_i in img_name[::-1]:
#
if start is True:
loc_str += each_i
if each_i == ']':
start = True
elif each_i == '[':
break
loc_list = loc_str[::-1].strip('[]').split("_")
loc_list = list(map(lambda x: float(x), loc_list))
return loc_list
@staticmethod
def get_xml_from_crop_img_angle(img_dir, region_img_dir, save_xml_dir=None):
"""从小图构建 xml,用于快速指定标签和核对问题,可以将 labelimg 设置为使用固定标签进行标注(等待修改)"""
# todo 原先的标签和现在的标签不一致,就打印出内容
if save_xml_dir is None:
save_xml_dir = region_img_dir
dete_res_dict = {}
        # collect the info of the cropped images
for each_xml_path in FileOperationUtil.re_all_file(img_dir, lambda x: str(x).endswith('.jpg')):
each_img_dir, img_name = os.path.split(each_xml_path)
            # position
            # loc = OperateDeteRes._get_loc_list(img_name)
            loc = OperateDeteRes._get_loc_list_angle(img_name)
            # original label
            region_tag = OperateDeteRes._get_crop_img_tag(img_name)
            # current label
            each_tag = each_img_dir[len(img_dir) + 1:]
            # original file name
            region_img_name = OperateDeteRes._get_region_img_name(img_name)
            # build a DeteAngleObj carrying the latest tag info
a = DeteAngleObj(cx=loc[0], cy=loc[1], w=loc[2], h=loc[3],angle=loc[4], tag=each_tag)
#
if region_img_name in dete_res_dict:
dete_res_dict[region_img_name].append(a)
else:
dete_res_dict[region_img_name] = [a]
        # merge the cropped-image info back into the full images
for each_img_name in dete_res_dict:
region_img_path = os.path.join(region_img_dir, "{0}.jpg".format(each_img_name))
            # skip files that cannot be found
if not os.path.exists(region_img_path):
continue
            # save the file
a = DeteRes(assign_img_path=region_img_path)
a.reset_alarms(dete_res_dict[each_img_name])
xml_path = os.path.join(save_xml_dir, "{0}.xml".format(each_img_name))
a.save_to_xml(xml_path)
# ------------------------------------------------------------------------------------------------------------------
@staticmethod
def get_xml_from_crop_img(crop_dir, region_img_dir, save_xml_dir=None):
"""从小图构建 xml,用于快速指定标签和核对问题,可以将 labelimg 设置为使用固定标签进行标注(等待修改)"""
if save_xml_dir is None:
save_xml_dir = region_img_dir
dete_res_dict = {}
        # collect the info of the cropped images; the expected crop file name is "<region_img_name>-+-<obj_info>.jpg"
for each_xml_path in FileOperationUtil.re_all_file(crop_dir, lambda x: str(x).endswith('.jpg')):
each_img_dir, img_name, _ = FileOperationUtil.bang_path(each_xml_path)
if img_name.count('-+-') == 1:
region_img_name = img_name.split('-+-')[0]
img_name = img_name.split('-+-')[-1]
elif img_name.count('-+-') > 1:
region_img_name = "-+-".join(img_name.split('-+-')[:-1])
img_name = img_name.split('-+-')[-1]
else:
raise ValueError("img_name need -+- : ", img_name)
            # current label
each_tag = each_img_dir[len(crop_dir) + 1:]
            # build a new DeteObj instance
a = DeteObj()
a.load_from_name_str(img_name)
a.tag = each_tag
if region_img_name in dete_res_dict:
dete_res_dict[region_img_name].append(a)
else:
dete_res_dict[region_img_name] = [a]
        # merge the cropped-image info back into the full images
for each_img_name in dete_res_dict:
            # todo: only .jpg files can be used here
region_img_path = os.path.join(region_img_dir, "{0}.jpg".format(each_img_name))
            # skip files that cannot be found
            if not os.path.exists(region_img_path):
                print("image path not found: ", region_img_path)
continue
            # save the file
a = DeteRes(assign_img_path=region_img_path)
a.reset_alarms(dete_res_dict[each_img_name])
xml_path = os.path.join(save_xml_dir, "{0}.xml".format(each_img_name))
a.refresh_obj_id()
a.save_to_xml(xml_path)
@staticmethod
def get_xml_from_crop_xml(xml_dir, region_img_dir, save_xml_dir):
"""对裁剪后再标注的小图 xml 转为大图对应的 xml 并对由同一个大图裁剪出的小图 xml 进行合并"""
# 从文件名中获取偏移位置
# todo 不支持斜框,斜框的位置信息,前两个也是对应的中心点坐标,需要计算偏移量
get_offset_from_name = lambda x: eval(x.split('-+-')[1].strip(".xml"))[:2]
# 按照原始文件名进行分组
xml_name_dict = {}
for each_xml_path in FileOperationUtil.re_all_file(xml_dir, lambda x:str(x).endswith('.xml')):
each_xml_name = FileOperationUtil.bang_path(each_xml_path)[1]
            # skip xmls that are not crops
if "-+-" not in each_xml_name:
continue
#
region_xml_name = each_xml_name[:str(each_xml_name).rfind('-+-')]
#
if region_xml_name in xml_name_dict:
xml_name_dict[region_xml_name].append(each_xml_path)
else:
xml_name_dict[region_xml_name] = [each_xml_path]
        # merge the xmls belonging to the same group
for each_xml_name in xml_name_dict:
xml_path_list = xml_name_dict[each_xml_name]
xml_name = os.path.split(xml_path_list[0])[1]
save_path = os.path.join(save_xml_dir, each_xml_name + '.xml')
            # read the first element
dete_res = DeteRes(xml_path=xml_path_list[0])
            # get the image size recorded in the xml
img_path = os.path.join(region_img_dir, each_xml_name+'.jpg')
off_x, off_y = get_offset_from_name(xml_path_list[0])
dete_res.offset(off_x, off_y)
            # merge the info of the other xmls
if len(xml_path_list) > 1:
for each in xml_path_list[1:]:
each_dete_res = DeteRes(xml_path=each)
off_x, off_y = get_offset_from_name(each)
each_dete_res.offset(off_x, off_y)
dete_res += each_dete_res
            # complete the info in the xml
if os.path.exists(img_path):
dete_res.img_path = img_path
dete_res.file_name = os.path.split(img_path)[1]
else:
continue
dete_res.save_to_xml(save_path)
# ------------------------------------------------------------------------------------------------------------------
@staticmethod
def get_assign_file_path(file_name, file_dir, suffix_list=None):
"""查找支持格式的文件,返回第一个找到的文件路径,找不到的话就返回 None,suffix --> ['.jpg', '.JPG', '.png', '.PNG']"""
if suffix_list is None:
suffix_list = ['.jpg', '.JPG', '.png', '.PNG']
for each_suffix in suffix_list:
each_file_path = os.path.join(file_dir, file_name + each_suffix)
if os.path.exists(each_file_path):
return each_file_path
return None
@staticmethod
def crop_imgs(img_dir, xml_dir, save_dir, split_by_tag=False, exclude_tag_list=None, augment_parameter=None, include_tag_list=None, save_augment=False):
"""将文件夹下面的所有 xml 进行裁剪, save_augment 保存的范围是不是扩展的范围"""
index = 0
for each_xml_path in FileOperationUtil.re_all_file(xml_dir, lambda x: str(x).endswith(".xml")):
each_img_dir, each_img_name = FileOperationUtil.bang_path(each_xml_path)[:2]
each_img_path = OperateDeteRes.get_assign_file_path(each_img_name, img_dir, suffix_list=['.jpg', '.JPG', '.png', '.PNG'])
if not each_img_path:
continue
print(index, each_xml_path)
a = DeteRes(each_xml_path)
            # convert rotated rectangles to axis-aligned rectangles
#a.angle_obj_to_obj()
a.img_path = each_img_path
try:
# a.crop_and_save(save_dir, split_by_tag=split_by_tag, exclude_tag_list=exclude_tag_list, augment_parameter=augment_parameter, include_tag_list=include_tag_list, save_augment=save_augment)
a.crop_dete_obj(save_dir, split_by_tag=split_by_tag, exclude_tag_list=exclude_tag_list, augment_parameter=augment_parameter, include_tag_list=include_tag_list, save_augment=save_augment)
index += 1
except Exception as e:
print(e)
    # fixme: this function needs a proper design first, then a rewrite
@staticmethod
def crop_imgs_angles(img_dir, xml_dir, save_dir, split_by_tag=False, exclude_tag_list=None, augment_parameter=None):
"""将文件夹下面的所有 xml 进行裁剪"""
# todo 增加裁剪指定类型
index = 0
for each_xml_path in FileOperationUtil.re_all_file(xml_dir, lambda x: str(x).endswith(".xml")):
each_img_dir, each_img_name = FileOperationUtil.bang_path(each_xml_path)[:2]
each_img_path = OperateDeteRes.get_assign_file_path(each_img_name, img_dir, suffix_list=['.jpg', '.JPG', '.png', '.PNG'])
if not each_img_path:
continue
print(index, each_xml_path)
a = DeteRes(each_xml_path)
a.img_path = each_img_path
a.crop_angle_dete_obj(save_dir, split_by_tag=split_by_tag, exclude_tag_list=exclude_tag_list, augment_parameter=augment_parameter)
index += 1
# ------------------------------------------------------------------------------------------------------------------
# @DecoratorUtil.time_this
@staticmethod
@DecoratorUtil.time_this
def get_class_count(xml_folder, print_count=False, filter_func=None):
"""查看 voc xml 的标签"""
xml_info, name_dict = [], {}
error_file = 0
        # iterate over the xmls and collect their statistics
xml_list = list(FileOperationUtil.re_all_file(xml_folder, lambda x: str(x).endswith('.xml')))
#
for xml_index, each_xml_path in enumerate(xml_list):
try:
# each_xml_info = parse_xml(each_xml_path)
each_xml_info = parse_xml_as_txt(each_xml_path)
xml_info.append(each_xml_info)
for each_name in each_xml_info['object']:
if each_name['name'] not in name_dict:
name_dict[each_name['name']] = 1
else:
name_dict[each_name['name']] += 1
except Exception as e:
print("* xml error : {0}".format(each_xml_path))
error_file += 1
print(e)
        # print the results
if print_count:
tb = pt.PrettyTable()
            # id, number of images waiting for detection, port, gpu_id in use, gpu resources consumed
tb.field_names = ["Name", "Count"]
#
sum = 0
            # sort by key
for each_name in sorted(name_dict.keys()):
tb.add_row((each_name, name_dict[each_name]))
sum += name_dict[each_name]
tb.add_row(('sum', sum))
tb.add_row(('file', len(list(xml_list))))
tb.add_row(('error file', error_file))
print(tb)
return name_dict
@staticmethod
def draw_tags(img_dir, xml_dir, save_dir, conf_threshold=None, color_dict=None):
"""将对应的 xml 和 img 进行画图"""
index = 0
if color_dict is None:
color_dict = {}
tag_count_dict = OperateDeteRes.get_class_count(xml_dir)
print(tag_count_dict.keys())
for each_tag in tag_count_dict:
color_dict[each_tag] = [random.randint(0, 255), random.randint(0, 255), random.randint(0, 255)]
for each_xml_path in FileOperationUtil.re_all_file(xml_dir, lambda x: str(x).endswith(".xml")):
each_img_name = os.path.split(each_xml_path)[1][:-3] + 'jpg'
each_img_path = os.path.join(img_dir, each_img_name)
each_save_img_path = os.path.join(save_dir, each_img_name)
if not os.path.exists(each_img_path):
continue
print(index, each_xml_path)
a = DeteRes(each_xml_path)
a.img_path = each_img_path
            # handle duplicate boxes
a.do_nms(threshold=0.1, ignore_tag=True)
            # filter by the confidence threshold
if conf_threshold is not None:
a.filter_by_conf(conf_threshold)
            # draw the results
a.draw_dete_res(each_save_img_path, color_dict=color_dict)
index += 1
    # ---------------------------------------------------- spread -------------------------------------------------------
@staticmethod
def get_area_speard(xml_dir, assign_pt=None):
"""获得面积的分布"""
area_list = []
        # iterate over the xmls and collect their statistics
xml_list = FileOperationUtil.re_all_file(xml_dir, lambda x: str(x).endswith('.xml'))
#
for xml_index, each_xml_path in enumerate(xml_list):
each_dete_res = DeteRes(each_xml_path)
for each_dete_obj in each_dete_res.alarms:
area_list.append(each_dete_obj.get_area())
#
if assign_pt:
return np.percentile(area_list, assign_pt)
else:
for i in range(10, 95, 10):
each_area = int(np.percentile(area_list, i))
print("{0}% : {1}".format(i, each_area))
# ------------------------------------------------------------------------------------------------------------------
@staticmethod
def update_tags(xml_dir, update_dict):
"""更新标签信息"""
xml_list = FileOperationUtil.re_all_file(xml_dir, lambda x: str(x).endswith('.xml'))
#
for xml_index, each_xml_path in enumerate(xml_list):
#
each_dete_res = DeteRes(each_xml_path)
each_dete_res.update_tags(update_dict)
each_dete_res.save_to_xml(each_xml_path)
@staticmethod
def resize_one_img_xml(save_dir, resize_ratio, img_xml):
"""将一张训练图片进行 resize"""
        # unpack the input data
img_path, xml_path = img_xml
#
a = DeteRes(xml_path)
#
if (not os.path.exists(img_path)) or (not os.path.exists(xml_path)):
return
#
if len(a) < 1:
return
#
im = cv2.imdecode(np.fromfile(img_path, dtype=np.uint8), 1)
im_height, im_width = im.shape[:2]
im_height_new, im_width_new = int(im_height * resize_ratio), int(im_width * resize_ratio)
im_new = cv2.resize(im, (im_width_new, im_height_new))
#
# a.height = im_height_new
# a.width = im_width_new
# a.img_path =
        # resize every obj, clamping the coordinates inside the new image
for each_obj in a:
each_obj.x1 = max(1, int(each_obj.x1 * resize_ratio))
each_obj.x2 = min(im_width_new-1, int(each_obj.x2 * resize_ratio))
each_obj.y1 = max(1, int(each_obj.y1 * resize_ratio))
each_obj.y2 = min(im_height_new-1, int(each_obj.y2 * resize_ratio))
        # save the img
save_img_path = os.path.join(save_dir, 'JPEGImages', FileOperationUtil.bang_path(xml_path)[1] + '.jpg')
cv2.imwrite(save_img_path, im_new)
        # save the xml
a.img_path = save_img_path
save_xml_path = os.path.join(save_dir, 'Annotations', FileOperationUtil.bang_path(xml_path)[1] + '.xml')
a.save_to_xml(save_xml_path)
# @DecoratorUtil.time_this
@staticmethod
def resize_train_data(img_dir, xml_dir, save_dir, resize_ratio=0.5):
"""对训练数据进行resize,resize img 和 xml """
save_img_dir = os.path.join(save_dir, 'JPEGImages')
save_xml_dir = os.path.join(save_dir, 'Annotations')
os.makedirs(save_xml_dir, exist_ok=True)
os.makedirs(save_img_dir, exist_ok=True)
for each_xml_path in FileOperationUtil.re_all_file(xml_dir, endswitch=['.xml']):
each_img_path = os.path.join(img_dir, FileOperationUtil.bang_path(each_xml_path)[1] + '.jpg')
OperateDeteRes.resize_one_img_xml(save_dir, resize_ratio, (each_img_path, each_xml_path))
@staticmethod
def count_assign_dir(dir_path, endswitc=None):
"""获取一层文件夹下面需要的文件的个数"""
dir_list, file_numb = [], 0
#
for each in os.listdir(dir_path):
each = os.path.join(dir_path, each)
if os.path.isdir(each):
dir_list.append(each)
else:
if endswitc is None:
file_numb += 1
else:
_, end_str = os.path.splitext(each)
if end_str in endswitc:
file_numb += 1
#
tb = pt.PrettyTable()
tb.field_names = ["dir", "count"]
tb.add_row(["self", file_numb])
for each_dir in dir_list:
each_file_count = len(list(FileOperationUtil.re_all_file(each_dir, endswitch=endswitc)))
file_numb += each_file_count
tb.add_row([os.path.split(each_dir)[1], each_file_count])
tb.add_row(["sum", file_numb])
print(tb)
class OperateTrainData(object):
"""对训练数据集进行处理"""
@staticmethod
def augmente_classify_img(img_dir, expect_img_num=20000):
"""扩展分类数据集, expect_img_num 每个子类的数据数目"""
"""
        The data must be organized in the following layout:
* img_dir
* tag_a
* tag_a_1
* tag_a_2
* tag_a_3
* tag_b
* tag_b_1
* tag_c
* tag_c_1
* tag_c_2
"""
img_count_dict = {}
augmente_index_dict = {}
# get img_count_dict
for each_dir in os.listdir(img_dir):
# class 1
tag_dir = os.path.join(img_dir, each_dir)
if not os.path.isdir(tag_dir):
continue
img_count_dict[each_dir] = {}
# class 2
for each_child_dir in os.listdir(tag_dir):
child_dir = os.path.join(tag_dir, each_child_dir)
if not os.path.isdir(child_dir):
continue
# record
img_count_dict[each_dir][each_child_dir] = len(list(FileOperationUtil.re_all_file(child_dir, endswitch=['.jpg', '.JPG'])))
# get augmente_index_dict
for each_tag in img_count_dict:
child_dir_num = len(img_count_dict[each_tag])
for each_child in img_count_dict[each_tag]:
each_child_img_need_num = int(expect_img_num / child_dir_num)
each_child_real_num = img_count_dict[each_tag][each_child]
# augmente_index
augmente_index = each_child_img_need_num / each_child_real_num if (each_child_img_need_num > each_child_real_num) else None
each_img_dir = os.path.join(img_dir, each_tag, each_child)
augmente_index_dict[each_img_dir] = augmente_index
# print
print(each_tag, each_child, augmente_index)
# do augmente
for each_img_dir in augmente_index_dict:
# create new dir
augmente_dir = each_img_dir + "_augmente"
os.makedirs(augmente_dir, exist_ok=True)
#
imgs_list = FileOperationUtil.re_all_file(each_img_dir, endswitch=['.jpg', '.JPG'])
# if need augmente, augmente_index is not None
if augmente_index_dict[each_img_dir]:
a = ImageAugmentation(imgs_list, augmente_dir, prob=augmente_index_dict[each_img_dir] / 12)
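                # assumption: the /12 spreads the required multiplier across the
                # augmenter's transform variants; the exact divisor depends on
                # ImageAugmentation's internals (not shown here)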
                # apply the transforms to the original image only
a.mode = 0
a.do_process()
print(augmente_index_dict[each_img_dir], each_img_dir)
class PR_chat():
"""PR曲线图表,自动出 excel 里面有 PR 曲线表"""
# todo 支持多个标签的 pr 曲线, | PypiClean |
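# Minimal usage sketch (hypothetical paths; assumes a VOC-style layout with
# matching .jpg/.xml names): count the labels and draw the boxes onto the images.
if __name__ == "__main__":
    xml_dir = r"./Annotations"
    img_dir = r"./JPEGImages"
    draw_dir = r"./draw"
    os.makedirs(draw_dir, exist_ok=True)
    OperateDeteRes.get_class_count(xml_dir, print_count=True)
    OperateDeteRes.draw_tags(img_dir, xml_dir, draw_dir)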
# /GSEIMIITB-1.1.tar.gz/GSEIMIITB-1.1/src/grc/gui/canvas/port.py
from __future__ import absolute_import, division
import math
from gi.repository import Gtk, PangoCairo, Pango
from . import colors
from .drawable import Drawable
from .. import Actions, Utils, Constants
from ...core.utils.descriptors import nop_write
from ...core.ports import Port as CorePort
class Port(CorePort, Drawable):
"""The graphical port."""
def __init__(self, parent, direction, **n):
"""
Port constructor.
Create list of connector coordinates.
"""
super(self.__class__, self).__init__(parent, direction, **n)
Drawable.__init__(self)
self._connector_coordinate = (0, 0)
self._hovering = False
self.force_show_label = False
self._area = []
self._bg_color = self._border_color = 0, 0, 0, 0
self._font_color = list(colors.PORT_NAME_COLOR)
self._line_width_factor = 1.0
self._label_layout_offsets = 0, 0
self.width_with_label = self.height = 0
self.label_layout = None
self.port_category = direction
self.port_index = n['id']
@property
def width(self):
# return self.width_with_label if self._show_label else Constants.PORT_LABEL_HIDDEN_WIDTH
return Constants.PORT_DIM
@width.setter
def width(self, value):
self.width_with_label = value
self.label_layout.set_width(value * Pango.SCALE)
def create_shapes(self):
"""Create new areas and labels for the port."""
# Note: is_horizontal indicates rotation of the block -> even though
# the top and bottom nodes are different in orientation than the left
# and right nodes, we will allocate the same rotation value to all of
# them, i.e., all of them would be horizontal or all of them would be
# vertical.
if self.port_category in ('e_top', 'b_top', 'e_bottom', 'b_bottom'):
if self.is_horizontal():
self._area = (0, 0, self.height, self.width)
elif self.is_vertical():
self._area = (0, 0, self.width, self.height)
else:
if self.parent_block.name.startswith('connector_f'):
if self.is_source:
index1 = self.port_index
pos = self.parent_block.params['output' + str(int(index1)+1)].get_value()
if pos == 'top' or pos == 'bottom':
if self.is_horizontal():
self._area = (0, 0, self.height, self.width)
elif self.is_vertical():
self._area = (0, 0, self.width, self.height)
elif pos == 'right':
if self.is_horizontal():
self._area = (0, 0, self.width, self.height)
elif self.is_vertical():
self._area = (0, 0, self.height, self.width)
else:
if self.is_horizontal():
self._area = (0, 0, self.width, self.height)
elif self.is_vertical():
self._area = (0, 0, self.height, self.width)
else:
if self.is_horizontal():
self._area = (0, 0, self.width, self.height)
elif self.is_vertical():
self._area = (0, 0, self.height, self.width)
if self.parent_block.name.startswith('connector_'):
l = self._area
else:
delta = Constants.PORT_HOVER_DELTA
l = [x - delta for x in self._area[:2]] + [x + 2*delta for x in self._area[2:]]
self.bounds_from_area(l)
self._connector_coordinate = {
0: (self.width, self.height / 2),
90: (self.height / 2, 0),
180: (0, self.height / 2),
270: (self.height / 2, self.width)
}[self.connector_direction]
def create_shapes_1(self):
if self.port_category in ('e_top', 'b_top', 'e_bottom', 'b_bottom'):
if self.is_horizontal():
self._area = (0, 0, self.height, self.width)
elif self.is_vertical():
self._area = (0, 0, self.width, self.height)
else:
if self.is_horizontal():
self._area = (0, 0, self.width, self.height)
elif self.is_vertical():
self._area = (0, 0, self.height, self.width)
delta = Constants.PORT_HOVER_DELTA
l = [x - delta for x in self._area[:2]] + [x + 2*delta for x in self._area[2:]]
self.bounds_from_area(l)
self._connector_coordinate = {
0: (self.width, self.height / 2),
90: (self.height / 2, 0),
180: (0, self.height / 2),
270: (self.height / 2, self.width)
}[self.connector_direction]
def create_labels(self, cr=None):
"""Create the labels for the socket."""
self.label_layout = Gtk.DrawingArea().create_pango_layout('')
if cr:
PangoCairo.update_layout(cr, self.label_layout)
self._line_width_factor = 1.0
layout = self.label_layout
# self.width = Constants.PORT_LABEL_HIDDEN_WIDTH
self.width = Constants.PORT_DIM
# self.height = 13
self.height = Constants.PORT_DIM
self.height += self.height % 2 # uneven height
def draw(self, cr):
"""
Draw the socket with a label.
"""
border_color = colors.BORDER_COLOR
cr.set_line_width(0.5)
cr.translate(*self.coordinate)
# this draws only the rectangles (fill is done below)
if not self.parent_block.name.startswith('connector_'):
cr.rectangle(*self._area)
if self.parent_block.name.startswith('connector_'):
cr.set_source_rgba(*colors.FLOWGRAPH_BACKGROUND_COLOR)
_color1 = colors.CONNECTION_ENABLED_COLOR
cr.move_to(*self._connector_coordinate)
x, y = self._connector_coordinate
if self.connector_direction == 0:
x -= self.width
elif self.connector_direction == 180:
x += self.width
elif self.connector_direction == 90:
y += self.width
elif self.connector_direction == 270:
y -= self.width
cr.set_line_width(Constants.temp1)
cr.line_to(x, y)
else:
if self.port_category in ('e_left', 'e_right', 'e_top', 'e_bottom'):
cr.set_source_rgba(*colors.E_PORT_COLOR)
elif self.port_category in ('b_left', 'b_right', 'b_top', 'b_bottom'):
cr.set_source_rgba(*colors.B_PORT_COLOR)
else:
cr.set_source_rgba(*colors.F_PORT_COLOR)
cr.fill_preserve()
cr.set_source_rgba(*border_color)
# commenting this out removes the fill (but the ports are still drawn)
cr.stroke()
# debug
# return
if not self._show_label:
return # this port is folded (no label)
if self.parent_block.name.startswith('connector_f'):
if self.is_source:
index1 = self.port_index
pos = self.parent_block.params['output' + str(int(index1)+1)].get_value()
if pos == 'top' or pos == 'bottom':
if self.is_horizontal():
cr.rotate(-math.pi / 2)
cr.translate(-self.width, 0)
elif pos == 'right':
if self.is_vertical():
cr.rotate(-math.pi / 2)
cr.translate(-self.width, 0)
else:
if self.is_vertical():
cr.rotate(-math.pi / 2)
cr.translate(-self.width, 0)
else:
if self.port_category in ('sink', 'source', 'e_left', 'e_right', 'b_left', 'b_right'):
if self.is_vertical():
cr.rotate(-math.pi / 2)
cr.translate(-self.width, 0)
if self.port_category in ('e_top', 'e_bottom', 'b_top', 'b_bottom'):
if self.is_horizontal():
cr.rotate(-math.pi / 2)
cr.translate(-self.width, 0)
if self.parent_block.name.startswith('connector_f'):
if self.is_source:
index1 = self.port_index
port_pos = self.parent_block.params['output' + str(int(index1)+1)].get_value()
else:
port_pos = 'left'
else:
_mirror = self.parent_block.mirror
if _mirror == 'none':
port_pos = {
'sink' : 'left',
'source' : 'right',
'e_left' : 'left',
'e_right' : 'right',
'e_top' : 'top',
'e_bottom': 'bottom',
'b_left' : 'left',
'b_right' : 'right',
'b_top' : 'top',
'b_bottom': 'bottom',
}[self.port_category]
elif _mirror == 'v':
port_pos = {
'sink' : 'right',
'source' : 'left',
'e_left' : 'right',
'e_right' : 'left',
'e_top' : 'top',
'e_bottom': 'bottom',
'b_left' : 'right',
'b_right' : 'left',
'b_top' : 'top',
'b_bottom': 'bottom',
}[self.port_category]
elif _mirror == 'h':
port_pos = {
'sink' : 'left',
'source' : 'right',
'e_left' : 'left',
'e_right' : 'right',
'e_top' : 'bottom',
'e_bottom': 'top',
'b_left' : 'left',
'b_right' : 'right',
'b_top' : 'bottom',
'b_bottom': 'top',
}[self.port_category]
_mirror = self.parent_block.mirror
d1 = {
('top' , 0) : 'L',
('top' , 90) : 'R',
('top' , 180) : 'R',
('top' , 270) : 'L',
('bottom', 0) : 'R',
('bottom', 90) : 'L',
('bottom', 180) : 'L',
('bottom', 270) : 'R',
('left' , 0) : 'R',
('left' , 90) : 'R',
('left' , 180) : 'L',
('left' , 270) : 'L',
('right' , 0) : 'L',
('right' , 90) : 'L',
('right' , 180) : 'R',
('right' , 270) : 'R',
}
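        # d1 maps each (port position, block rotation) pair to the left/right
        # alignment used for the port label below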
align = d1[(port_pos, self.rotation)]
s_font = "Sans " + str(Constants.PORT_LABEL_FONTSIZE)
self.label_layout.set_markup('<span font_desc="{font}">{name}</span>'.format(
name=Utils.encode(self.name), font=s_font))
if align == 'L':
self.label_layout.set_alignment(Pango.Alignment.LEFT)
self._label_layout_offsets = [5, -Constants.PORT_LABEL_OFFSET]
elif align == 'R':
self.label_layout.set_alignment(Pango.Alignment.RIGHT)
self._label_layout_offsets = [-5, -Constants.PORT_LABEL_OFFSET]
cr.translate(*self._label_layout_offsets)
cr.set_source_rgba(*self._font_color)
PangoCairo.update_layout(cr, self.label_layout)
PangoCairo.show_layout(cr, self.label_layout)
@property
def connector_coordinate_absolute(self):
"""the coordinate where connections may attach to"""
return [sum(c) for c in zip(
self._connector_coordinate, # relative to port
self.coordinate, # relative to block
self.parent_block.coordinate # abs
)]
@property
def connector_direction(self):
"""Get the direction that the socket points: 0,90,180,270."""
if self.parent_block.name.startswith('connector_f'):
if self.is_source:
index1 = self.port_index
pos = self.parent_block.params['output' + str(int(index1)+1)].get_value()
if pos == 'top':
return (self.rotation + 90) % 360
elif pos == 'bottom':
return (self.rotation + 270) % 360
elif pos == 'right':
return self.rotation
else:
return (self.rotation + 180) % 360
else:
_mirror = self.parent_block.mirror
r1 = self.rotation
if _mirror == 'none':
if self.is_source or self.is_e_right or self.is_b_right:
r = r1
elif self.is_sink or self.is_e_left or self.is_b_left:
r = (r1 + 180) % 360
elif self.is_e_top or self.is_b_top:
r = (r1 + 90) % 360
elif self.is_e_bottom or self.is_b_bottom:
r = (r1 + 270) % 360
elif _mirror == 'v':
if self.is_source or self.is_e_right or self.is_b_right:
r = (r1 + 180) % 360
elif self.is_sink or self.is_e_left or self.is_b_left:
r = r1
elif self.is_e_top or self.is_b_top:
r = (r1 + 90) % 360
elif self.is_e_bottom or self.is_b_bottom:
r = (r1 + 270) % 360
elif _mirror == 'h':
if self.is_source or self.is_e_right or self.is_b_right:
r = r1
elif self.is_sink or self.is_e_left or self.is_b_left:
r = (r1 + 180) % 360
elif self.is_e_top or self.is_b_top:
r = (r1 + 270) % 360
elif self.is_e_bottom or self.is_b_bottom:
r = (r1 + 90) % 360
return r
@nop_write
@property
def rotation(self):
return self.parent_block.rotation
def rotate(self, direction):
"""
Rotate the parent rather than self.
Args:
direction: degrees to rotate
"""
self.parent_block.rotate(direction)
def move(self, delta_coor):
"""Move the parent rather than self."""
self.parent_block.move(delta_coor)
@property
def highlighted(self):
return self.parent_block.highlighted
@highlighted.setter
def highlighted(self, value):
self.parent_block.highlighted = value
@property
def _show_label(self):
"""
Figure out if the label should be hidden
Returns:
true if the label should not be shown
"""
# if self._hovering:
# print('self._hovering:', self._hovering)
# print('self.force_show_label:', self.force_show_label)
# print('Actions.TOGGLE_AUTO_HIDE_PORT_LABELS.get_active():',
# Actions.TOGGLE_AUTO_HIDE_PORT_LABELS.get_active())
# return self._hovering
# return self._hovering or self.force_show_label
return self._hovering or self.force_show_label or not Actions.TOGGLE_AUTO_HIDE_PORT_LABELS.get_active()
def mouse_over(self):
"""
Called from flow graph on mouse-over
"""
# print('port.py: mouse_over:')
changed = not self._show_label
self._hovering = True
return changed
def mouse_out(self):
"""
Called from flow graph on mouse-out
"""
label_was_shown = self._show_label
self._hovering = False
        return label_was_shown != self._show_label
# /Flask_Attachment_New-1.0.1-py3-none-any.whl/flask_attachment/Manager.py
import os
import shutil
import sys
import traceback
from typing import List
from flask import current_app
from werkzeug.datastructures import FileStorage
from flask_attachment.data import OutputType
from flask_attachment.data.FileInfo import FileInfo, FileInfoBuilder
from flask_attachment.data.Operation import Operation, Mode
from flask_attachment.handler.BinaryzationHandler import BinaryzationHandler
from flask_attachment.handler.CropSquareHandler import CropSquareHandler
from flask_attachment.handler.MoveHandler import MoveHandler
from flask_attachment.handler.RemoveHandler import RemoveHandler
from flask_attachment.handler.ResizeHandler import ResizeHandler
class Options(object):
def __init__(self, source_path: str = None, archive_path: str = None):
self.source_path = source_path
self.archive_path = archive_path
def get_source_path(self) -> str:
return self.source_path
def set_source_path(self, source_path: str):
self.source_path = source_path
def get_archive_path(self) -> str:
return self.archive_path
def set_archive_path(self, archive_path: str):
self.archive_path = archive_path
class Manager(object):
def __init__(self, options: Options):
self.options = options
def download(self, tag: str, data: bytes, operations: List[Operation], limit_size: int = 0) -> List[FileInfo]:
download_files = []
file_paths = []
try:
            # create the download directory
source_path = self.options.get_source_path()
download_path = os.path.join(source_path, tag)
if not os.path.exists(download_path):
os.makedirs(download_path)
            # save the downloaded file
original_file_path = os.path.join(download_path, Operation.ORIGINAL_FILENAME)
with open(original_file_path, "wb") as code:
code.write(data)
file_size = os.stat(original_file_path).st_size
if (limit_size > 0) and (file_size > limit_size):
raise Exception('download file is too large. size: {}'.format(file_size))
            # process the file step by step
for operation in operations:
mode = operation.get_mode()
output_type = operation.get_output_type()
                # apply the operation
file_path = None
if mode == Mode.NONE:
file_path = self._handle_none(operation)
elif mode == Mode.MOVE:
file_path = self._handle_move(operation)
elif mode == Mode.REMOVE:
self._handle_remove(operation)
elif mode == Mode.CROP_SQUARE:
file_path = self._handle_crop_square(operation)
elif mode == Mode.RESIZE:
file_path = self._handle_resize(operation)
elif mode == Mode.BINARYZATION:
file_path = self._handle_binaryzation(operation)
                # decide whether to keep the output file
if (output_type is not None) and (output_type != OutputType.NONE) and (file_path is not None):
builder = FileInfoBuilder(tag, file_path, output_type)
download_file = builder.build()
download_files.append(download_file)
file_paths.append(file_path)
except Exception as exception:
self._clear_files(file_paths)
current_app.logger.error('(%s.%s) exception: %s', self.__class__.__name__, sys._getframe().f_code.co_name,
str(exception))
current_app.logger.error(traceback.format_exc())
raise exception
return download_files
def upload(self, tag: str, file: FileStorage, operations: List[Operation], limit_size: int = 0) -> List[FileInfo]:
upload_files = []
file_paths = []
try:
            # create the upload directory
_upload_path = self.options.get_source_path()
upload_path = os.path.join(_upload_path, tag)
if not os.path.exists(upload_path):
os.makedirs(upload_path)
            # save the uploaded file
original_file_path = os.path.join(upload_path, Operation.ORIGINAL_FILENAME)
file.save(original_file_path)
file_size = os.stat(original_file_path).st_size
if (limit_size > 0) and (file_size > limit_size):
raise Exception('upload file is too large. size: {}'.format(file_size))
            # process the file step by step
for operation in operations:
mode = operation.get_mode()
output_type = operation.get_output_type()
                # apply the operation
file_path = None
if mode == Mode.NONE:
file_path = self._handle_none(operation)
elif mode == Mode.MOVE:
file_path = self._handle_move(operation)
elif mode == Mode.REMOVE:
self._handle_remove(operation)
elif mode == Mode.CROP_SQUARE:
file_path = self._handle_crop_square(operation)
elif mode == Mode.RESIZE:
file_path = self._handle_resize(operation)
elif mode == Mode.BINARYZATION:
file_path = self._handle_binaryzation(operation)
                # decide whether to keep the output file
if (output_type is not None) and (output_type != OutputType.NONE) and (file_path is not None):
builder = FileInfoBuilder(tag, file_path, output_type)
upload_file = builder.build()
upload_files.append(upload_file)
file_paths.append(file_path)
except Exception as exception:
self._clear_files(file_paths)
current_app.logger.error('(%s.%s) exception: %s', self.__class__.__name__, sys._getframe().f_code.co_name,
str(exception))
current_app.logger.error(traceback.format_exc())
raise exception
return upload_files
def archive(self, tag: str, operations: List[Operation]) -> List[FileInfo]:
archive_files = []
file_paths = []
try:
            # create the archive directory
archive_path = self.options.get_archive_path()
if not os.path.exists(archive_path):
os.makedirs(archive_path)
            # process the file step by step
for operation in operations:
mode = operation.get_mode()
output_type = operation.get_output_type()
                # apply the operation
file_path = None
if mode == Mode.NONE:
file_path = self._handle_none(operation)
elif mode == Mode.MOVE:
file_path = self._handle_move(operation)
elif mode == Mode.REMOVE:
self._handle_remove(operation)
elif mode == Mode.CROP_SQUARE:
file_path = self._handle_crop_square(operation)
elif mode == Mode.RESIZE:
file_path = self._handle_resize(operation)
elif mode == Mode.BINARYZATION:
file_path = self._handle_binaryzation(operation)
                # decide whether to keep the output file
if (output_type is not None) and (output_type != OutputType.NONE) and (file_path is not None):
builder = FileInfoBuilder(tag, file_path, output_type)
archive_file = builder.build()
archive_files.append(archive_file)
file_paths.append(file_path)
except Exception as exception:
self._clear_files(file_paths)
current_app.logger.error('(%s.%s) exception: %s', self.__class__.__name__, sys._getframe().f_code.co_name,
str(exception))
current_app.logger.error(traceback.format_exc())
raise exception
return archive_files
def clear(self, tag: str):
        # get the source directory
source_path = self.options.get_source_path()
clear_path = os.path.join(source_path, tag)
        # delete the source directory
if os.path.exists(clear_path):
shutil.rmtree(clear_path)
def _handle_none(self, operation: Operation) -> str:
src_file_path = operation.get_src_file_path()
if not os.path.exists(src_file_path):
raise Exception('src file not exists. src_file_path: {}'.format(src_file_path))
return src_file_path
def _handle_move(self, operation: Operation) -> str:
src_file_path = operation.get_src_file_path()
dest_file_path = operation.get_dest_file_path()
if not os.path.exists(src_file_path):
raise Exception('src file not exists. src_file_path: {}'.format(src_file_path))
move_handler = MoveHandler(src_file_path, dest_file_path)
move_handler.handle()
return dest_file_path
def _handle_remove(self, operation: Operation):
src_file_path = operation.get_src_file_path()
if not os.path.exists(src_file_path):
raise Exception('src file does not exist. src_file_path: {}'.format(src_file_path))
remove_handler = RemoveHandler(src_file_path)
remove_handler.handle()
def _handle_crop_square(self, operation: Operation) -> str:
src_file_path = operation.get_src_file_path()
dest_file_path = operation.get_dest_file_path()
if not os.path.exists(src_file_path):
raise Exception('src file does not exist. src_file_path: {}'.format(src_file_path))
crop_square_handler = CropSquareHandler(src_file_path, dest_file_path)
crop_square_handler.handle()
return dest_file_path
def _handle_resize(self, operation: Operation) -> str:
src_file_path = operation.get_src_file_path()
dest_file_path = operation.get_dest_file_path()
dest_width = operation.get_dest_width()
dest_height = operation.get_dest_height()
if not os.path.exists(src_file_path):
raise Exception('src file does not exist. src_file_path: {}'.format(src_file_path))
resize_handler = ResizeHandler(src_file_path, dest_file_path, dest_width, dest_height)
resize_handler.handle()
return dest_file_path
def _handle_binaryzation(self, operation: Operation) -> str:
src_file_path = operation.get_src_file_path()
dest_file_path = operation.get_dest_file_path()
if not os.path.exists(src_file_path):
raise Exception('src file does not exist. src_file_path: {}'.format(src_file_path))
binaryzation_handler = BinaryzationHandler(src_file_path, dest_file_path)
binaryzation_handler.handle()
return dest_file_path
def _clear_files(self, file_paths: List[str]):
for file_path in file_paths:
os.unlink(file_path)
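# A minimal usage sketch of the pipeline above (the handler class name, the enclosing
# upload method's signature, and the Operation/OutputType arguments are hypothetical --
# their definitions live elsewhere in this package):
#
#   handler = FileHandler(options)
#   ops = [
#       Operation(mode=Mode.CROP_SQUARE, output_type=OutputType.NONE),
#       Operation(mode=Mode.RESIZE, output_type=OutputType.IMAGE),  # kept in the result
#   ]
#   file_infos = handler.upload(file, 'task-001', 1024 * 1024, ops)  # returns List[FileInfo]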
/hft_crypto_api-1.0.6.tar.gz/hft_crypto_api-1.0.6/example.py
from hftcryptoapi.bitmart import Bitmart
from hftcryptoapi.bitmart.data.constants import *
from datetime import datetime, timedelta
import time
if __name__ == '__main__':
api_key = ""
secret_key = ""
memo = ""
to_time = datetime.now()
from_time = to_time - timedelta(days=10)
symbol = "BTCUSDT"
symbol_spot = "BTC_USDT"
symbol_eth = "ETHUSDT"
client = Bitmart.BitmartClient(api_key, secret_key, memo)
# GENERAL PUBLIC
bt_status = client.get_service_status()
items = client.get_system_time()
currency_list = client.get_currency_list()
trading_pairs = client.get_list_of_trading_pairs()
symbols_details = client.get_spot_symbols_details()
contracts_details = client.get_futures_contracts_details()
symbol_details = client.get_spot_ticker_details(symbol_spot)
kline_steps = client.get_kline_steps() # Not used
print(client.get_symbol_kline(symbol="BTC_USDT", tf=TimeFrame.tf_1h, market=Market.SPOT,
from_time=from_time, to_time=to_time))
print(client.get_symbol_kline(symbol=symbol, tf=TimeFrame.tf_1h, market=Market.FUTURES,
from_time=from_time, to_time=to_time))
bt_trades = client.get_symbol_recent_trades(symbol_spot, N=100)
depth_spot = client.get_symbol_depth(symbol=symbol_spot, precision=6, size=50, market=Market.SPOT)
depth_futures = client.get_symbol_depth(symbol=symbol, precision=6, size=50, market=Market.FUTURES)
futures_open_interest = client.get_futures_open_interest(symbol)
funding_rate = client.get_futures_funding_rate(symbol)
[print(b) for b in client.get_account_balance(market=Market.FUTURES).items]
[print(b) for b in client.get_account_balance(market=Market.SPOT).items]
[print(b) for b in client.get_account_balance(market=Market.SPOT_MARGIN).items]
fee_rate = client.get_spot_user_fee_rate()
bt_trade_fee = client.get_spot_trade_fee_rate(symbol_spot)
# ------------- WEB SOCKETS
client.subscribe_private(Market.FUTURES, [BtFuturesTPrivatePositionChannel])
client.subscribe_private(Market.FUTURES, [BtFuturesTPrivateAssetChannel], ['ETH', 'USDT'])
client.subscribe_public(Market.FUTURES, [BtFuturesTickerChannel])
client.subscribe_public(Market.FUTURES, [BtFuturesSocketKlineChannels.K_LINE_CHANNEL_1HOUR,
BtFuturesSocketDepthChannels.DEPTH_CHANNEL_5LEVEL], [symbol])
client.start_websockets(Market.FUTURES, on_message=lambda message: print(f' {message}'))
client.subscribe_public(Market.SPOT, [BtSpotSocketKlineChannels.K_LINE_CHANNEL_1HOUR,
BtSpotSocketDepthChannels.DEPTH_CHANNEL_5LEVEL,
BtSpotTradeChannel,
BtSpotTickerChannel],
symbols=[symbol_spot])
client.subscribe_private(Market.SPOT, [BtSpotOrderChannel], symbols=[symbol_spot])
client.start_websockets(Market.SPOT, on_message=lambda message: print(f' {message}'))
client.wait_for_socket_connection(market=Market.FUTURES)
client.wait_for_socket_connection(market=Market.SPOT, is_public=False)
input("Press any key")
client.unsubscribe_private(Market.FUTURES, [BtFuturesTPrivatePositionChannel])
client.unsubscribe_private(Market.FUTURES, [BtFuturesSocketDepthChannels], [symbol])
client.stop_websockets(Market.FUTURES)
client.stop_websockets(Market.SPOT)
# ------------- ORDER
order = client.submit_order(market=Market.SPOT_MARGIN, symbol="BTC_USDT", side=SpotSide.BUY, size=0.005, price=1000)
order = client.submit_order(market=Market.SPOT_MARGIN, order_type=OrderType.MARKET,
symbol="BTC_USDT", side=SpotSide.BUY, size=6, price=1000)
order = client.submit_order(market=Market.SPOT_MARGIN, order_type=OrderType.MARKET,
symbol="BTC_USDT", side=SpotSide.SELL, size=6, price=1000)
order = client.update_order_details(order)
client.cancel_order(order)
order = client.submit_order(market=Market.FUTURES, symbol="ETHUSDT", side=FuturesSide.BUY_OPEN_LONG,
size=1, price=70, open_type=OrderOpenType.CROSS)
client.update_order_details(order)
client.cancel_order(order)
order = client.update_order_details(order)
print(client.submit_order(market=Market.FUTURES, symbol=symbol_eth, order_type=OrderType.MARKET,
side=FuturesSide.SELL_OPEN_SHORT,
size=1, open_type=OrderOpenType.CROSS))
positions = client.get_futures_position_details(symbol_eth)
amount = [p for p in positions if p.symbol == "ETHUSDT" and p.current_amount != 0][0].current_amount
print(client.close_futures_position(symbol=symbol_eth, position_side=Position.SHORT, open_type=OrderOpenType.CROSS))
print(client.submit_order(market=Market.SPOT, symbol=symbol_spot, order_type=OrderType.MARKET,
side=SpotSide.BUY,
size=10))
print(client.submit_order(market=Market.SPOT, symbol=symbol_spot, order_type=OrderType.MARKET,
side=SpotSide.SELL,
size=0.00050000))
# ------------- MARGIN
rate = client.spot_margin_borrowing_rate(symbol_spot)
b_records = client.spot_margin_get_borrow_record(symbol_spot)
r_records = client.spot_margin_get_repay_record(symbol_spot)
client.spot_margin_borrow(symbol_spot, "BTC", 0.005)
client.spot_margin_repay(symbol_spot, "BTC", 0.005)
/GSEIMIITB-1.1.tar.gz/GSEIMIITB-1.1/src/grc/gui/Bars.py
from __future__ import absolute_import
import logging
from gi.repository import Gtk, GObject, Gio, GLib
from . import Actions
log = logging.getLogger(__name__)
'''
# Menu/Toolbar Lists:
#
# Sub items can be 1 of 3 types
# - List Creates a section within the current menu
# - Tuple Creates a submenu using a string or action as the parent. The child
# can be another menu list or an identifier used to call a helper function.
# - Action Appends a new menu item to the current menu
#
LIST_NAME = [
[Action1, Action2], # New section
(Action3, [Action4, Action5]), # Submenu with action as parent
("Label", [Action6, Action7]), # Submenu with string as parent
("Label2", "helper") # Submenu with helper function. Calls 'create_helper()'
]
'''
# The list of actions for the toolbar.
TOOLBAR_LIST = [
[(Actions.FLOW_GRAPH_NEW, 'flow_graph_new_type'),
(Actions.FLOW_GRAPH_OPEN, 'flow_graph_recent'),
Actions.FLOW_GRAPH_SAVE, Actions.FLOW_GRAPH_CLOSE],
[Actions.FLOW_GRAPH_SCREEN_CAPTURE],
[Actions.BLOCK_CUT, Actions.BLOCK_COPY, Actions.BLOCK_PASTE, Actions.ELEMENT_DELETE],
[Actions.FLOW_GRAPH_UNDO, Actions.FLOW_GRAPH_REDO],
[Actions.FLOW_GRAPH_GEN, Actions.FLOW_GRAPH_EXEC, Actions.FLOW_GRAPH_PLOT],
[Actions.BLOCK_ROTATE_CCW, Actions.BLOCK_ROTATE_CW],
[Actions.FIND_BLOCKS, Actions.OPEN_HIER],
]
# The list of actions and categories for the menu bar.
MENU_BAR_LIST = [
('_File', [
[(Actions.FLOW_GRAPH_NEW, 'flow_graph_new_type'), Actions.FLOW_GRAPH_DUPLICATE,
Actions.FLOW_GRAPH_OPEN, (Actions.FLOW_GRAPH_OPEN_RECENT, 'flow_graph_recent')],
[Actions.FLOW_GRAPH_SAVE, Actions.FLOW_GRAPH_SAVE_AS, Actions.FLOW_GRAPH_SAVE_COPY],
[Actions.FLOW_GRAPH_SCREEN_CAPTURE],
[Actions.FLOW_GRAPH_CLOSE, Actions.APPLICATION_QUIT]
]),
('_Edit', [
[Actions.FLOW_GRAPH_UNDO, Actions.FLOW_GRAPH_REDO],
[Actions.BLOCK_CUT, Actions.BLOCK_COPY, Actions.BLOCK_PASTE, Actions.ELEMENT_DELETE,
Actions.SELECT_ALL],
[Actions.BLOCK_ROTATE_CCW, Actions.BLOCK_ROTATE_CW, ('_Align', Actions.BLOCK_ALIGNMENTS)],
[Actions.BLOCK_PARAM_MODIFY]
]),
('Gparms', [
[Actions.GPARM_ADD, Actions.GPARM_DEL, Actions.GPARM_EDIT],
]),
('Outvars', [
[Actions.OUTVAR_DEL, Actions.OUTVAR_EDIT],
]),
('SolveBlocks', [
[Actions.SOLVEBLOCK_ADD, Actions.SOLVEBLOCK_DEL, Actions.SOLVEBLOCK_EDIT,
Actions.SOLVEBLOCK_RESET, Actions.SOLVEBLOCK_DISP],
]),
('OutputBlocks', [
[Actions.OUTPUTBLOCK_ADD, Actions.OUTPUTBLOCK_DEL, Actions.OUTPUTBLOCK_EDIT],
]),
('_View', [
[Actions.TOGGLE_BLOCKS_WINDOW],
[Actions.TOGGLE_CONSOLE_WINDOW, Actions.TOGGLE_SCROLL_LOCK, Actions.SAVE_CONSOLE, Actions.CLEAR_CONSOLE],
[Actions.TOGGLE_AUTO_HIDE_PORT_LABELS, Actions.TOGGLE_SNAP_TO_GRID,],
[Actions.FIND_BLOCKS],
]),
('_Run', [
Actions.FLOW_GRAPH_GEN, Actions.FLOW_GRAPH_EXEC, Actions.FLOW_GRAPH_PLOT
]),
('_Help', [
[Actions.HELP_WINDOW_DISPLAY,
Actions.KEYBOARD_SHORTCUTS_WINDOW_DISPLAY,
Actions.SHOWDOC_WINDOW_DISPLAY],
[Actions.ABOUT_WINDOW_DISPLAY]
])
]
# The list of actions for the context menu.
CONTEXT_MENU_LIST = [
[Actions.BLOCK_CUT, Actions.BLOCK_COPY, Actions.BLOCK_PASTE, Actions.ELEMENT_DELETE],
[Actions.BLOCK_ROTATE_CCW, Actions.BLOCK_ROTATE_CW,
Actions.OUTVAR_ADD, Actions.ELEMENT_DISPLAY, Actions.DOC_DISPLAY,
Actions.SHOW_PARAM, Actions.PASTE_SELECTED,
Actions.OPEN_HIER,
],
[Actions.BLOCK_PARAM_MODIFY],
]
class SubMenuHelper(object):
''' Generates custom submenus for the main menu or toolbar. '''
def __init__(self):
self.submenus = {}
def build_submenu(self, name, obj, set_func):
# Get the correct helper function
create_func = getattr(self, "create_{}".format(name))
# Save the helper functions for rebuilding the menu later
self.submenus[name] = (create_func, obj, set_func)
# Actually build the menu
set_func(obj, create_func())
def refresh_submenus(self):
for name in self.submenus:
create_func, obj, set_func = self.submenus[name]
set_func(obj, create_func())
def create_flow_graph_new_type(self):
""" Different flowgraph types """
menu = Gio.Menu()
platform = Gtk.Application.get_default().platform
generate_modes = platform.get_generate_options()
for key, name, default in generate_modes:
target = "app.flowgraph.new_type::{}".format(key)
menu.append(name, target)
return menu
def create_flow_graph_recent(self):
""" Recent flow graphs """
config = Gtk.Application.get_default().config
recent_files = config.get_recent_files()
menu = Gio.Menu()
if len(recent_files) > 0:
files = Gio.Menu()
for i, file_name in enumerate(recent_files):
target = "app.flowgraph.open_recent::{}".format(file_name)
files.append(file_name.replace("_", "__"), target)
menu.append_section(None, files)
#clear = Gio.Menu()
#clear.append("Clear recent files", "app.flowgraph.clear_recent")
#menu.append_section(None, clear)
else:
# Show an empty menu
menuitem = Gio.MenuItem.new("No items found", "app.none")
menu.append_item(menuitem)
return menu
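# How the dynamic-submenu dispatch above resolves (grounded in build_submenu):
# build_submenu("flow_graph_recent", item, set_func) looks up
# getattr(self, "create_flow_graph_recent") and installs the Gio.Menu it returns,
# so adding a new dynamic submenu only requires a matching create_<name>() method.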
class MenuHelper(SubMenuHelper):
"""
Recursively builds a menu from a given list of actions.
Args:
- actions: List of actions to build the menu
- menu: Current menu being built
Notes:
- Tuple: Create a new submenu from the parent (1st) and child (2nd) elements
- Action: Append to current menu
- List: Start a new section
"""
def __init__(self):
SubMenuHelper.__init__(self)
def build_menu(self, actions, menu):
for item in actions:
if isinstance(item, tuple):
# Create a new submenu
parent, child = (item[0], item[1])
# Create the parent
label, target = (parent, None)
if isinstance(parent, Actions.Action):
label = parent.label
target = "{}.{}".format(parent.prefix, parent.name)
menuitem = Gio.MenuItem.new(label, None)
if hasattr(parent, "icon_name"):
menuitem.set_icon(Gio.Icon.new_for_string(parent.icon_name))
# Create the new submenu
if isinstance(child, list):
submenu = Gio.Menu()
self.build_menu(child, submenu)
menuitem.set_submenu(submenu)
elif isinstance(child, str):
# Child is the name of the submenu to create
def set_func(obj, menu):
obj.set_submenu(menu)
self.build_submenu(child, menuitem, set_func)
menu.append_item(menuitem)
elif isinstance(item, list):
# Create a new section
section = Gio.Menu()
self.build_menu(item, section)
menu.append_section(None, section)
elif isinstance(item, Actions.Action):
# Append a new menuitem
target = "{}.{}".format(item.prefix, item.name)
menuitem = Gio.MenuItem.new(item.label, target)
if item.icon_name:
menuitem.set_icon(Gio.Icon.new_for_string(item.icon_name))
menu.append_item(menuitem)
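# A small worked example of the conventions documented above: passing
# [[Actions.FLOW_GRAPH_SAVE], ('_Edit', [Actions.BLOCK_CUT])] to build_menu()
# produces one section holding the Save item plus an '_Edit' submenu holding Cut.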
class ToolbarHelper(SubMenuHelper):
"""
Builds a toolbar from a given list of actions.
Args:
- actions: List of actions to build the menu
- item: Current menu being built
Notes:
- Tuple: Create a new submenu from the parent (1st) and child (2nd) elements
- Action: Append to current menu
- List: Start a new section
"""
def __init__(self):
SubMenuHelper.__init__(self)
def build_toolbar(self, actions, current):
for item in actions:
if isinstance(item, list):
# Toolbars don't have sections like menus, so call this function
# recursively with the "section" and just append a separator.
self.build_toolbar(item, self)
current.insert(Gtk.SeparatorToolItem.new(), -1)
elif isinstance(item, tuple):
parent, child = (item[0], item[1])
# Create an item with a submenu
# Generate the submenu and add to the item.
# Add the item to the toolbar
button = Gtk.MenuToolButton.new()
# The tuple is made up of an Action (parent) and the name of a submenu helper (child).
button.set_label(parent.label)
button.set_tooltip_text(parent.tooltip)
button.set_icon_name(parent.icon_name)
target = "{}.{}".format(parent.prefix, parent.name)
button.set_action_name(target)
def set_func(obj, menu):
obj.set_menu(Gtk.Menu.new_from_model(menu))
self.build_submenu(child, button, set_func)
current.insert(button, -1)
elif isinstance(item, Actions.Action):
button = Gtk.ToolButton.new()
button.set_label(item.label)
button.set_tooltip_text(item.tooltip)
button.set_icon_name(item.icon_name)
target = "{}.{}".format(item.prefix, item.name)
button.set_action_name(target)
current.insert(button, -1)
class Menu(Gio.Menu, MenuHelper):
""" Main Menu """
def __init__(self):
GObject.GObject.__init__(self)
MenuHelper.__init__(self)
log.debug("Building the main menu")
self.build_menu(MENU_BAR_LIST, self)
class ContextMenu(Gio.Menu, MenuHelper):
""" Context menu for the drawing area """
def __init__(self):
GObject.GObject.__init__(self)
log.debug("Building the context menu")
self.build_menu(CONTEXT_MENU_LIST, self)
class Toolbar(Gtk.Toolbar, ToolbarHelper):
""" The gtk toolbar with actions added from the toolbar list. """
def __init__(self):
"""
Parse the list of action names in the toolbar list.
Look up the action for each name in the action list and add it to the
toolbar.
"""
GObject.GObject.__init__(self)
ToolbarHelper.__init__(self)
self.set_style(Gtk.ToolbarStyle.ICONS)
#self.get_style_context().add_class(Gtk.STYLE_CLASS_PRIMARY_TOOLBAR)
#SubMenuCreator.__init__(self)
self.build_toolbar(TOOLBAR_LIST, self)
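# A sketch of how an application might wire these up (typical GTK3 usage, assumed
# rather than taken from this file; vbox is a hypothetical Gtk.Box):
#
#   app = Gtk.Application.get_default()
#   app.set_menubar(Menu())                       # Gio.Menu-based main menu
#   vbox.pack_start(Toolbar(), False, False, 0)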
/Auto_FOX-1.0.0b1-py3-none-any.whl/FOX/armc/guess.py
from __future__ import annotations
from types import MappingProxyType
from itertools import chain
from typing import (
Iterable,
Mapping,
MutableMapping,
Tuple,
Dict,
Set,
Container,
TYPE_CHECKING
)
import numpy as np
import pandas as pd
from scm.plams import Units
from nanoutils import Literal, PathType
from ..io import PSFContainer, PRMContainer
from ..utils import prepend_exception
from ..ff import UFF_DF, SIGMA_DF, LJDataFrame, estimate_lj
if TYPE_CHECKING:
from FOX import MultiMolecule
__all__ = ['guess_param']
ParamKind = Literal['epsilon', 'sigma']
ModeKind = Literal[
'ionic_radius',
'ion_radius',
'ionic_radii',
'ion_radii',
'rdf',
'uff',
'crystal_radius',
'crystal_radii'
]
#: A :class:`frozenset` with alias for the :code:`"ion_radius"` guessing mode.
ION_SET = frozenset({
'ionic_radius',
'ion_radius',
'ionic_radii',
'ion_radii'
})
#: A :class:`frozenset` with alias for the :code:`"crystal_radius"` guessing mode.
CRYSTAL_SET = frozenset({
'crystal_radius',
'crystal_radii'
})
#: A :class:`frozenset` containing all allowed values for the ``mode`` parameter.
MODE_SET = ION_SET | CRYSTAL_SET | {'rdf', 'uff'}
#: A :class:`~collections.abc.Mapping` containing the default unit for each ``param`` value.
DEFAULT_UNIT: MappingProxyType[ParamKind, str] = MappingProxyType({
'epsilon': 'kcal/mol',
'sigma': 'angstrom',
})
def guess_param(
mol_list: Iterable[MultiMolecule],
param: ParamKind,
mode: ModeKind = 'rdf',
*,
cp2k_settings: None | MutableMapping = None,
prm: None | PathType | PRMContainer = None,
psf_list: None | Iterable[PathType | PSFContainer] = None,
unit: None | str = None,
param_mapping: None | Mapping[tuple[str, str], float] = None,
) -> Dict[Tuple[str, str], float]:
"""Estimate all Lennard-Jones missing forcefield parameters.
Examples
--------
.. code:: python
>>> from FOX import MultiMolecule
>>> from FOX.armc import guess_param
>>> mol_list = [MultiMolecule(...), ...]
>>> prm = str(...)
>>> psf_list = [str(...), ...]
>>> epsilon_dict = guess_param(mol_list, 'epsilon', prm=prm, psf_list=psf_list)
>>> sigma_dict = guess_param(mol_list, 'sigma', prm=prm, psf_list=psf_list)
Parameters
----------
mol_list : :class:`Iterable[FOX.MultiMolecule] <collections.abc.Iterable>`
An iterable of molecules.
param : :class:`str`
The to-be estimated parameter.
Accepted values are ``"epsilon"`` and ``"sigma"``.
mode : :class:`str`
The procedure for estimating the parameters.
Accepted values are ``"rdf"``, ``"uff"``, ``"crystal_radius"`` and ``"ion_radius"``.
cp2k_settings : :class:`~collections.abc.MutableMapping`, optional
The CP2K input settings.
prm : :term:`python:path-like` or :class:`~FOX.PRMContainer`, optional
An optional .prm file.
psf_list : :class:`Iterable[str|FOX.PSFContainer] <collections.abc.Iterable>`, optional
An optional list of .psf files.
unit : :class:`str`, optional
The unit of the to-be returned quantity.
If ``None``, default to kcal/mol for :code:`param="epsilon"`
and angstrom for :code:`param="sigma"`.
Returns
-------
:class:`dict[tuple[str, str], float] <dict>`
A dictionary with atom-pairs as keys and the estimated parameters as values.
""" # noqa: E501
# Validate param and mode
param = _validate_arg(param, name='param', ref={'epsilon', 'sigma'}) # type: ignore
mode = _validate_arg(mode, name='mode', ref=MODE_SET) # type: ignore
if unit is not None:
convert_unit = Units.conversion_ratio(DEFAULT_UNIT[param], unit)
else:
convert_unit = 1
# Construct a set with all valid atoms types
mol_list = [mol.copy() for mol in mol_list]
if psf_list is not None:
atoms: Set[str] = set()
for mol, p in zip(mol_list, psf_list):
psf: PSFContainer = PSFContainer.read(p) if not isinstance(p, PSFContainer) else p
mol.atoms_alias = psf.to_atom_alias_dict()
atoms |= set(psf.atom_type)
else:
atoms = set(chain.from_iterable(mol.atoms.keys() for mol in mol_list))
# Construct a DataFrame and update it with all available parameters
df = LJDataFrame(np.nan, index=atoms)
if cp2k_settings is not None:
df.overlay_cp2k_settings(cp2k_settings)
if param_mapping is not None:
for k, v in param_mapping.items():
df.loc[k, param] = v / convert_unit
if prm is not None:
prm_: PRMContainer = prm if isinstance(prm, PRMContainer) else PRMContainer.read(prm)
df.overlay_prm(prm_)
prm_dict = _nb_from_prm(prm_, param=param)
else:
prm_dict = {}
# Extract the relevant parameter Series
_series = df[param]
series = _series[_series.isnull()]
# Construct the to-be returned series and set them to the correct units
ret = _guess_param(series, mode, mol_list=mol_list, prm_dict=prm_dict)
ret *= convert_unit
return ret
def _validate_arg(value: str, name: str, ref: Container[str]) -> str:
"""Check if **value** is in **ref**.
Returns
-------
:class:`str`
The lowered version of **value**.
"""
try:
ret = value.lower()
assert ret in ref
except (TypeError, AttributeError) as ex:
raise TypeError(f"Invalid {name!r} type: {value.__class__.__name__!r}") from ex
except AssertionError as ex:
raise ValueError(f"Invalid {name!r} value: {value!r:.100}") from ex
return ret
def _guess_param(
series: pd.Series,
mode: ModeKind,
mol_list: Iterable[MultiMolecule],
prm_dict: MutableMapping[str, float],
unit: None | str = None,
) -> pd.Series:
"""Perform the parameter guessing as specified by **mode**.
Returns
-------
:class:`pd.Series <pandas.Series>`
A dictionary with atom-pairs as keys (2-tuples) and the estimated parameters as values.
"""
if mode == 'rdf':
rdf(series, mol_list)
elif mode == 'uff':
uff(series, prm_dict, mol_list)
elif mode in ION_SET:
ion_radius(series, prm_dict, mol_list)
elif mode in CRYSTAL_SET:
crystal_radius(series, prm_dict, mol_list)
return series
def uff(
series: pd.Series,
prm_mapping: MutableMapping[str, float],
mol_list: Iterable[MultiMolecule],
) -> None:
"""Guess parameters in **df** using UFF parameters."""
uff_loc = UFF_DF[series.name].loc
iterator = ((at1, at2) for mol in mol_list for at1, (at2, _) in mol.atoms_alias.items())
for at1, at2 in iterator:
if at1 not in prm_mapping:
try:
prm_mapping[at1] = uff_loc[at2]
except KeyError:
pass
_set_radii(series, prm_mapping, uff_loc)
def ion_radius(
series: pd.Series,
prm_mapping: MutableMapping[str, float],
mol_list: Iterable[MultiMolecule],
) -> None:
"""Guess parameters in **df** using ionic radii."""
if series.name == 'epsilon':
raise NotImplementedError("'epsilon' guessing is not supported "
"with `guess='ion_radius'`")
ion_loc = SIGMA_DF['ionic_sigma'].loc
iterator = ((at1, at2) for mol in mol_list for at1, (at2, _) in mol.atoms_alias.items())
for at1, at2 in iterator:
if at1 not in prm_mapping:
try:
prm_mapping[at1] = ion_loc[at2]
except KeyError:
pass
_set_radii(series, prm_mapping, ion_loc)
def crystal_radius(
series: pd.Series,
prm_mapping: MutableMapping[str, float],
mol_list: Iterable[MultiMolecule],
) -> None:
"""Guess parameters in **df** using crystal radii."""
if series.name == 'epsilon':
raise NotImplementedError("'epsilon' guessing is not supported "
"with `guess='crystal_radius'`")
ion_loc = SIGMA_DF['crystal_sigma'].loc
iterator = ((at1, at2) for mol in mol_list for at1, (at2, _) in mol.atoms_alias.items())
for at1, at2 in iterator:
if at1 not in prm_mapping:
try:
prm_mapping[at1] = ion_loc[at2]
except KeyError:
pass
_set_radii(series, prm_mapping, ion_loc)
def rdf(series: pd.Series, mol_list: Iterable[MultiMolecule]) -> None:
"""Guess parameters in **df** using the Boltzmann-inverted radial distribution function."""
is_null = series.isnull()
nonzero = series[~is_null].index
atom_subset = set(chain.from_iterable(series[is_null].index))
# Construct the RDF and guess the parameters
rdf_gen = (mol.init_rdf(atom_subset=atom_subset) for mol in mol_list)
for rdf in rdf_gen:
guess = estimate_lj(rdf)
guess.index = pd.MultiIndex.from_tuples(sorted(i.split()) for i in guess.index)
guess[guess.index.intersection(nonzero)] = np.nan
series.update(guess[series.name])
def _geometric_mean(a, b):
return np.abs(a * b)**0.5
def _arithmetic_mean(a, b):
return (a + b) / 2
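# These two means implement the standard Lorentz-Berthelot combining rules used by
# _set_radii below: epsilon_ij = sqrt(|epsilon_i * epsilon_j|) for well depths and
# sigma_ij = (sigma_i + sigma_j) / 2 for radii, e.g. sigma_i = 3.0 and sigma_j = 4.0
# combine to sigma_ij = 3.5.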
@prepend_exception('No reference parameters available for atom type: ', exception=KeyError)
def _set_radii(
series: pd.Series,
prm_mapping: Mapping[str, float],
ref_mapping: Mapping[str, float],
) -> None:
if series.name == 'epsilon':
func = _geometric_mean
elif series.name == 'sigma':
func = _arithmetic_mean
else:
raise ValueError(f"series.name: {series.name!r:.100}")
for i, j in series.index: # pd.MultiIndex
if i in prm_mapping:
value_i = prm_mapping[i]
else:
value_i = ref_mapping[i]
if j in prm_mapping:
value_j = prm_mapping[j]
else:
value_j = ref_mapping[j]
series[i, j] = func(value_i, value_j)
def _nb_from_prm(prm: PRMContainer, param: ParamKind) -> Dict[str, float]:
r"""Extract a dict from **prm** with all :math:`\varepsilon` or :math:`\sigma` values."""
if prm.nonbonded is None:
return {}
nonbonded = prm.nonbonded[[2, 3]].copy()
nonbonded.columns = ['epsilon', 'sigma'] # kcal/mol and Angstrom
nonbonded['sigma'] *= 2 / 2**(1/6)  # prm stores Rmin/2; sigma = Rmin / 2**(1/6) = (Rmin/2) * 2 / 2**(1/6)
return nonbonded[param].to_dict()
/LibJciHitachi-0.5.2-py3-none-any.whl/JciHitachi/aws_connection.py
import datetime
import json
import logging
import threading
import time
import uuid
from dataclasses import dataclass, field
from typing import Dict
import awscrt
import httpx
from awsiot import iotshadow, mqtt_connection_builder
from .model import JciHitachiAWSStatus, JciHitachiAWSStatusSupport
AWS_COGNITO_REGION = "ap-northeast-1"
AWS_COGNITO_IDP_ENDPOINT = f"cognito-idp.{AWS_COGNITO_REGION}.amazonaws.com/"
AWS_COGNITO_ENDPOINT = f"cognito-identity.{AWS_COGNITO_REGION}.amazonaws.com/"
AWS_COGNITO_CLIENT_ID = "7kfnjsb66ei1qt5s5gjv6j1lp6"
AWS_COGNITO_USERPOOL_ID = "ap-northeast-1_aTZeaievK"
#AMAZON_ROOT_CERT = os.path.join(os.path.dirname(os.path.abspath(__file__)), './cert/AmazonRootCA1.pem')
AWS_IOT_ENDPOINT = "https://iot-api.jci-hitachi-smarthome.com"
AWS_MQTT_ENDPOINT = "a8kcu267h96in-ats.iot.ap-northeast-1.amazonaws.com"
_LOGGER = logging.getLogger(__name__)
@dataclass
class AWSTokens:
access_token: str
id_token: str
refresh_token: str
expiration: float
@dataclass
class AWSIdentity:
identity_id: str
user_name: str
user_attributes: dict
@dataclass
class JciHitachiMqttEvents:
device_status: Dict[str, JciHitachiAWSStatus] = field(default_factory=dict)
device_support: Dict[str, JciHitachiAWSStatusSupport] = field(default_factory=dict)
device_control: Dict[str, dict] = field(default_factory=dict)
device_shadow: Dict[str, dict] = field(default_factory=dict)
mqtt_error: str = field(default_factory=str)
device_status_event: threading.Event = field(default_factory=threading.Event)
device_support_event: threading.Event = field(default_factory=threading.Event)
device_control_event: threading.Event = field(default_factory=threading.Event)
device_shadow_event: threading.Event = field(default_factory=threading.Event)
mqtt_error_event: threading.Event = field(default_factory=threading.Event)
class JciHitachiAWSCognitoConnection:
"""Connecting to Jci-Hitachi AWS Cognito API.
Parameters
----------
email : str
User email.
password : str
User password.
aws_tokens : AWSTokens, optional
If aws_tokens is given, it is used by request;
otherwise, a login procedure is performed to obtain new aws_tokens,
by default None.
proxy : str, optional
Proxy setting. Format:"IP:port", by default None.
print_response : bool, optional
If set, all responses of httpx will be printed, by default False.
"""
def __init__(self, email, password, aws_tokens=None, proxy=None, print_response=False):
self._login_response = None
self._email = email
self._password = password
self._print_response = print_response
self._proxies = {'http': proxy, 'https': proxy} if proxy else None
if aws_tokens:
self._aws_tokens = aws_tokens
else:
conn_status, self._aws_tokens = self.login()
if conn_status != "OK":
raise RuntimeError(f"An error occurred when signing into AWS Cognito Service: {conn_status}")
def _generate_headers(self, target):
normal_headers = {
"X-Amz-Target": target,
"User-Agent": "Dalvik/2.1.0",
"content-type": "application/x-amz-json-1.1",
"Accept" : "application/json",
}
return normal_headers
def _handle_response(self, response):
response_json = response.json()
if response.status_code == httpx.codes.ok:
return "OK", response_json
else:
return f"{response_json['__type']} {response_json['message']}", response_json
@property
def aws_tokens(self):
return self._aws_tokens
def login(self, use_refresh_token=False):
"""Login API.
Parameters
----------
use_refresh_token : bool, optional
Whether or not to use AWSTokens.refresh_token to login.
If AWSTokens is not provided, fallback to email and password, by default False
Returns
-------
(str, AWSTokens)
(status, aws tokens).
"""
# https://docs.aws.amazon.com/cognito-user-identity-pools/latest/APIReference/API_InitiateAuth.html
if use_refresh_token and self._aws_tokens != None:
login_json_data = {
"AuthFlow": 'REFRESH_TOKEN_AUTH',
"AuthParameters":{
'REFRESH_TOKEN': self._aws_tokens.refresh_token,
},
"ClientId": AWS_COGNITO_CLIENT_ID,
}
else:
use_refresh_token = False
login_json_data = {
"AuthFlow": 'USER_PASSWORD_AUTH',
"AuthParameters": {
'USERNAME': self._email,
'PASSWORD': self._password,
},
"ClientId": AWS_COGNITO_CLIENT_ID,
}
login_headers = self._generate_headers("AWSCognitoIdentityProviderService.InitiateAuth")
login_req = httpx.post(f"https://{AWS_COGNITO_IDP_ENDPOINT}",
json=login_json_data,
headers=login_headers,
proxies=self._proxies,
)
if self._print_response:
self.print_response(login_req)
status, response = self._handle_response(login_req)
aws_tokens = None
if status == "OK":
auth_result = response["AuthenticationResult"]
aws_tokens = AWSTokens(
access_token = auth_result['AccessToken'],
id_token = auth_result['IdToken'],
refresh_token = self._aws_tokens.refresh_token if use_refresh_token else auth_result['RefreshToken'],
expiration = time.time() + auth_result['ExpiresIn'],
)
return status, aws_tokens
def get_data(self):
raise NotImplementedError
def print_response(self, response):
print('===================================================')
print(self.__class__.__name__, 'Response:')
print('headers:', response.headers)
print('status_code:', response.status_code)
print('text:', json.dumps(response.json(), indent=True))
print('===================================================')
class ChangePassword(JciHitachiAWSCognitoConnection):
"""API internal endpoint.
https://docs.aws.amazon.com/cognito-user-identity-pools/latest/APIReference/API_ChangePassword.html
Parameters
----------
email : str
User email.
password : str
User password.
"""
def __init__(self, email, password, **kwargs):
super().__init__(email, password, **kwargs)
def get_data(self, new_password):
json_data = {
"AccessToken": self._aws_tokens.access_token,
"PreviousPassword": self._password,
"ProposedPassword": new_password,
}
headers = self._generate_headers("AWSCognitoIdentityProviderService.ChangePassword")
req = httpx.post(f"https://{AWS_COGNITO_IDP_ENDPOINT}",
json=json_data,
headers=headers,
proxies=self._proxies,
)
if self._print_response:
self.print_response(req)
status, response = self._handle_response(req)
return status, None
class GetUser(JciHitachiAWSCognitoConnection):
"""API internal endpoint.
https://docs.aws.amazon.com/cognito-user-identity-pools/latest/APIReference/API_GetUser.html
Parameters
----------
email : str
User email.
password : str
User password.
"""
def __init__(self, email, password, **kwargs):
super().__init__(email, password, **kwargs)
def get_data(self):
json_data = {
"AccessToken": self._aws_tokens.access_token,
}
headers = self._generate_headers("AWSCognitoIdentityProviderService.GetUser")
req = httpx.post(f"https://{AWS_COGNITO_IDP_ENDPOINT}",
json=json_data,
headers=headers,
proxies=self._proxies,
)
if self._print_response:
self.print_response(req)
status, response = self._handle_response(req)
aws_identity = None
if status == "OK":
user_attributes = {attr["Name"]: attr["Value"] for attr in response["UserAttributes"]}
aws_identity = AWSIdentity(
identity_id = user_attributes["custom:cognito_identity_id"],
user_name = response["Username"],
user_attributes = user_attributes,
)
return status, aws_identity
class GetCredentials(JciHitachiAWSCognitoConnection):
"""API internal endpoint.
Parameters
----------
email : str
User email.
password : str
User password.
"""
def __init__(self, email, password, **kwargs):
super().__init__(email, password, **kwargs)
def get_data(self, aws_identity):
json_data = {
"IdentityId": aws_identity.identity_id,
"Logins": {
f"{AWS_COGNITO_IDP_ENDPOINT}{AWS_COGNITO_USERPOOL_ID}": self._aws_tokens.id_token,
}
}
headers = self._generate_headers("AWSCognitoIdentityService.GetCredentialsForIdentity")
req = httpx.post(f"https://{AWS_COGNITO_ENDPOINT}",
json=json_data,
headers=headers,
proxies=self._proxies,
)
if self._print_response:
self.print_response(req)
status, response = self._handle_response(req)
aws_credentials = None
if status == "OK":
aws_credentials = awscrt.auth.AwsCredentials(
access_key_id = response["Credentials"]["AccessKeyId"],
secret_access_key = response["Credentials"]["SecretKey"],
session_token = response["Credentials"]["SessionToken"],
expiration = datetime.datetime.fromtimestamp(response["Credentials"]["Expiration"]),
)
return status, aws_credentials
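# A sketch of the credential chain these endpoints form (assumed wiring that
# mirrors the signatures above):
#
#   getuser = GetUser(email, password)
#   _, identity = getuser.get_data()                                    # AWSIdentity
#   getcred = GetCredentials(email, password, aws_tokens=getuser.aws_tokens)
#   _, credentials = getcred.get_data(identity)                         # awscrt AwsCredentials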
class JciHitachiAWSIoTConnection:
"""Connecting to Jci-Hitachi AWS IoT API.
Parameters
----------
aws_tokens : AWSTokens
AWS tokens.
proxy : str, optional
Proxy setting. Format:"IP:port", by default None.
print_response : bool, optional
If set, all responses of httpx will be printed, by default False.
"""
def __init__(self, aws_tokens, proxy=None, print_response=False):
self._aws_tokens = aws_tokens
self._print_response = print_response
self._proxies = {'http': proxy, 'https': proxy} if proxy else None
def _generate_normal_headers(self):
normal_headers = {
"authorization": f"Bearer {self._aws_tokens.id_token}",
"accesstoken": f"Bearer {self._aws_tokens.access_token}",
"User-Agent": "Dalvik/2.1.0",
"content-type" : "application/json",
"Accept" : "application/json",
}
return normal_headers
def _handle_response(self, response):
response_json = response.json()
if response.status_code == httpx.codes.ok:
code = response_json["status"]["code"]
if code == 0:
return code, "OK", response_json
elif code == 6:
return code, "Invalid email or password", response_json
elif code == 12:
return code, "Invalid session token", response_json
else:
return code, "Unknown error", response_json
else:
return response.status_code, f"HTTP exception {response.status_code}", response_json
def _send(self, api_name, json=None):
req = httpx.post(
"{}{}".format(AWS_IOT_ENDPOINT, api_name),
headers=self._generate_normal_headers(),
json=json,
proxies=self._proxies,
)
if self._print_response:
self.print_response(req)
code, message, response_json = self._handle_response(req)
return message, response_json
def get_data(self):
raise NotImplementedError
def print_response(self, response):
print('===================================================')
print(self.__class__.__name__, 'Response:')
print('headers:', response.headers)
print('status_code:', response.status_code)
print('text:', json.dumps(response.json(), indent=True))
print('===================================================')
class GetAllDevice(JciHitachiAWSIoTConnection):
"""API internal endpoint.
Parameters
----------
aws_tokens : AWSTokens
AWS tokens.
"""
def __init__(self, aws_tokens, **kwargs):
super().__init__(aws_tokens, **kwargs)
def get_data(self):
return self._send("/GetAllDevice")
class GetAllGroup(JciHitachiAWSIoTConnection):
"""API internal endpoint.
Parameters
----------
aws_tokens : AWSTokens
AWS tokens.
"""
def __init__(self, aws_tokens, **kwargs):
super().__init__(aws_tokens, **kwargs)
def get_data(self):
return self._send("/GetAllGroup")
class GetAllRegion(JciHitachiAWSIoTConnection):
"""API internal endpoint.
Parameters
----------
aws_tokens : AWSTokens
AWS tokens.
"""
def __init__(self, aws_tokens, **kwargs):
super().__init__(aws_tokens, **kwargs)
def get_data(self):
return self._send("/GetAllRegion")
class GetAvailableAggregationMonthlyData(JciHitachiAWSIoTConnection):
"""API internal endpoint.
Parameters
----------
aws_tokens : AWSTokens
AWS tokens.
"""
def __init__(self, aws_tokens, **kwargs):
super().__init__(aws_tokens, **kwargs)
def get_data(self, thing_name, time_start, time_end):
json_data = {
"ThingName": thing_name,
"TimeStart": time_start,
"TimeEnd": time_end,
}
return self._send("/GetAvailableAggregationMonthlyData", json_data)
class GetHistoryEventByUser(JciHitachiAWSIoTConnection):
"""API internal endpoint.
Parameters
----------
aws_tokens : AWSTokens
AWS tokens.
"""
def __init__(self, aws_tokens, **kwargs):
super().__init__(aws_tokens, **kwargs)
def get_data(self, time_start, time_end):
json_data = {
"TimeStart": time_start,
"TimeEnd": time_end,
}
return self._send("/GetHistoryEventByUser", json_data)
class ListSubUser(JciHitachiAWSIoTConnection):
"""API internal endpoint.
Parameters
----------
aws_tokens : AWSTokens
AWS tokens.
"""
def __init__(self, aws_tokens, **kwargs):
super().__init__(aws_tokens, **kwargs)
def get_data(self):
return self._send("/ListSubUser")
class JciHitachiAWSMqttConnection:
"""Connecting to Jci-Hitachi AWS MQTT to get latest events.
Parameters
----------
get_credentials_callable : Callable
Callable which takes no arguments and returns AwsCredentials.
print_response : bool, optional
If set, all responses of MQTT will be printed, by default False.
"""
def __init__(self, get_credentials_callable, print_response=False):
self._get_credentials_callable = get_credentials_callable
self._print_response = print_response
self._mqttc = None
self._shadow_mqttc = None
self._client_tokens = {}
self._mqtt_events = JciHitachiMqttEvents()
def __del__(self):
self.disconnect()
@property
def mqtt_events(self):
"""MQTT events.
Returns
-------
JciHitachiMqttEvents
See JciHitachiMqttEvents.
"""
return self._mqtt_events
def _on_publish(self, topic, payload, dup, qos, retain, **kwargs):
try:
payload = json.loads(payload.decode())
except Exception as e:
self._mqtt_events.mqtt_error = e.__class__.__name__
self._mqtt_events.mqtt_error_event.set()
_LOGGER.error(f"Mqtt topic {topic} published with payload {payload} cannot be decoded: {e}")
return
if self._print_response:
print(f"Mqtt topic {topic} published with payload \n {payload}")
split_topic = topic.split('/')
if len(split_topic) >= 4 and split_topic[3] != "shadow":
thing_name = split_topic[1]
if split_topic[2] == "status" and split_topic[3] == "response":
self._mqtt_events.device_status[thing_name] = JciHitachiAWSStatus(payload)
self._mqtt_events.device_status_event.set()
elif split_topic[2] == "registration" and split_topic[3] == "response":
self._mqtt_events.device_support[thing_name] = JciHitachiAWSStatusSupport(payload)
self._mqtt_events.device_support_event.set()
elif split_topic[2] == "control" and split_topic[3] == "response":
self._mqtt_events.device_control[thing_name] = payload
self._mqtt_events.device_control_event.set()
def _on_update_named_shadow_accepted(self, response):
try:
thing_name = self._client_tokens.pop(response.client_token)
except KeyError:
_LOGGER.error(f"An unknown shadow response is received. Client token: {response.client_token}")
return
if response.state:
if response.state.reported:
self._mqtt_events.device_control[thing_name] = response.state.reported
self._mqtt_events.device_control_event.set()
def _on_update_named_shadow_rejected(self, error):
_LOGGER.error(f"A shadow request {error.client_token} was rejected by the API: {error.code} {error.message}")
def _on_get_named_shadow_accepted(self, response):
try:
thing_name = self._client_tokens.pop(response.client_token)
except KeyError:
_LOGGER.error(f"An unknown shadow response is received. Client token: {response.client_token}")
return
if response.state:
if response.state.reported:
self._mqtt_events.device_shadow[thing_name] = response.state.reported
self._mqtt_events.device_shadow_event.set()
def _on_get_named_shadow_rejected(self, error):
_LOGGER.error(f"A shadow request {error.client_token} was rejected by the API: {error.code} {error.message}")
def _on_message(self, topic, payload, dup, qos, retain, **kwargs):
return
def disconnect(self):
"""Disconnect from the MQTT broker.
"""
if self._mqttc is not None:
self._mqttc.disconnect()
def configure(self):
"""Configure MQTT."""
cred_provider = awscrt.auth.AwsCredentialsProvider.new_delegate(self._get_credentials_callable)
event_loop_group = awscrt.io.EventLoopGroup(1)
host_resolver = awscrt.io.DefaultHostResolver(event_loop_group)
client_bootstrap = awscrt.io.ClientBootstrap(event_loop_group, host_resolver)
self._mqttc = mqtt_connection_builder.websockets_with_default_aws_signing(
AWS_COGNITO_REGION,
cred_provider,
client_bootstrap=client_bootstrap,
endpoint=AWS_MQTT_ENDPOINT,
client_id=str(uuid.uuid4())
)
self._mqttc.on_message(self._on_message)
self._shadow_mqttc = iotshadow.IotShadowClient(self._mqttc)
def connect(self, host_identity_id, shadow_names=None, thing_names=None):
"""Connect to the MQTT broker and start loop.
Parameters
----------
host_identity_id : str
Host identity ID.
shadow_names : str or list of str, optional
Names to be subscribed in Shadow, by default None.
thing_names : str or list of str, optional
Things to be subscribed in Shadow, by default None.
"""
try:
connect_future = self._mqttc.connect()
connect_future.result()
_LOGGER.info("MQTT Connected.")
except Exception as e:
self._mqtt_events.mqtt_error = e.__class__.__name__
self._mqtt_events.mqtt_error_event.set()
_LOGGER.error('MQTT connection failed with exception {}'.format(e))
return False
try:
subscribe_future, _ = self._mqttc.subscribe(f"{host_identity_id}/#", awscrt.mqtt.QoS.AT_LEAST_ONCE, callback=self._on_publish)
subscribe_future.result()
if thing_names is not None and shadow_names is not None:
shadow_names = [shadow_names] if isinstance(shadow_names, str) else shadow_names
thing_names = [thing_names] if isinstance(thing_names, str) else thing_names
for shadow_name in shadow_names:
for thing_name in thing_names:
update_accepted_subscribed_future, _ = self._shadow_mqttc.subscribe_to_update_named_shadow_accepted(
request=iotshadow.UpdateNamedShadowSubscriptionRequest(shadow_name=shadow_name, thing_name=thing_name),
qos=awscrt.mqtt.QoS.AT_LEAST_ONCE,
callback=self._on_update_named_shadow_accepted)
update_rejected_subscribed_future, _ = self._shadow_mqttc.subscribe_to_update_named_shadow_rejected(
request=iotshadow.UpdateNamedShadowSubscriptionRequest(shadow_name=shadow_name, thing_name=thing_name),
qos=awscrt.mqtt.QoS.AT_LEAST_ONCE,
callback=self._on_update_named_shadow_rejected)
# Wait for subscriptions to succeed
update_accepted_subscribed_future.result()
update_rejected_subscribed_future.result()
get_accepted_subscribed_future, _ = self._shadow_mqttc.subscribe_to_get_named_shadow_accepted(
request=iotshadow.GetNamedShadowSubscriptionRequest(shadow_name=shadow_name, thing_name=thing_name),
qos=awscrt.mqtt.QoS.AT_LEAST_ONCE,
callback=self._on_get_named_shadow_accepted)
get_rejected_subscribed_future, _ = self._shadow_mqttc.subscribe_to_get_named_shadow_rejected(
request=iotshadow.GetNamedShadowSubscriptionRequest(shadow_name=shadow_name, thing_name=thing_name),
qos=awscrt.mqtt.QoS.AT_LEAST_ONCE,
callback=self._on_get_named_shadow_rejected)
# Wait for subscriptions to succeed
get_accepted_subscribed_future.result()
get_rejected_subscribed_future.result()
except Exception as e:
self._mqtt_events.mqtt_error = e.__class__.__name__
self._mqtt_events.mqtt_error_event.set()
self.disconnect()
_LOGGER.error('MQTT subscription failed with exception {}'.format(e))
return False
return True
def publish(self, topic, payload):
"""Publish message.
Parameters
----------
topic : str
Topic to publish.
payload : dict
Payload to publish.
"""
try:
publish_future, _ = self._mqttc.publish(topic, json.dumps(payload), awscrt.mqtt.QoS.AT_LEAST_ONCE)
publish_future.result()
except Exception as e:
self._mqtt_events.mqtt_error = e.__class__.__name__
self._mqtt_events.mqtt_error_event.set()
_LOGGER.error('Publish failed with exception: {}'.format(e))
def publish_shadow(self, thing_name, command_name, payload={}, shadow_name=None):
"""Publish message to IoT Shadow Service.
Parameters
----------
thing_name : str
Thing name.
command_name : str
Command name, which can be `get` or `update`.
payload : dict, optional
Payload to publish, by default {}.
shadow_name : str, optional
Shadow name, by default None.
"""
if command_name not in ["get", "update"]: # delete responses are never subscribed, so "delete" is rejected here and its branches below are unreachable
raise ValueError("command_name must be one of `get` or `update`.")
# The length of client token can't exceed 64 bytes, so we only use gateway mac address as the token.
client_token = thing_name.split("_")[1]
self._client_tokens.update({client_token: thing_name})
if shadow_name is None:
if command_name == "get":
publish_future = self._shadow_mqttc.publish_get_shadow(
iotshadow.GetShadowRequest(
client_token=client_token,
thing_name=thing_name
),
qos=awscrt.mqtt.QoS.AT_LEAST_ONCE
)
elif command_name == "update":
publish_future = self._shadow_mqttc.publish_update_shadow(
iotshadow.UpdateShadowRequest(
client_token=client_token,
state=iotshadow.ShadowState(reported=payload),
thing_name=thing_name
),
qos=awscrt.mqtt.QoS.AT_LEAST_ONCE
)
elif command_name == "delete":
publish_future = self._shadow_mqttc.publish_delete_shadow(
iotshadow.DeleteShadowRequest(
client_token=client_token,
thing_name=thing_name
),
qos=awscrt.mqtt.QoS.AT_LEAST_ONCE
)
else:
if command_name == "get":
publish_future = self._shadow_mqttc.publish_get_named_shadow(
iotshadow.GetNamedShadowRequest(
client_token=client_token,
shadow_name=shadow_name,
thing_name=thing_name
),
qos=awscrt.mqtt.QoS.AT_LEAST_ONCE
)
elif command_name == "update":
publish_future = self._shadow_mqttc.publish_update_named_shadow(
iotshadow.UpdateNamedShadowRequest(
client_token=client_token,
shadow_name=shadow_name,
state=iotshadow.ShadowState(reported=payload),
thing_name=thing_name
),
qos=awscrt.mqtt.QoS.AT_LEAST_ONCE
)
elif command_name == "delete":
publish_future = self._shadow_mqttc.publish_delete_named_shadow(
iotshadow.DeleteNamedShadowRequest(
client_token=client_token,
shadow_name=shadow_name,
thing_name=thing_name
),
qos=awscrt.mqtt.QoS.AT_LEAST_ONCE
)
try:
publish_future.result()
except Exception as e:
self._mqtt_events.mqtt_error = e.__class__.__name__
self._mqtt_events.mqtt_error_event.set()
_LOGGER.error('Publish failed with exception: {}'.format(e))
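# A sketch of the intended MQTT flow (assumed usage based on the methods above;
# the shadow name "info" and the thing name are placeholders):
#
#   mqtt = JciHitachiAWSMqttConnection(get_credentials)   # callable returning AwsCredentials
#   mqtt.configure()
#   if mqtt.connect(host_identity_id, shadow_names="info", thing_names=[thing_name]):
#       mqtt.publish_shadow(thing_name, "get", shadow_name="info")
#       mqtt.mqtt_events.device_shadow_event.wait(timeout=10)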
/BigJob-0.64.5.tar.gz/BigJob-0.64.5/bigjob_dynamic/many_job_affinity.py
import os
import sys
sys.path.append(os.path.dirname( __file__ ))
from many_job import *
class many_job_affinity_service(many_job_service):
def __init__(self, bigjob_list, advert_host):
""" accepts resource list as key/value pair:
( {"resource_url" : "gram://qb1.loni.org/jobmanager-pbs", "number_cores" : "32", "allocation" : "<allocaton>", "queue" : "workq",
"re_agent": os.getcwd() + "/bigjob_agent_launcher.sh" "walltime":1000, "affinity": "affinity1"},
{"resource_url" : "gram://qb1.loni.org/jobmanager-pbs", "number_cores" : "32", "allocation" : "<allocation", "queue" : "workq",
"re_agent": "os.getcwd() + "/bigjob_agent_launcher.sh", "walltime":1000, "affinity": "affinity1"})
"""
super(many_job_affinity_service, self).__init__(bigjob_list, advert_host)
# NOTE: this double-underscore name is mangled to _many_job_affinity_service__schedule_subjob,
# so it only replaces the parent's scheduler if the parent dispatches to it explicitly.
def __schedule_subjob (self, subjob):
""" find resource (bigjob) with the same affinity for subjob
returns bigjob object """
for i in self.bigjob_list:
bigjob = i["bigjob"]
lock = i["lock"]
affinity = i["affinity"]
lock.acquire()
free_cores = i["free_cores"]
bigjob_url = bigjob.pilot_url
state = bigjob.get_state_detail()
env = subjob.job_description.environment[0]
sj_affinity=None
if env.find("affinity") >=0:
sj_affinity = env.split("=")[1]
logging.debug("Subjob Env: " + str(sj_affinity))
logging.debug("Big Job: " + bigjob_url + " Cores: " + "%s"%free_cores + "/" + i["number_cores"] + " Affinity: "
+ affinity + " SJ affinity: " + sj_affinity + " State: " + state)
if state.lower() == "running" and free_cores >= int(subjob.job_description.number_of_processes) and affinity == sj_affinity:
free_cores = i["free_cores"]
free_cores = free_cores - int(subjob.job_description.number_of_processes)
i["free_cores"]=free_cores
lock.release()
return i
lock.release()
# no resource found
self.subjob_queue.put(subjob)
logging.debug("found no active resource for sub-job => (re-) queue it")
return None
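# A sketch of pairing a sub-job with a resource by affinity (hypothetical job
# description jd; __schedule_subjob reads the affinity from the first environment entry):
#
#   jd.environment = ["affinity=affinity1"]
#   jd.number_of_processes = "8"
#   # only big-jobs declared with "affinity": "affinity1" are eligible for this sub-job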
/MetaCalls-0.0.5-cp310-cp310-manylinux2014_x86_64.whl/metacalls/node_modules/@types/node/querystring.d.ts
declare module 'querystring' {
interface StringifyOptions {
encodeURIComponent?: ((str: string) => string) | undefined;
}
interface ParseOptions {
maxKeys?: number | undefined;
decodeURIComponent?: ((str: string) => string) | undefined;
}
interface ParsedUrlQuery extends NodeJS.Dict<string | string[]> {}
interface ParsedUrlQueryInput extends NodeJS.Dict<string | number | boolean | ReadonlyArray<string> | ReadonlyArray<number> | ReadonlyArray<boolean> | null> {}
/**
* The `querystring.stringify()` method produces a URL query string from a
* given `obj` by iterating through the object's "own properties".
*
* It serializes the following types of values passed in `obj`:[string](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Data_structures#String_type) |
* [number](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Data_structures#Number_type) |
* [bigint](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/BigInt) |
* [boolean](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Data_structures#Boolean_type) |
* [string\[\]](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Data_structures#String_type) |
* [number\[\]](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Data_structures#Number_type) |
* [bigint\[\]](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/BigInt) |
* [boolean\[\]](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Data_structures#Boolean_type) The numeric values must be finite. Any other input values will be coerced to
* empty strings.
*
* ```js
* querystring.stringify({ foo: 'bar', baz: ['qux', 'quux'], corge: '' });
* // Returns 'foo=bar&baz=qux&baz=quux&corge='
*
* querystring.stringify({ foo: 'bar', baz: 'qux' }, ';', ':');
* // Returns 'foo:bar;baz:qux'
* ```
*
* By default, characters requiring percent-encoding within the query string will
* be encoded as UTF-8\. If an alternative encoding is required, then an alternative`encodeURIComponent` option will need to be specified:
*
* ```js
* // Assuming gbkEncodeURIComponent function already exists,
*
* querystring.stringify({ w: '中文', foo: 'bar' }, null, null,
* { encodeURIComponent: gbkEncodeURIComponent });
* ```
* @since v0.1.25
* @param obj The object to serialize into a URL query string
* @param [sep='&'] The substring used to delimit key and value pairs in the query string.
* @param [eq='='] . The substring used to delimit keys and values in the query string.
*/
function stringify(obj?: ParsedUrlQueryInput, sep?: string, eq?: string, options?: StringifyOptions): string;
/**
* The `querystring.parse()` method parses a URL query string (`str`) into a
* collection of key and value pairs.
*
* For example, the query string `'foo=bar&abc=xyz&abc=123'` is parsed into:
*
* ```js
* {
* foo: 'bar',
* abc: ['xyz', '123']
* }
* ```
*
* The object returned by the `querystring.parse()` method _does not_prototypically inherit from the JavaScript `Object`. This means that typical`Object` methods such as `obj.toString()`,
* `obj.hasOwnProperty()`, and others
* are not defined and _will not work_.
*
* By default, percent-encoded characters within the query string will be assumed
* to use UTF-8 encoding. If an alternative character encoding is used, then an
* alternative `decodeURIComponent` option will need to be specified:
*
* ```js
* // Assuming gbkDecodeURIComponent function already exists...
*
* querystring.parse('w=%D6%D0%CE%C4&foo=bar', null, null,
* { decodeURIComponent: gbkDecodeURIComponent });
* ```
* @since v0.1.25
* @param str The URL query string to parse
* @param [sep='&'] The substring used to delimit key and value pairs in the query string.
* @param [eq='='] . The substring used to delimit keys and values in the query string.
*/
function parse(str: string, sep?: string, eq?: string, options?: ParseOptions): ParsedUrlQuery;
/**
* The querystring.encode() function is an alias for querystring.stringify().
*/
const encode: typeof stringify;
/**
* The querystring.decode() function is an alias for querystring.parse().
*/
const decode: typeof parse;
/**
* The `querystring.escape()` method performs URL percent-encoding on the given`str` in a manner that is optimized for the specific requirements of URL
* query strings.
*
* The `querystring.escape()` method is used by `querystring.stringify()` and is
* generally not expected to be used directly. It is exported primarily to allow
* application code to provide a replacement percent-encoding implementation if
* necessary by assigning `querystring.escape` to an alternative function.
* @since v0.1.25
*/
function escape(str: string): string;
/**
* The `querystring.unescape()` method performs decoding of URL percent-encoded
* characters on the given `str`.
*
* The `querystring.unescape()` method is used by `querystring.parse()` and is
* generally not expected to be used directly. It is exported primarily to allow
* application code to provide a replacement decoding implementation if
* necessary by assigning `querystring.unescape` to an alternative function.
*
* By default, the `querystring.unescape()` method will attempt to use the
* JavaScript built-in `decodeURIComponent()` method to decode. If that fails,
* a safer equivalent that does not throw on malformed URLs will be used.
* @since v0.1.25
*/
function unescape(str: string): string;
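// Illustrative round-trip (example values assumed, matching standard URL percent-encoding):
//   querystring.escape('a b&c');        // returns 'a%20b%26c'
//   querystring.unescape('a%20b%26c');  // returns 'a b&c'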
}
declare module 'node:querystring' {
export * from 'querystring';
}
/DisplaceNet-0.1.tar.gz/DisplaceNet-0.1/engine/object_detection_branch/retina_net/examples/ResNet50RetinaNet.py
from __future__ import print_function
# import miscellaneous modules
import matplotlib.pyplot as plt
import numpy as np
import os
import time
import cv2
# import keras
# set tf backend to allow memory to grow, instead of claiming everything
# import keras_retinanet
from engine.object_detection_branch.retina_net.keras_retinanet import models
from engine.object_detection_branch.retina_net.keras_retinanet.utils.visualization import draw_box, draw_caption
from engine.object_detection_branch.retina_net.keras_retinanet.utils.colors import label_color
from engine.object_detection_branch.retina_net.keras_retinanet.utils.image import read_image_bgr, preprocess_image, resize_image
# adjust this to point to your downloaded/trained model
# models can be downloaded here: https://github.com/fizyr/keras-retinanet/releases
model_path = os.path.join('..', 'snapshots', 'resnet50_coco_best_v2.1.0.h5')
# load retinanet model
model = models.load_model(model_path, backbone_name='resnet50')
# if the model is not converted to an inference model, use the line below
# see: https://github.com/fizyr/keras-retinanet#converting-a-training-model-to-inference-model
#model = models.load_model(model_path, backbone_name='resnet50', convert_model=True)
#print(model.summary())
# load label to names mapping for visualization purposes
labels_to_names = {0: 'person', 1: 'bicycle', 2: 'car', 3: 'motorcycle', 4: 'airplane',
5: 'bus', 6: 'train', 7: 'truck', 8: 'boat', 9: 'traffic light',
10: 'fire hydrant', 11: 'stop sign', 12: 'parking meter', 13: 'bench',
14: 'bird', 15: 'cat', 16: 'dog', 17: 'horse', 18: 'sheep', 19: 'cow',
20: 'elephant', 21: 'bear', 22: 'zebra', 23: 'giraffe', 24: 'backpack',
25: 'umbrella', 26: 'handbag', 27: 'tie', 28: 'suitcase', 29: 'frisbee',
30: 'skis', 31: 'snowboard', 32: 'sports ball', 33: 'kite', 34: 'baseball bat',
35: 'baseball glove', 36: 'skateboard', 37: 'surfboard', 38: 'tennis racket', 39: 'bottle',
40: 'wine glass', 41: 'cup', 42: 'fork', 43: 'knife', 44: 'spoon', 45: 'bowl',
46: 'banana', 47: 'apple', 48: 'sandwich', 49: 'orange', 50: 'broccoli',
51: 'carrot', 52: 'hot dog', 53: 'pizza', 54: 'donut', 55: 'cake', 56: 'chair',
57: 'couch', 58: 'potted plant', 59: 'bed', 60: 'dining table', 61: 'toilet', 62: 'tv',
63: 'laptop', 64: 'mouse', 65: 'remote', 66: 'keyboard', 67: 'cell phone', 68: 'microwave',
69: 'oven', 70: 'toaster', 71: 'sink', 72: 'refrigerator', 73: 'book', 74: 'clock', 75: 'vase',
76: 'scissors', 77: 'teddy bear', 78: 'hair drier', 79: 'toothbrush'}
# Run detection on example
# load image
image = read_image_bgr('human_right_viol_2.jpg')
# copy to draw on
draw = image.copy()
draw = cv2.cvtColor(draw, cv2.COLOR_BGR2RGB)
# preprocess image for network
image = preprocess_image(image)
image, scale = resize_image(image)
# process image
start = time.time()
boxes, scores, labels = model.predict_on_batch(np.expand_dims(image, axis=0))
print("processing time: ", time.time() - start)
# correct for image scale
boxes /= scale
counter = 0
persons_counter = 0
final_array = np.empty([len(boxes[0]), 4])
# visualize detections
for box, score, label in zip(boxes[0], scores[0], labels[0]):
# scores are sorted so we can break
if score < 0.5:
break
decoded_label = "{}".format(labels_to_names[label])
if decoded_label == 'person':
persons_counter = persons_counter + 1
color = label_color(label)
b = box.astype(int)
draw_box(draw, b, color=color)
final_array[counter][0] = b[0]
final_array[counter][1] = b[1]
final_array[counter][2] = b[2]
final_array[counter][3] = b[3]
caption = "{} {:.3f}".format(labels_to_names[label], score)
draw_caption(draw, b, caption)
counter += 1
print('Persons found:', persons_counter)
plt.figure(figsize=(15, 15))
plt.axis('off')
plt.imshow(draw)
plt.show()
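# A minimal sketch (not part of the original example) of how the detection
# loop above could be wrapped into a reusable helper. The function name,
# arguments, and the 0.5 default threshold are assumptions for illustration.
def detect_persons(image_path, model, labels_to_names, score_threshold=0.5):
    """Return a list of (box, score) pairs for detected persons."""
    image = read_image_bgr(image_path)
    image = preprocess_image(image)
    image, scale = resize_image(image)
    boxes, scores, labels = model.predict_on_batch(np.expand_dims(image, axis=0))
    # map boxes back to the original image coordinates
    boxes /= scale
    persons = []
    for box, score, label in zip(boxes[0], scores[0], labels[0]):
        if score < score_threshold:
            break  # scores are sorted, so nothing weaker will pass
        if labels_to_names[label] == 'person':
            persons.append((box.astype(int), float(score)))
    return persons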
/EditObj2-0.5.1.tar.gz/EditObj2-0.5.1/field_gtk.py
# field_gtk.py
# Copyright (C) 2007-2008 Jean-Baptiste LAMY -- [email protected]
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import editobj2
from editobj2.field import *
from editobj2.field import _WithButtonField, _RangeField, _ShortEnumField, _LongEnumField
import gobject, gtk, gtk.gdk as gdk
class GtkField(MultiGUIField):
y_flags = gtk.FILL
class GtkLabelField(GtkField, LabelField, gtk.Label):
def __init__(self, gui, master, o, attr, undo_stack):
gtk.Label.__init__(self)
super(GtkLabelField, self).__init__(gui, master, o, attr, undo_stack)
self.update()
def update(self):
self.old_str = self.get_value()
self.set_text(self.old_str)
class GtkEntryField(GtkField, EntryField, gtk.Entry):
def __init__(self, gui, master, o, attr, undo_stack):
gtk.Entry.__init__(self)
super(GtkEntryField, self).__init__(gui, master, o, attr, undo_stack)
self.update()
self.connect("focus_out_event", self.validate)
self.connect("key_press_event", self.validate)
def validate(self, widget, event):
if (event.type is gdk.KEY_PRESS) and ((not event.string) or (not event.string in "\r\n")): return
s = self.get_text().decode("utf-8")
if s != self.old_str:
self.old_str = s
self.set_value(s)
def update(self):
self.updating = 1
try:
self.old_str = self.get_value()
self.set_text(self.old_str)
finally:
self.updating = 0
class GtkIntField (GtkEntryField, IntField): pass # XXX no "spin-button" since they don't allow entering e.g. "1 + 2" as an integer !
class GtkDateField (GtkEntryField, DateField): pass
class GtkFloatField (GtkEntryField, FloatField): pass
class GtkStringField(GtkEntryField, StringField): pass
class GtkPasswordField(GtkStringField, PasswordField):
def __init__(self, gui, master, o, attr, undo_stack):
GtkStringField.__init__(self, gui, master, o, attr, undo_stack)
self.set_visibility(0)
class GtkBoolField(GtkField, BoolField, gtk.CheckButton):
def __init__(self, gui, master, o, attr, undo_stack):
gtk.CheckButton.__init__(self)
super(GtkBoolField, self).__init__(gui, master, o, attr, undo_stack)
self.update()
self.connect("toggled", self.validate)
self.connect("clicked", self.clicked)
def validate(self, *event):
v = self.descr.get(self.o, self.attr)
if isinstance(v, int):
self.set_value(int(self.get_active()))
else:
self.set_value(bool(self.get_active()))
def clicked(self, *event):
if self.get_inconsistent(): self.set_inconsistent(0)
def update(self):
self.updating = 1
try:
v = self.descr.get(self.o, self.attr)
if v is introsp.NonConsistent: self.set_inconsistent(1)
else: self.set_active(v)
finally:
self.updating = 0
class GtkProgressBarField(GtkField, ProgressBarField, gtk.ProgressBar):
def __init__(self, gui, master, o, attr, undo_stack):
gtk.ProgressBar.__init__(self)
    super(GtkProgressBarField, self).__init__(gui, master, o, attr, undo_stack)
self.update()
def update(self):
v = self.get_value()
if v is introsp.NonConsistent:
self.pulse()
else: self.set_fraction(v)
class GtkEditButtonField(GtkField, EditButtonField, gtk.Button):
def __init__(self, gui, master, o, attr, undo_stack):
gtk.Button.__init__(self, editobj2.TRANSLATOR(u"Edit..."))
super(GtkEditButtonField, self).__init__(gui, master, o, attr, undo_stack)
self.connect("clicked", self.on_click)
self.update()
def update(self):
self.set_property("sensitive", not self.get_value() is None)
def on_click(self, *args):
p = self
while p:
p = p.get_parent()
if isinstance(p, gtk.Window) and p.get_modal():
editobj2.edit(self.get_value(), undo_stack = self.undo_stack, on_validate = lambda obj: None)
return
editobj2.edit(self.get_value(), undo_stack = self.undo_stack)
class Gtk_WithButtonField(GtkField, _WithButtonField, gtk.HBox):
def __init__(self, gui, master, o, attr, undo_stack, Field, button_text, on_button):
gtk.HBox.__init__(self)
super(Gtk_WithButtonField, self).__init__(gui, master, o, attr, undo_stack, Field, button_text, on_button)
self.pack_start(self.field)
button = gtk.Button(editobj2.TRANSLATOR(button_text))
button.connect("clicked", self.on_click)
self.pack_end(button, 0, 1)
class GtkWithButtonStringField(GtkField, WithButtonStringField, gtk.HBox):
def __init__(self, gui, master, o, attr, undo_stack):
gtk.HBox.__init__(self)
super(GtkWithButtonStringField, self).__init__(gui, master, o, attr, undo_stack)
self.pack_start(self.string_field)
button = gtk.Button(editobj2.TRANSLATOR(self.button_text))
button.connect("clicked", self.on_button)
self.pack_end(button, 0, 1)
class GtkFilenameField(GtkWithButtonStringField, FilenameField):
def on_button(self, *args):
dialog = gtk.FileChooserDialog(action = gtk.FILE_CHOOSER_ACTION_SAVE, buttons = (gtk.STOCK_CANCEL, gtk.RESPONSE_CANCEL, gtk.STOCK_OK, gtk.RESPONSE_OK))
dialog.set_resizable(1)
dialog.set_current_name(self.get_value())
if dialog.run() == gtk.RESPONSE_OK:
filename = dialog.get_filename()
if filename:
self.string_field.set_value(filename)
self.string_field.update()
dialog.destroy()
class GtkDirnameField(GtkWithButtonStringField, DirnameField):
def on_button(self, *args):
dialog = gtk.FileChooserDialog(action = gtk.FILE_CHOOSER_ACTION_CREATE_FOLDER, buttons = (gtk.STOCK_CANCEL, gtk.RESPONSE_CANCEL, gtk.STOCK_OK, gtk.RESPONSE_OK))
dialog.set_resizable(1)
dialog.set_current_folder(self.get_value())
if dialog.run() == gtk.RESPONSE_OK:
folder = dialog.get_current_folder()
if folder:
self.string_field.set_value(folder)
self.string_field.update()
dialog.destroy()
class GtkURLField(GtkWithButtonStringField, URLField):
def on_button(self, *args):
import webbrowser
webbrowser.open_new(self.get_value())
class GtkTextField(GtkField, TextField, gtk.ScrolledWindow):
y_flags = gtk.FILL | gtk.EXPAND
def __init__(self, gui, master, o, attr, undo_stack):
gtk.ScrolledWindow.__init__(self)
super(GtkTextField, self).__init__(gui, master, o, attr, undo_stack)
self.set_policy(gtk.POLICY_AUTOMATIC, gtk.POLICY_AUTOMATIC)
self.set_shadow_type(gtk.SHADOW_IN)
self.set_size_request(-1, 125)
self.text = gtk.TextView()
self.text.set_left_margin (4)
self.text.set_right_margin(4)
self.text.set_wrap_mode(gtk.WRAP_WORD)
self.text.set_size_request(200, -1)
self.text.connect("focus_out_event", self.validate)
self.add(self.text)
self.update()
def validate(self, *args):
s = self.text.get_buffer().get_text(*self.text.get_buffer().get_bounds()).decode("utf-8")
self.set_value(s)
def update(self):
self.updating = 1
try:
self.old_str = self.get_value()
if self.text.get_buffer().get_text(*self.text.get_buffer().get_bounds()).decode("utf-8") != self.old_str:
self.text.get_buffer().set_text(self.old_str)
finally:
self.updating = 0
class GtkObjectAttributeField(GtkField, ObjectAttributeField, gtk.Frame):
def __init__(self, gui, master, o, attr, undo_stack):
super(GtkObjectAttributeField, self).__init__(gui, master, o, attr, undo_stack)
gtk.Frame.__init__(self)
self.set_shadow_type(gtk.SHADOW_IN)
self.add(self.attribute_pane)
class GtkObjectHEditorField(GtkField, ObjectHEditorField, gtk.Frame):
def __init__(self, gui, master, o, attr, undo_stack):
super(GtkObjectHEditorField, self).__init__(gui, master, o, attr, undo_stack)
gtk.Frame.__init__(self)
self.set_shadow_type(gtk.SHADOW_IN)
self.add(self.editor_pane)
class GtkObjectVEditorField(GtkField, ObjectVEditorField, gtk.Frame):
def __init__(self, gui, master, o, attr, undo_stack):
super(GtkObjectVEditorField, self).__init__(gui, master, o, attr, undo_stack)
gtk.Frame.__init__(self)
self.set_shadow_type(gtk.SHADOW_IN)
self.add(self.editor_pane)
class Gtk_RangeField(GtkField, _RangeField, gtk.HScale):
def __init__(self, gui, master, o, attr, undo_stack, min, max, incr = 1):
self.adjustment = gtk.Adjustment(0, min, max, incr)
gtk.HScale.__init__(self, self.adjustment)
self.set_digits(0)
super(Gtk_RangeField, self).__init__(gui, master, o, attr, undo_stack, min, max, incr)
self.connect("value_changed", self.validate)
def validate(self, *args):
if self.updating: return
self.set_value(int(round(self.adjustment.get_value())))
def update(self):
self.updating = 1
try:
self.adjustment.set_value(self.get_value())
finally:
self.updating = 0
class Gtk_ShortEnumField(GtkField, _ShortEnumField, gtk.ComboBox):
def __init__(self, gui, master, o, attr, undo_stack, choices, value_2_enum = None, enum_2_value = None):
self.liststore = gtk.ListStore(gobject.TYPE_STRING)
gtk.ComboBox.__init__(self, self.liststore)
cell = gtk.CellRendererText()
self.pack_start(cell, True)
self.add_attribute(cell, 'text', 0)
super(Gtk_ShortEnumField, self).__init__(gui, master, o, attr, undo_stack, choices, value_2_enum, enum_2_value)
for choice in self.choice_keys: self.liststore.append((choice,))
self.update()
self.connect("changed", self.validate)
def validate(self, *args):
i = self.get_active()
if i != -1: self.set_value(self.choices[self.choice_keys[i]])
def update(self):
self.updating = 1
try:
i = self.choice_2_index.get(self.get_value())
if not i is None: self.set_active(i)
else: self.set_active(-1)
finally:
self.updating = 0
class Gtk_LongEnumField(GtkField, _LongEnumField, gtk.ScrolledWindow):
y_flags = gtk.FILL | gtk.EXPAND
def __init__(self, gui, master, o, attr, undo_stack, choices, value_2_enum = None, enum_2_value = None):
gtk.ScrolledWindow.__init__(self)
self.set_policy(gtk.POLICY_AUTOMATIC, gtk.POLICY_AUTOMATIC)
self.set_shadow_type(gtk.SHADOW_IN)
self.set_size_request(-1, 125)
super(Gtk_LongEnumField, self).__init__(gui, master, o, attr, undo_stack, choices, value_2_enum, enum_2_value)
self.liststore = gtk.ListStore(gobject.TYPE_STRING)
for choice in self.choice_keys: self.liststore.append((choice,))
renderer = gtk.CellRendererText()
self.treeview = gtk.TreeView(self.liststore)
self.treeview.set_headers_visible(0)
self.treeview.append_column(gtk.TreeViewColumn(None, renderer, text = 0))
self.add(self.treeview)
self.update()
self.treeview.get_selection().connect("changed", self.validate)
def validate(self, *args):
liststore, iter = self.treeview.get_selection().get_selected()
if iter:
i = int(liststore.get_path(iter)[0])
if i != self.i: # XXX validate is called twice by GTK, why ?
self.i = i
enum = self.choices[self.choice_keys[i]]
self.set_value(enum)
def update(self):
self.updating = 1
try:
selection = self.treeview.get_selection()
selection.unselect_all()
self.i = self.choice_2_index.get(self.get_value())
if not self.i is None:
selection.select_iter(self.liststore.get_iter(self.i))
self.treeview.scroll_to_cell(max(self.i - 2, 0))
finally:
      self.updating = 0
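# Note on the pattern used throughout this module (illustrative summary, not
# part of the original source): every Gtk*Field pairs a validate() method,
# which pushes the widget state into the edited object via set_value(), with
# an update() method, which pulls the object state back into the widget. The
# self.updating flag guards update() so that programmatic widget changes do
# not re-trigger validate() and cause a feedback loop.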
/McPhysics-1.5.12.tar.gz/McPhysics-1.5.12/playground.py
# (https://github.com/Spinmob/macrospyn).
# Copyright (c) 2002-2020 Jack Childress (Sankey).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, version 3.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import numpy as _n
import scipy.stats as _stats
import time as _t
# For embedding matplotlib figures
from matplotlib.backends.backend_qt5agg import FigureCanvasQTAgg as _canvas
from matplotlib.backends.backend_qt5agg import NavigationToolbar2QT as _navbar
from matplotlib.figure import Figure as _figure
import mcphysics as _m
import spinmob as _s
import spinmob.egg as _egg
_g = _egg.gui
import traceback as _traceback
_p = _traceback.print_last
def plot_and_integrate_reduced_chi2(dof=10, xmin=1e-6, xmax=5, steps=1e5):
"""
Plots the reduced chi^2 density function, and then numerically integrates it.
Parameters
----------
dof=10
Degrees of freedom.
xmin, xmax, steps
Plot range from xmin to xmax with the specified steps. This will affect
the validity of the numerical integral.
"""
_s.pylab.figure(100)
a1 = _s.pylab.subplot(211)
a2 = _s.pylab.subplot(212, sharex=a1)
_s.plot.xy.function('f(x,dof)', xmin, xmax, steps, g=dict(f=_m.functions.reduced_chi2, dof=dof),
axes=a1, ylabel='$P(\chi^2_r)$', xlabel='$\chi^2_r$', tall=True)
_s.tweaks.integrate_shown_data(output_axes=a2, tall=True)
_s.tweaks.ubertidy(window_size=[800,950])
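# Illustrative call (values assumed): with 20 degrees of freedom the density
# narrows around 1, and the numerical integral shown in the lower axes should
# approach 1 over a sufficiently wide range.
#
#   plot_and_integrate_reduced_chi2(dof=20, xmin=1e-6, xmax=10, steps=1e5)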
class fitting_statistics_demo():
"""
Graphical interface for generating fake data, fitting, and collecting
fit statistics.
Parameters
----------
block=True
Whether to block the command line when the window is first shown.
"""
def __init__(self, block=False):
self._build_gui(block)
def _build_gui(self, block=False):
"""
Builds the GUI for taking fake data.
"""
# Make a window with a left grid for settings and controls, and
# a right grid for visualization.
self.window = _g.Window('Fake Data Taker', size=[1000,700], autosettings_path='window.cfg')
self.window.event_close = self.event_close
self.grid_controls = self.window.place_object(_g.GridLayout(False))
self.grid_plotting = self.window.place_object(_g.GridLayout(False), alignment=0)
# Add the acquire button & connect the signal
self.button_acquire = self.grid_controls.place_object(_g.Button('Acquire'), alignment=0).set_width(70)
self.button_fit = self.grid_controls.place_object(_g.Button('Fit') , alignment=0).set_width(55)
self.button_loop = self.grid_controls.place_object(_g.Button('Loop', True), alignment=0).set_width(55)
self.button_clear = self.grid_controls.place_object(_g.Button('Clear'), alignment=0).set_width(55)
self.button_acquire.signal_clicked.connect(self.button_acquire_clicked)
self.button_fit .signal_clicked.connect(self.button_fit_clicked)
self.button_loop .signal_clicked.connect(self.button_loop_clicked)
self.button_clear .signal_clicked.connect(self.button_clear_clicked)
self.button_loop.set_colors_checked('white', 'red')
        # Create and populate the settings tree
self.grid_controls.new_autorow()
self.tree_settings = self.grid_controls.place_object(_g.TreeDictionary(), column_span=4, alignment=0)
self.tree_settings.add_parameter('Acquire/reality', '1.7*x+1.2')
self.tree_settings.add_parameter('Acquire/x_noise', 0)
self.tree_settings.add_parameter('Acquire/y_noise', 1.3)
self.tree_settings.add_parameter('Acquire/xmin', 0)
self.tree_settings.add_parameter('Acquire/xmax', 10)
self.tree_settings.add_parameter('Acquire/steps', 100, dec=True)
self.tree_settings.add_parameter('Fit/function', 'a*x+b')
self.tree_settings.add_parameter('Fit/parameters', 'a=0,b=0')
self.tree_settings.add_parameter('Fit/assumed_ey', 1.3)
self.tree_settings.add_parameter('Stats/bins', 14)
self.tree_settings.add_parameter('Stats/versus_x', 'a')
self.tree_settings.add_parameter('Stats/versus_y', 'b')
self.tree_settings.add_parameter('Stats/plot_theory', False)
# Add the tabs and plotter to the other grid
self.tabs_plotting = self.grid_plotting.place_object(_g.TabArea('tabs_plotting.cfg'), alignment=0)
# Tab for raw data
self.tab_raw = self.tabs_plotting.add_tab('Raw Data')
self.plot_raw = self.tab_raw.place_object(
_g.DataboxPlot(autosettings_path='plot_raw.cfg', autoscript=1),
alignment=0)
self.plot_raw.autoscript_custom = self._autoscript_raw
# Tab for fit
self.tab_fit = self.tabs_plotting.add_tab('Fit')
self.figure_fit = _figure()
self.canvas_fit = _canvas(self.figure_fit)
self.navbar_fit = _navbar(self.canvas_fit, self.window._widget)
self.tab_fit.place_object(self.navbar_fit, alignment=0)
self.tab_fit.new_autorow()
self.tab_fit.place_object(self.canvas_fit, alignment=0)
# Fitter object linked to this figure canvas
self.fitter = _s.data.fitter()
self.fitter.set(autoplot=False)
self.fitter.figures = [self.figure_fit]
# Tab for running total of fit parameters
self.tab_parameters = self.tabs_plotting.add_tab('Fit Parameters')
self.plot_parameters = self.tab_parameters.place_object(
_g.DataboxPlot(autosettings_path='plot_parameters.cfg', show_logger=True),
alignment=0)
# Give it a handle on the fitter for the script
self.plot_parameters.fitter = self.fitter
# Tab for histograms
self.tab_stats = self.tabs_plotting.add_tab('Histograms')
self.figure_stats = _figure()
self.canvas_stats = _canvas(self.figure_stats)
self.navbar_stats = _navbar(self.canvas_stats, self.window._widget)
self.tab_stats.place_object(self.navbar_stats, alignment=0)
self.tab_stats.new_autorow()
self.tab_stats.place_object(self.canvas_stats, alignment=0)
# Changing tabs can update plots
self.tabs_plotting.signal_switched.connect(self.tabs_plotting_switched)
# Set up the autosave & load.
self.tree_settings.connect_any_signal_changed(self.tree_settings.autosave)
self.tree_settings.connect_any_signal_changed(self.update_all_plots)
self.tree_settings.load()
# Show the window
self.window.show(block)
def _autoscript_raw(self):
"""
Returns a nice custom autoscript for plotting the raw data.
"""
return "x = [ d[0] ]\ny = [ d[1] ]\n\nxlabels = 'x'\nylabels = ['y']"
def tabs_plotting_switched(self, *a):
"""
Someone switched a tab!
"""
if a[0]==1: self.update_fit_plot()
elif a[0]==3: self.update_histograms_plot()
def button_acquire_clicked(self, *a):
"""
Acquires fake data and dumps it with the header into the plotter.
"""
# Dump the header info
self.tree_settings.send_to_databox_header(self.plot_raw)
# Generate the data
x = _n.linspace(self.tree_settings['Acquire/xmin'],
self.tree_settings['Acquire/xmax'],
self.tree_settings['Acquire/steps'])
d = _s.fun.generate_fake_data(self.tree_settings['Acquire/reality'], x,
self.tree_settings['Acquire/y_noise'],
self.tree_settings['Acquire/x_noise'])
# Dump it to the plotter and plot
self.plot_raw.copy_columns(d)
# Plot it.
self.plot_raw.plot()
# Autosave if checked
self.plot_raw.autosave()
def button_fit_clicked(self,*a):
"""
Assuming there is data, run the fit!
"""
# Set the functions
self.fitter.set_functions(self.tree_settings['Fit/function'],
self.tree_settings['Fit/parameters'])
# Set the data
self.fitter.set_data(self.plot_raw[0], self.plot_raw[1],
self.tree_settings['Fit/assumed_ey'])
# Fit!
self.fitter.fit()
# Draw
self.figure_fit.canvas.draw()
self.window.process_events()
# Now append the fit results to the next tab's plotter
ps = self.fitter.results.params
x2 = self.fitter.get_reduced_chi_squared()
dof = self.fitter.get_degrees_of_freedom()
ckeys = ['reduced_chi2', 'DOF']
row = [x2,dof]
for pname in ps:
# Append the fit parameter
ckeys.append(pname)
row .append(ps[pname].value)
# Append the fit error
ckeys.append(pname+'_error')
row .append(ps[pname].stderr)
# If the parameters haven't changed, just append the data
self.plot_parameters.append_row(row, ckeys=ckeys)
# If this is the first row, set up the histograms
if len(self.plot_parameters[0]) == 1:
# PARAMETERS: Send the settings to the header
self.tree_settings.send_to_databox_header(self.plot_parameters)
# Generate a plot script
s = 'x = [None]\ny = [d[0],d[1]'
for n in range(len(ps)):
s = s+',d['+str(2*n+2)+']'
s = s+']\n\nxlabels = "Iteration"\nylabels = [ d.ckeys[0], d.ckeys[1]'
for n in range(len(ps)):
s = s+',d.ckeys['+str(2*n+2)+']'
s = s+']'
# Set to manual script and update the text
self.plot_parameters.combo_autoscript.set_value(0, block_signals=True)
self.plot_parameters.script.set_text(s)
# HISTOGRAMS: Clear the figure and set up the histogram axes
self.axes_histograms = []
self.figure_stats.clear()
# Calculate how many rows of plots are needed
rows = int(_n.ceil(len(ps)*0.5)+1)
# Reduced chi^2 histogram
self.axes_histograms.append(self.figure_stats.add_subplot(rows, 2, 1))
self.axes_histograms.append(self.figure_stats.add_subplot(rows, 2, 2))
# Parameter histograms
for n in range(len(ps)):
self.axes_histograms.append(self.figure_stats.add_subplot(rows, 2, n+3))
# Update the parameters plot!
self.plot_parameters.plot()
# If we're on the fit or stats tab (these are slow to plot)
if self.tabs_plotting.get_current_tab()==1: self.update_fit_plot()
if self.tabs_plotting.get_current_tab()==3: self.update_histograms_plot()
def button_loop_clicked(self, value):
"""
When someone clicks the "loop" button.
"""
# If it's enabled, start the loop
if not value: return
# Run the loop
while self.button_loop.is_checked():
# Acquire data and fit
self.button_acquire_clicked(True)
self.window.process_events()
self.button_fit_clicked(True)
self.window.process_events()
def button_clear_clicked(self, *a):
"""
Someone clears the data.
"""
self.plot_parameters.clear()
self.update_all_plots()
def update_fit_plot(self):
"""
Update the fit plot.
"""
if not self.tabs_plotting.get_current_tab()==1: return
self.fitter.plot()
self.window.process_events()
self.figure_fit.canvas.draw()
self.window.process_events()
def update_histograms_plot(self):
"""
Update the histogram plots (actually perform the histogram and plot).
"""
# Don't bother if we're not looking.
if not self.tabs_plotting.get_current_tab()==3: return
if len(self.plot_parameters) and len(self.axes_histograms):
# Update the chi^2 histogram histograms
self.axes_histograms[0].clear()
N,B,c = self.axes_histograms[0].hist(self.plot_parameters[0], self.tree_settings['Stats/bins'], label='$\chi^2_{reduced}$')
x = (B[1:]+B[:-1])*0.5
# Include the error bars
self.axes_histograms[0].errorbar(x, N, _n.sqrt(N), ls='', marker='+')
# Tidy up
self.axes_histograms[0].set_xlabel('$\chi^2_{reduced}$')
self.axes_histograms[0].set_ylabel('Counts')
# Plot the expected distribution.
if self.tree_settings['Stats/plot_theory']:
x2 = _n.linspace(min(0.5*(B[1]-B[0]),0.02), max(1.5,max(self.plot_parameters[0])), 400)
dof = self.plot_parameters[1][-1]
pdf = len(self.plot_parameters[1]) * dof * _stats.chi2.pdf(x2*dof,dof) * (B[1]-B[0])
                self.axes_histograms[0].plot(x2,pdf,label='Expected ('+str(dof)+' DOF)')
self.axes_histograms[0].legend()
# Include zero, to give a sense of scale.
self.axes_histograms[0].set_xlim(0,max(1.5,max(self.plot_parameters[0]))*1.05)
# Plot the correlations
self.axes_histograms[1].clear()
self.axes_histograms[1].plot(self.plot_parameters[self.tree_settings['Stats/versus_x']],
self.plot_parameters[self.tree_settings['Stats/versus_y']],
label=self.tree_settings['Stats/versus_y']+' vs '+self.tree_settings['Stats/versus_x'],
linestyle='', marker='o', alpha=0.3)
self.axes_histograms[1].set_xlabel(self.tree_settings['Stats/versus_x'])
self.axes_histograms[1].set_ylabel(self.tree_settings['Stats/versus_y'])
self.axes_histograms[1].legend()
# Now plot the distributions of the other fit parameters.
for n in range(len(self.fitter.p_fit)):
# Plot the histogram
self.axes_histograms[n+2].clear()
N,B,c = self.axes_histograms[n+2].hist(self.plot_parameters[2*n+2], self.tree_settings['Stats/bins'], label=self.fitter.get_parameter_names()[n])
x = (B[1:]+B[:-1])*0.5
# Include the error bars
self.axes_histograms[n+2].errorbar(x, N, _n.sqrt(N), ls='', marker='+')
# Tidy up
self.axes_histograms[n+2].set_xlabel(self.fitter.get_parameter_names()[n])
self.axes_histograms[n+2].set_ylabel('Counts')
# Plot the expected distribution, calculated from the mean
# and fit error bar.
if self.tree_settings['Stats/plot_theory']:
x0 = _n.average(self.plot_parameters[2*n+2])
ex = self.plot_parameters[2*n+3][-1]
x = _n.linspace(x0-4*ex, x0+4*ex, 400)
pdf = len(self.plot_parameters[1]) * _stats.norm.pdf((x-x0)/ex)/ex * (B[1]-B[0])
self.axes_histograms[n+2].plot(x,pdf,label='Expected')
self.axes_histograms[n+2].legend()
self.figure_stats.canvas.draw()
self.window.process_events()
def update_all_plots(self, *a):
"""
Updates the Fit and Stats plots.
"""
self.update_fit_plot()
self.update_histograms_plot()
def event_close(self, *a):
"""
Quits acquisition when the window closes.
"""
self.button_loop.set_checked(False)
class geiger_simulation():
"""
Graphical interface for simulating a Geiger counter.
Parameters
----------
block=False : bool
Whether to block the console while the window is open.
"""
def __init__(self, name='geiger_simulation', block=False):
self.name = name
self.exception_timer = _g.TimerExceptions()
# Assemble the main layout
self.window = _g.Window('Geiger Simulation', autosettings_path=name+'.window', size=[900,700])
self.grid_top = gt = self.window.add(_g.GridLayout(margins=False))
self.window.new_autorow()
self.grid_bot = gb = self.window.add(_g.GridLayout(margins=False), alignment=0)
self.tabs_settings = gb.add(_g.TabArea(autosettings_path=name+'.tabs_settings'))
self.tabs_data = gb.add(_g.TabArea(autosettings_path=name+'.tabs_data'), alignment=0)
#################################
# Top controls
self.button_acquire = gt.add(_g.Button(
'Acquire', checkable=True,
tip='Aquire fake Geiger data according to the settings below.',
signal_toggled = self._button_acquire_toggled,
style_checked = 'font-size:20px; color:white; background-color:red',
style_unchecked = 'font-size:20px; color:None; background-color:None',
)).set_width(120)
gt.add(_g.Label(' Counts: ')).set_style('font-size:20px;')
self.number_counts = gt.add(_g.NumberBox(
0, 1, bounds=(0,None), int=True,
)).set_style('font-size:20px').set_width(150)
gt.add(_g.Label(' Time:')).set_style('font-size:20px;')
self.number_time = gt.add(_g.NumberBox(
0, 1, bounds=(0,None), siPrefix=True, suffix='s',
)).set_style('font-size:20px').set_width(150)
self.button_reset = gt.add(_g.Button(
text='Reset', signal_clicked=self._button_reset_clicked,
tip='Reset counts and time.')).set_style('font-size:20px;')
#################################
# Settings
self.tab_settings = ts = self.tabs_settings.add('Settings')
ts.new_autorow()
self.settings = s = ts.add(_g.TreeDictionary(autosettings_path=name+'.settings')).set_width(290)
s.add_parameter('Source-Detector Distance', 0.01, step=0.001,
siPrefix=True, suffix='m', bounds=(1e-3, None),
tip='Distance from the source to the detector.')
s.add_parameter('Acquisition Time', 1.0, dec=True,
siPrefix=True, suffix='s', bounds=(1e-9, None),
tip='How long to acquire data for.')
s.add_parameter('Iterations', 1, dec=True, bounds=(0,None),
tip='How many times to repeat the acquisition. 0 means "keep looping".')
s.add_parameter('Iterations/Completed', 0, readonly=True,
tip='How many acquisitions have been completed.')
s.add_parameter('Iterations/Reset Each Time', True,
tip='Click the reset button at the start of each iteration.')
s.add_parameter('Engine/Rate at 1 mm', 2000.0, bounds=(0, None),
siPrefix=True, suffix='Counts/s',
tip='Average counts per second when positioned at 1 mm.')
s.add_parameter('Engine/Time Resolution', 1e-4,
siPrefix=True, suffix='s', dec=True, bounds=(1e-12,None),
tip='Time resolution of the detector. Should be small enough\n'
+'that only one click happens per time step, but large enough\n'
+'that the random number generator will not bottom out.')
s.add_parameter('Engine/Chunk Size', 0.1,
siPrefix=True, suffix='s', dec=True, bounds=(1e-10,None),
tip='How long each chunk should be during acquisition.')
s.add_parameter('Engine/Simulate Delay', True,
tip='Whether to pause appropriately during acquisition.')
###################################
# Plots
self.tab_raw = tr = self.tabs_data.add('Raw Data')
self.plot_raw = tr.add(_g.DataboxPlot('*.raw', autosettings_path=name+'.plot_raw'), alignment=0)
self.tab_log = tl = self.tabs_data.add('Logger')
self.plot_log = tl.add(_g.DataboxPlot('*.log', autosettings_path=name+'.plot_log', show_logger=True), alignment=0)
###################################
# Start the show!
self.window.show(block)
def _button_reset_clicked(self, *a):
"""
Reset the time and counts.
"""
self.number_counts(0)
self.number_time(0)
self.plot_raw.clear()
def _button_acquire_toggled(self, *a):
"""
Someone toggled "Acquire".
"""
# Let the loop finish itself
if not self.button_acquire.is_checked(): return
# Shortcut
s = self.settings
# Loop
s['Iterations/Completed'] = 0
while self.button_acquire.is_checked() \
and (s['Iterations/Completed'] < s['Iterations'] or s['Iterations'] <= 0):
# Get a data set
self.acquire_data()
s['Iterations/Completed'] += 1
self.window.process_events()
# Uncheck it.
self.button_acquire.set_checked(False)
def acquire_data(self):
"""
Acquires data and processes / plots it, as per the shown settings.
"""
# Shortcuts
s = self.settings
d = self.plot_raw
l = self.plot_log
# Get the mean rate using naive 1/r^2 fall off
rate = s['Engine/Rate at 1 mm'] * (1e-3 / s['Source-Detector Distance'])**2
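        # Worked example (illustrative): with 'Rate at 1 mm' = 2000 counts/s
        # and a source-detector distance of 10 mm, the inverse-square scaling
        # gives rate = 2000 * (1/10)**2 = 20 counts/s.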
dt = s['Engine/Time Resolution']
DT = s['Engine/Chunk Size']
# Get the probability per time step of a tick
p = rate*dt
# Remaining time to count down
N = int(_n.round(s['Acquisition Time']/dt)) # Total number of steps
n = min(int(_n.ceil(DT/dt)), 100000)
# If we're supposed to
if s['Iterations/Reset Each Time']: self.button_reset.click()
# Acquire in chunks until it's done.
t0 = _t.time()
while N > 0 and self.button_acquire.is_checked():
# Get the last time
if 't' in d.ckeys: t_start = d['t'][-1]+dt
else: t_start = dt
# Clear the data
d.clear()
s.send_to_databox_header(d)
# Generate the time data
d['t'] = _n.linspace(t_start,t_start+(n-1)*dt,n)
d['Count'] = _n.zeros(n)
# Now get the time bins with a click
d['Count'][_n.random.rand(n)<p] = 1
d.plot()
# Update the master numbers
self.number_counts.increment(len(_n.where(d['Count'] == 1)[0]))
self.number_time .increment(n*dt)
# Update remaining time
N -= n
# Update GUI, then wait for the chunk time minus processing time
self.window.process_events()
if s['Engine/Simulate Delay']: self.window.sleep(DT - (_t.time()-t0), 0.005)
# Update t0
t0 = _t.time()
# All done! Send this info to the logger if we didn't cancel
if self.button_acquire():
# If we don't have a "last" run number, set it to 0 (incremented later)
if not 'Run #' in l.ckeys: i = 0
# Otherwise, use the actual last run number
else: i = l['Run #'][-1]
# Append the data to the logger.
l.append_row(
[i+1, s['Source-Detector Distance'], s['Acquisition Time'], self.number_time(), self.number_counts()],
['Run #', 'Distance (m)', 'Acquisition (s)', 'Total (s)', 'Counts'])
l.plot()
if __name__ == '__main__':
    self = fitting_statistics_demo()
/NREL_disco-0.4.2-py3-none-any.whl/disco/cli/summarize_hosting_capacity.py
import json
import logging
import re
import shutil
import sys
import time
from pathlib import Path
from tempfile import NamedTemporaryFile
import click
import chevron
from jade.loggers import setup_logging
from jade.utils.subprocess_manager import check_run_command
from jade.utils.utils import get_cli_string, load_data, dump_data
import disco
logger = logging.getLogger("summarize_hc_metrics")
DISCO = Path(disco.__path__[0]).parent
TEMPLATE_FILE = DISCO / "disco" / "postprocess" / "query.mustache"
HOSTING_CAPACITY_THRESHOLDS = DISCO / "disco" / "postprocess" / "config" / "hc_thresholds.toml"
SQLITE3_CMD_TEMPLATE = """
.read {{query_file}}
.headers on
.mode csv
.echo on
.output {{hc_summary_file}}
select * from hc_summary;
.output {{hc_by_sample_file}}
select * from hc_by_sample_kw;
.output {{bad_feeder_file}}
select * from bad_feeders order by feeder;
.output {{bad_feeder_pct_thresholds_file}}
select * from bad_feeders_pct_threshold order by feeder;
.output {{bad_feeder_violations_count_file}}
select * from bad_feeders_violation_count_overall;
"""
def _check_task_pattern(_, __, val):
if val is None:
return val
if not re.search(r"^[\w\% ]+$", val):
logger.error("Task pattern can only contain alphanumeric characters, spaces, and '%'.")
sys.exit(1)
return val
@click.command()
@click.option(
"-d",
"--database",
type=click.Path(exists=True),
required=True,
help="Path to simulation results database file",
)
@click.option(
"-s",
"--scenario",
type=click.Choice(["pf1", "control_mode", "derms"], case_sensitive=False),
required=True,
help="Scenario name",
)
@click.option(
"-T",
"--task-names",
multiple=True,
help="Query data with these task names in the database.",
)
@click.option(
"-t",
"--task-pattern",
type=str,
help="Pattern to match one or more tasks in the database with SQL LIKE. Can only contain "
"letters, numbers, spaces, underscores, and %. Example: '%Time Series%",
callback=_check_task_pattern,
)
@click.option(
"--hc-thresholds",
type=click.Path(),
default=HOSTING_CAPACITY_THRESHOLDS,
show_default=True,
help="File containing thresholds for filtering metrics",
)
@click.option(
"--thermal/--no-thermal",
is_flag=True,
default=True,
show_default=True,
help="Check for thermal violations",
)
@click.option(
"--voltage/--no-voltage",
is_flag=True,
default=True,
show_default=True,
help="Check for voltage violations",
)
@click.option(
"--secondaries/--no-secondaries",
is_flag=True,
default=False,
show_default=True,
help="Include secondary nodes in voltage checks.",
)
@click.option(
"-o",
"--output-directory",
default="hc_reports",
show_default=True,
help="Create report files in this directory. Must not already exist.",
callback=lambda _, __, x: Path(x),
)
@click.option(
"-f",
"--force",
is_flag=True,
default=False,
show_default=True,
help="Overwrite any pre-existing output files.",
)
@click.option(
"--verbose", is_flag=True, default=False, show_default=True, help="Enable verbose logging"
)
def summarize_hosting_capacity(
database,
scenario,
task_names,
task_pattern,
hc_thresholds,
thermal,
voltage,
secondaries,
output_directory,
force,
verbose,
):
"""Create summary files for hosting capacity results."""
if output_directory.exists():
if force:
shutil.rmtree(output_directory)
else:
print(f"{output_directory} already exists. Set --force to overwrite.", file=sys.stderr)
sys.exit(1)
output_directory.mkdir()
level = logging.DEBUG if verbose else logging.INFO
log_file = output_directory / "summarize_hc_metrics.log"
setup_logging("summarize_hc_metrics", log_file, console_level=level, file_level=level, packages=["disco"])
logger.info(get_cli_string())
if task_pattern is None and not task_names:
logger.error("One of --task-names or --tast-pattern must be passed")
sys.exit(1)
if task_pattern is not None and task_names:
logger.error("Only one of --task-names and --tast-pattern can be passed")
sys.exit(1)
hc_summary_filename = output_directory / "hc_summary.csv"
hc_by_sample_filename = output_directory / "hc_by_sample.csv"
bad_feeder_filename = output_directory / "feeders_fail_base_case.csv"
bad_feeder_pct_thresholds_filename = (
output_directory / "feeders_pct_thresholds_fail_base_case.csv"
)
bad_feeder_violations_count_filename = (
output_directory / "feeders_fail_base_case_threshold_violation_counts.csv"
)
defaults = load_data(hc_thresholds)
options = {"scenario": scenario}
if task_names:
options["task_names"] = " OR ".join((f"task.name = '{x}'" for x in task_names))
if task_pattern is not None:
options["task_pattern"] = task_pattern
if thermal:
options["thermal"] = defaults["thermal"]
if voltage:
options["voltage"] = defaults["voltage"]
options["voltage"]["secondaries"] = secondaries
with open(TEMPLATE_FILE, "r") as f_in:
query = chevron.render(f_in, options)
with NamedTemporaryFile(mode="w") as f_query:
f_query.write(query)
f_query.flush()
out_file = output_directory / "query.sql"
shutil.copyfile(f_query.name, out_file)
with NamedTemporaryFile(mode="w") as f_sqlite3_cmd:
query_options = {
"query_file": f_query.name,
"hc_summary_file": hc_summary_filename,
"hc_by_sample_file": hc_by_sample_filename,
"bad_feeder_file": bad_feeder_filename,
"bad_feeder_pct_thresholds_file": bad_feeder_pct_thresholds_filename,
"bad_feeder_violations_count_file": bad_feeder_violations_count_filename,
}
f_sqlite3_cmd.write(chevron.render(SQLITE3_CMD_TEMPLATE, query_options))
f_sqlite3_cmd.write("\n")
f_sqlite3_cmd.flush()
logger.info(
"Running SQL queries on %s with thresholds\n%s",
database,
json.dumps(options, indent=4),
)
dump_data(options, output_directory / "thresholds.json", indent=True)
start = time.time()
cmd = f"sqlite3 -init {f_sqlite3_cmd.name} {database} .exit"
check_run_command(cmd)
logger.info("Queries complete. Duration = %.2f seconds", time.time() - start)
for filename in output_directory.iterdir():
logger.info("Created output file %s", filename) | PypiClean |
/INGInious-0.8.7.tar.gz/INGInious-0.8.7/inginious/frontend/static/js/codemirror/mode/ntriples/ntriples.js
/*
The following expression defines the ASF grammar transitions.
pre_subject ->
{
( writing_subject_uri | writing_bnode_uri )
-> pre_predicate
-> writing_predicate_uri
-> pre_object
-> writing_object_uri | writing_object_bnode |
(
writing_object_literal
-> writing_literal_lang | writing_literal_type
)
-> post_object
-> BEGIN
} otherwise {
-> ERROR
}
*/
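// Example walk-through (illustrative): for the statement
//   <http://a.example/s> <http://a.example/p> "text"@en .
// the state machine below moves pre_subject -> writing_subject_uri ->
// pre_predicate -> writing_predicate_uri -> pre_object ->
// writing_object_literal -> writing_literal_lang -> post_object -> BEGIN.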
(function(mod) {
if (typeof exports == "object" && typeof module == "object") // CommonJS
mod(require("../../lib/codemirror"));
else if (typeof define == "function" && define.amd) // AMD
define(["../../lib/codemirror"], mod);
else // Plain browser env
mod(CodeMirror);
})(function(CodeMirror) {
"use strict";
CodeMirror.defineMode("ntriples", function() {
var Location = {
PRE_SUBJECT : 0,
WRITING_SUB_URI : 1,
WRITING_BNODE_URI : 2,
PRE_PRED : 3,
WRITING_PRED_URI : 4,
PRE_OBJ : 5,
WRITING_OBJ_URI : 6,
WRITING_OBJ_BNODE : 7,
WRITING_OBJ_LITERAL : 8,
WRITING_LIT_LANG : 9,
WRITING_LIT_TYPE : 10,
POST_OBJ : 11,
ERROR : 12
};
function transitState(currState, c) {
var currLocation = currState.location;
var ret;
// Opening.
if (currLocation == Location.PRE_SUBJECT && c == '<') ret = Location.WRITING_SUB_URI;
else if(currLocation == Location.PRE_SUBJECT && c == '_') ret = Location.WRITING_BNODE_URI;
else if(currLocation == Location.PRE_PRED && c == '<') ret = Location.WRITING_PRED_URI;
else if(currLocation == Location.PRE_OBJ && c == '<') ret = Location.WRITING_OBJ_URI;
else if(currLocation == Location.PRE_OBJ && c == '_') ret = Location.WRITING_OBJ_BNODE;
else if(currLocation == Location.PRE_OBJ && c == '"') ret = Location.WRITING_OBJ_LITERAL;
// Closing.
else if(currLocation == Location.WRITING_SUB_URI && c == '>') ret = Location.PRE_PRED;
else if(currLocation == Location.WRITING_BNODE_URI && c == ' ') ret = Location.PRE_PRED;
else if(currLocation == Location.WRITING_PRED_URI && c == '>') ret = Location.PRE_OBJ;
else if(currLocation == Location.WRITING_OBJ_URI && c == '>') ret = Location.POST_OBJ;
else if(currLocation == Location.WRITING_OBJ_BNODE && c == ' ') ret = Location.POST_OBJ;
else if(currLocation == Location.WRITING_OBJ_LITERAL && c == '"') ret = Location.POST_OBJ;
else if(currLocation == Location.WRITING_LIT_LANG && c == ' ') ret = Location.POST_OBJ;
else if(currLocation == Location.WRITING_LIT_TYPE && c == '>') ret = Location.POST_OBJ;
// Closing typed and language literal.
else if(currLocation == Location.WRITING_OBJ_LITERAL && c == '@') ret = Location.WRITING_LIT_LANG;
else if(currLocation == Location.WRITING_OBJ_LITERAL && c == '^') ret = Location.WRITING_LIT_TYPE;
// Spaces.
else if( c == ' ' &&
(
currLocation == Location.PRE_SUBJECT ||
currLocation == Location.PRE_PRED ||
currLocation == Location.PRE_OBJ ||
currLocation == Location.POST_OBJ
)
) ret = currLocation;
// Reset.
else if(currLocation == Location.POST_OBJ && c == '.') ret = Location.PRE_SUBJECT;
// Error
else ret = Location.ERROR;
currState.location=ret;
}
return {
startState: function() {
return {
location : Location.PRE_SUBJECT,
uris : [],
anchors : [],
bnodes : [],
langs : [],
types : []
};
},
token: function(stream, state) {
var ch = stream.next();
if(ch == '<') {
transitState(state, ch);
var parsedURI = '';
stream.eatWhile( function(c) { if( c != '#' && c != '>' ) { parsedURI += c; return true; } return false;} );
state.uris.push(parsedURI);
if( stream.match('#', false) ) return 'variable';
stream.next();
transitState(state, '>');
return 'variable';
}
if(ch == '#') {
var parsedAnchor = '';
stream.eatWhile(function(c) { if(c != '>' && c != ' ') { parsedAnchor+= c; return true; } return false;});
state.anchors.push(parsedAnchor);
return 'variable-2';
}
if(ch == '>') {
transitState(state, '>');
return 'variable';
}
if(ch == '_') {
transitState(state, ch);
var parsedBNode = '';
stream.eatWhile(function(c) { if( c != ' ' ) { parsedBNode += c; return true; } return false;});
state.bnodes.push(parsedBNode);
stream.next();
transitState(state, ' ');
return 'builtin';
}
if(ch == '"') {
transitState(state, ch);
stream.eatWhile( function(c) { return c != '"'; } );
stream.next();
if( stream.peek() != '@' && stream.peek() != '^' ) {
transitState(state, '"');
}
return 'string';
}
if( ch == '@' ) {
transitState(state, '@');
var parsedLang = '';
stream.eatWhile(function(c) { if( c != ' ' ) { parsedLang += c; return true; } return false;});
state.langs.push(parsedLang);
stream.next();
transitState(state, ' ');
return 'string-2';
}
if( ch == '^' ) {
stream.next();
transitState(state, '^');
var parsedType = '';
stream.eatWhile(function(c) { if( c != '>' ) { parsedType += c; return true; } return false;} );
state.types.push(parsedType);
stream.next();
transitState(state, '>');
return 'variable';
}
if( ch == ' ' ) {
transitState(state, ch);
}
if( ch == '.' ) {
transitState(state, ch);
}
}
};
});
CodeMirror.defineMIME("text/n-triples", "ntriples");
});
/Doozer-1.2.0.tar.gz/Doozer-1.2.0/docs/extensions.rst
==========
Extensions
==========
Extensions provide additional functionality to applications. Configuration
management is shared between applications and extensions in a central location.
Using Extensions
================
.. code::
from doozer import Application
from doozer_sqlite import SQLite
app = Application(__name__)
db = SQLite(app)
db.connection.execute('SELECT 1;')
Developing Extensions
=====================
Doozer provides an :class:`~doozer.extensions.Extension` base class to make
extension development easier.
.. code::
from doozer import Extension
class SQLite(Extension):
DEFAULT_SETTINGS = {'SQLITE_CONNECTION_STRING': ':memory:'}
def __init__(self, app=None):
self._connection = None
super().__init__(app)
@property
def connection(self):
if not self._connection:
conn_string = self.app.settings['SQLITE_CONNECTION_STRING']
self._connection = sqlite3.connect(conn_string)
return self._connection
The :class:`~doozer.extensions.Extension` class provides two special attributes
that are meant to be overridden:
* :attr:`~doozer.extensions.Extension.DEFAULT_SETTINGS` provides default values
for an extension's settings during the
:meth:`~doozer.extensions.Extension.init_app` step. When a value is used by
an extension and has a sensible default, it should be stored here (e.g., a
database hostname).
* :attr:`~doozer.extensions.Extension.REQUIRED_SETTINGS` provides a list of
keys that are checked for existence during the
:meth:`~doozer.extensions.Extension.init_app` step. If one or more required
settings are not set on the application instance assigned to the extension, a
``KeyError`` is raised. Extensions should set this when a value is required
but has no default (e.g., a database password).
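A hypothetical extension that needs credentials might combine the two
attributes like this (the setting names are illustrative):

.. code::

    from doozer import Extension

    class Database(Extension):
        DEFAULT_SETTINGS = {'DATABASE_HOST': 'localhost'}
        REQUIRED_SETTINGS = ['DATABASE_PASSWORD']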
.. _extending-the-cli:
Extending the Command Line
==========================
Doozer offers an extensible command line interface. To register your own
commands, use :func:`~doozer.cli.register_commands`. Any function passed to it
will have its usage created directly from its signature. During the course of
initializing the application for use with the extension (i.e.,
:meth:`~doozer.extensions.Extension.init_app`), Doozer will check for a method
on the extension's instance named ``register_cli`` and call it. If you place
any calls to :func:`~doozer.cli.register_commands` inside it, the command line
interface will be extended automatically.
In order to access the new commands, the ``doozer`` command line utility must
be given a reference to an :class:`~doozer.base.Application`. This is done
through the ``--app`` argument:
.. code::
$ doozer --app APP_PATH
.. note::
For details about the syntax to use when passing a reference to an
:class:`~doozer.base.Application`, see :ref:`running-applications`.
A positional argument in the Python function will result in a required
positional argument in the command::
def trash(heap):
pass
.. code:: sh
$ doozer --app APP_PATH NAMESPACE trash HEAP
A keyword argument in the Python function will result in a positional argument
in the command with a default value to be used when the argument is omitted::
def trash(heap='marjory'):
pass
.. code:: sh
$ doozer --app APP_PATH NAMESPACE trash [HEAP]
A keyword-only argument in the Python function will result in an optional
argument in the command::
def trash(*, heap='marjory'):
pass
.. code:: sh
$ doozer --app APP_PATH NAMESPACE trash [--heap HEAP]
By default, all optional arguments will have a flag that matches the function
argument's name. When no other optional arguments start with the same
character, a single-character abbreviated flag can also be used.
.. code:: sh
$ doozer --app APP_PATH NAMESPACE trash [-g HEAP]
The ``trash`` function can then be registered with the CLI::
register_commands('fraggle', [trash])
.. code:: sh
$ doozer --app APP_PATH fraggle trash --help
Additionally, if a command includes a ``quiet`` or ``verbose`` argument, it
will automatically receive the count of the number of times it was specified
(e.g., ``-v`` will have the value ``1``, ``-vv`` will have the value ``2``).
When both arguments are included, they will be added as a mutually exclusive
group.
.. note::
Due to how :meth:`argparse <python:argparse.ArgumentParser.add_argument>`
handles argument counts, ``quiet`` and ``verbose`` will be set to ``None``
rather than ``0`` when the flag isn't specified when the command is
invoked.
.. code:: sh
$ doozer --app APP_PATH fraggle trash -vvvv
$ doozer --app APP_PATH fraggle trash --quiet
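For reference, a function signature that would produce both flags might look
like this (the body is illustrative):

.. code::

    def trash(*, quiet=None, verbose=None):
        if verbose:
            print('clearing the heap loudly')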
Available Extensions
====================
Several extensions are available for use:
* `Henson-AMQP <https://henson-amqp.readthedocs.io>`_
* `Henson-Database <https://henson-database.readthedocs.io>`_
* `Henson-Logging <https://henson-logging.readthedocs.io>`_
/FreePyBX-1.0-RC1.tar.gz/FreePyBX-1.0-RC1/freepybx/public/js/dojox/drawing/stencil/Text.js.uncompressed.js
define("dojox/drawing/stencil/Text", ["dijit","dojo","dojox"], function(dijit,dojo,dojox){
dojo.provide("dojox.drawing.stencil.Text");
dojox.drawing.stencil.Text = dojox.drawing.util.oo.declare(
// summary:
// Creates a dojox.gfx Text (SVG or VML) based on data provided.
// description:
// There are two text classes. TextBlock extends this one and
// adds editable functionality, discovers text width etc.
// This class displays text only. There is no line wrapping.
// Multiple lines can be acheived by inserting \n linebreaks
// in the text.
//
dojox.drawing.stencil._Base,
function(options){
// summary:
// constructor.
},
{
type:"dojox.drawing.stencil.Text",
anchorType:"none",
baseRender:true,
// align: String
// Text horizontal alignment.
// Options: start, middle, end
align:"start",
//
// valign:String
// Text vertical alignment
// Options: top, middle, bottom (FIXME: bottom not supported)
valign:"top",
//
// _lineHeight: [readonly] Number
// The height of each line of text. Based on style information
// and font size.
_lineHeight:1,
/*=====
StencilData: {
// summary:
// The data used to create the dojox.gfx Text
// x: Number
// Left point x
// y: Number
// Top point y
// width: ? Number
// Optional width of Text. Not required but recommended.
// For auto-sizing, use TextBlock.
// height: ? Number
// Optional height of Text. If not provided, _lineHeight is used.
// text: String
// The string content. If not provided, may auto-delete depending on defaults.
},
StencilPoints: [
// summary:
// An Array of dojox.__StencilPoint objects that describe the Stencil
// 0: Object
// Top left point
// 1: Object
// Top right point
// 2: Object
// Bottom right point
// 3: Object
// Bottom left point
],
=====*/
typesetter: function(text){
// summary:
// Register raw text, returning typeset form.
// Uses function dojox.drawing.stencil.Text.typeset
// for typesetting, if it exists.
//
if(dojox.drawing.util.typeset){
this._rawText = text;
return dojox.drawing.util.typeset.convertLaTeX(text);
}
return text;
},
setText: function(text){
// summary:
// Setter for text.
//
// Only apply typesetting to objects that the user can modify.
// Else, it is assumed that typesetting is done elsewhere.
if(this.enabled){
text = this.typesetter(text);
}
// This only has an effect if text is null or this.created is false.
this._text = text;
this._textArray = [];
this.created && this.render(text);
},
getText: function(){
// summary:
// Getter for text.
//
return this._rawText || this._text;
},
dataToPoints: function(/*Object*/o){
//summary:
// Converts data to points.
o = o || this.data;
var w = o.width =="auto" ? 1 : o.width;
var h = o.height || this._lineHeight;
this.points = [
{x:o.x, y:o.y}, // TL
{x:o.x + w, y:o.y}, // TR
{x:o.x + w, y:o.y + h}, // BR
{x:o.x, y:o.y + h} // BL
];
return this.points;
},
pointsToData: function(/*Array*/p){
// summary:
// Converts points to data
p = p || this.points;
var s = p[0];
var e = p[2];
this.data = {
x: s.x,
y: s.y,
width: e.x-s.x,
height: e.y-s.y
};
return this.data;
},
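		// Worked example (illustrative): points TL(10,20), TR(110,20),
		// BR(110,50), BL(10,50) produce data {x:10, y:20, width:100, height:30},
		// and dataToPoints maps that data back to the same four corners.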
render: function(/* String*/text){
// summary:
// Renders the 'hit' object (the shape used for an expanded
// hit area and for highlighting) and the'shape' (the actual
// display object). Text is slightly different than other
// implementations. Instead of calling render twice, it calls
// _createHilite for the 'hit'
// arguments:
// text String
// Changes text if sent. Be sure to use the setText and
// not to call this directly.
//
this.remove(this.shape, this.hit);
//console.log("text render, outline:", !this.annotation, this.renderHit, (!this.annotation && this.renderHit))
!this.annotation && this.renderHit && this._renderOutline();
if(text!=undefined){
this._text = text;
this._textArray = this._text.split("\n");
}
var d = this.pointsToData();
var h = this._lineHeight;
var x = d.x + this.style.text.pad*2;
var y = d.y + this._lineHeight - (this.textSize*.4);
if(this.valign=="middle"){
y -= h/2;
}
this.shape = this.container.createGroup();
/*console.log(" render ", this.type, this.id)
console.log(" render Y:", d.y, "textSize:", this.textSize, "LH:", this._lineHeight)
console.log(" render text:", y, " ... ", this._text, "enabled:", this.enabled);
console.log(" render text:", this.style.currentText);
*/
dojo.forEach(this._textArray, function(txt, i){
var tb = this.shape.createText({x: x, y: y+(h*i), text: unescape(txt), align: this.align})
.setFont(this.style.currentText)
.setFill(this.style.currentText.color);
this._setNodeAtts(tb);
}, this);
this._setNodeAtts(this.shape);
},
_renderOutline: function(){
// summary:
// Create the hit and highlight area
// for the Text.
//
if(this.annotation){ return; }
var d = this.pointsToData();
if(this.align=="middle"){
d.x -= d.width/2 - this.style.text.pad * 2;
}else if(this.align=="start"){
d.x += this.style.text.pad;
}else if(this.align=="end"){
d.x -= d.width - this.style.text.pad * 3;
}
if(this.valign=="middle"){
d.y -= (this._lineHeight )/2 - this.style.text.pad;
}
this.hit = this.container.createRect(d)
.setStroke(this.style.currentHit)
.setFill(this.style.currentHit.fill);
//.setFill("#ffff00");
this._setNodeAtts(this.hit);
this.hit.moveToBack();
},
makeFit: function(text, w){
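			// summary:
			//		Finds the largest font size at which the given text still
			//		fits within width w, by growing a temporary off-screen span
			//		one pixel at a time (capped at 30 iterations) and then
			//		stepping back one size. Returns {size, box}.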
var span = dojo.create('span', {innerHTML:text, id:"foo"}, document.body);
var sz = 1;
dojo.style(span, "fontSize", sz+"px");
var cnt = 30;
while(dojo.marginBox(span).w<w){
sz++;
dojo.style(span, "fontSize", sz+"px");
if(cnt--<=0) break;
}
sz--;
var box = dojo.marginBox(span);
dojo.destroy(span);
return {size:sz, box:box};
}
}
);
dojox.drawing.register({
name:"dojox.drawing.stencil.Text"
}, "stencil");
});
/123_object_detection-0.1.tar.gz/123_object_detection-0.1/object_detection/utils/np_box_mask_list_ops.py
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from six.moves import range
from object_detection.utils import np_box_list_ops
from object_detection.utils import np_box_mask_list
from object_detection.utils import np_mask_ops
def box_list_to_box_mask_list(boxlist):
"""Converts a BoxList containing 'masks' into a BoxMaskList.
Args:
boxlist: An np_box_list.BoxList object.
Returns:
An np_box_mask_list.BoxMaskList object.
Raises:
ValueError: If boxlist does not contain `masks` as a field.
"""
if not boxlist.has_field('masks'):
raise ValueError('boxlist does not contain mask field.')
box_mask_list = np_box_mask_list.BoxMaskList(
box_data=boxlist.get(),
mask_data=boxlist.get_field('masks'))
extra_fields = boxlist.get_extra_fields()
for key in extra_fields:
if key != 'masks':
box_mask_list.data[key] = boxlist.get_field(key)
return box_mask_list
def area(box_mask_list):
"""Computes area of masks.
Args:
box_mask_list: np_box_mask_list.BoxMaskList holding N boxes and masks
Returns:
    a numpy array with shape [N] representing mask areas
"""
return np_mask_ops.area(box_mask_list.get_masks())
def intersection(box_mask_list1, box_mask_list2):
"""Compute pairwise intersection areas between masks.
Args:
box_mask_list1: BoxMaskList holding N boxes and masks
box_mask_list2: BoxMaskList holding M boxes and masks
Returns:
    a numpy array with shape [N, M] representing pairwise intersection areas
"""
return np_mask_ops.intersection(box_mask_list1.get_masks(),
box_mask_list2.get_masks())
def iou(box_mask_list1, box_mask_list2):
"""Computes pairwise intersection-over-union between box and mask collections.
Args:
box_mask_list1: BoxMaskList holding N boxes and masks
box_mask_list2: BoxMaskList holding M boxes and masks
Returns:
a numpy array with shape [N, M] representing pairwise iou scores.
"""
return np_mask_ops.iou(box_mask_list1.get_masks(),
box_mask_list2.get_masks())
def ioa(box_mask_list1, box_mask_list2):
"""Computes pairwise intersection-over-area between box and mask collections.
Intersection-over-area (ioa) between two masks mask1 and mask2 is defined as
their intersection area over mask2's area. Note that ioa is not symmetric,
that is, IOA(mask1, mask2) != IOA(mask2, mask1).
Args:
box_mask_list1: np_box_mask_list.BoxMaskList holding N boxes and masks
box_mask_list2: np_box_mask_list.BoxMaskList holding M boxes and masks
Returns:
a numpy array with shape [N, M] representing pairwise ioa scores.
"""
return np_mask_ops.ioa(box_mask_list1.get_masks(), box_mask_list2.get_masks())
def gather(box_mask_list, indices, fields=None):
"""Gather boxes from np_box_mask_list.BoxMaskList according to indices.
By default, gather returns boxes corresponding to the input index list, as
well as all additional fields stored in the box_mask_list (indexing into the
first dimension). However one can optionally only gather from a
subset of fields.
Args:
box_mask_list: np_box_mask_list.BoxMaskList holding N boxes
indices: a 1-d numpy array of type int_
fields: (optional) list of fields to also gather from. If None (default),
all fields are gathered from. Pass an empty fields list to only gather
the box coordinates.
Returns:
subbox_mask_list: a np_box_mask_list.BoxMaskList corresponding to the subset
of the input box_mask_list specified by indices
Raises:
ValueError: if specified field is not contained in box_mask_list or if the
indices are not of type int_
"""
if fields is not None:
if 'masks' not in fields:
fields.append('masks')
return box_list_to_box_mask_list(
np_box_list_ops.gather(
boxlist=box_mask_list, indices=indices, fields=fields))
def sort_by_field(box_mask_list, field,
order=np_box_list_ops.SortOrder.DESCEND):
"""Sort boxes and associated fields according to a scalar field.
A common use case is reordering the boxes according to descending scores.
Args:
box_mask_list: BoxMaskList holding N boxes.
field: A BoxMaskList field for sorting and reordering the BoxMaskList.
order: (Optional) 'descend' or 'ascend'. Default is descend.
Returns:
sorted_box_mask_list: A sorted BoxMaskList with the field in the specified
order.
"""
return box_list_to_box_mask_list(
np_box_list_ops.sort_by_field(
boxlist=box_mask_list, field=field, order=order))
def non_max_suppression(box_mask_list,
max_output_size=10000,
iou_threshold=1.0,
score_threshold=-10.0):
"""Non maximum suppression.
This op greedily selects a subset of detection bounding boxes, pruning
away boxes that have high IOU (intersection over union) overlap (> thresh)
with already selected boxes. In each iteration, the detected bounding box with
highest score in the available pool is selected.
Args:
box_mask_list: np_box_mask_list.BoxMaskList holding N boxes. Must contain
a 'scores' field representing detection scores. All scores belong to the
same class.
max_output_size: maximum number of retained boxes
iou_threshold: intersection over union threshold.
score_threshold: minimum score threshold. Remove the boxes with scores
less than this value. Default value is set to -10. A very
low threshold to pass pretty much all the boxes, unless
the user sets a different score threshold.
Returns:
an np_box_mask_list.BoxMaskList holding M boxes where M <= max_output_size
Raises:
ValueError: if 'scores' field does not exist
ValueError: if threshold is not in [0, 1]
ValueError: if max_output_size < 0
"""
if not box_mask_list.has_field('scores'):
raise ValueError('Field scores does not exist')
if iou_threshold < 0. or iou_threshold > 1.0:
raise ValueError('IOU threshold must be in [0, 1]')
if max_output_size < 0:
    raise ValueError('max_output_size must be non-negative.')
box_mask_list = filter_scores_greater_than(box_mask_list, score_threshold)
if box_mask_list.num_boxes() == 0:
return box_mask_list
box_mask_list = sort_by_field(box_mask_list, 'scores')
# Prevent further computation if NMS is disabled.
if iou_threshold == 1.0:
if box_mask_list.num_boxes() > max_output_size:
selected_indices = np.arange(max_output_size)
return gather(box_mask_list, selected_indices)
else:
return box_mask_list
masks = box_mask_list.get_masks()
num_masks = box_mask_list.num_boxes()
  # is_index_valid marks the boxes that are still candidates for selection.
is_index_valid = np.full(num_masks, 1, dtype=bool)
selected_indices = []
num_output = 0
for i in range(num_masks):
if num_output < max_output_size:
if is_index_valid[i]:
num_output += 1
selected_indices.append(i)
is_index_valid[i] = False
valid_indices = np.where(is_index_valid)[0]
if valid_indices.size == 0:
break
intersect_over_union = np_mask_ops.iou(
np.expand_dims(masks[i], axis=0), masks[valid_indices])
intersect_over_union = np.squeeze(intersect_over_union, axis=0)
is_index_valid[valid_indices] = np.logical_and(
is_index_valid[valid_indices],
intersect_over_union <= iou_threshold)
return gather(box_mask_list, np.array(selected_indices))
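# Minimal self-contained sketch (illustrative, not used by the module) of the
# greedy loop above, driven by a precomputed pairwise IOU matrix instead of
# masks. Inputs are hypothetical; the real function works on BoxMaskList
# objects and computes IOU on the fly via np_mask_ops.
def _demo_greedy_nms(scores, iou_matrix, iou_threshold=0.5, max_output_size=10):
  import numpy as np
  order = np.argsort(scores)[::-1]                 # visit boxes by descending score
  keep = []
  suppressed = np.zeros(len(scores), dtype=bool)
  for i in order:
    if suppressed[i]:
      continue
    keep.append(int(i))                            # highest-scoring survivor wins
    if len(keep) >= max_output_size:
      break
    suppressed |= iou_matrix[i] > iou_threshold    # prune everything it overlaps
  return keep
# e.g. _demo_greedy_nms(np.array([.9, .8, .1]), np.eye(3)) returns [0, 1, 2]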
def multi_class_non_max_suppression(box_mask_list, score_thresh, iou_thresh,
max_output_size):
"""Multi-class version of non maximum suppression.
This op greedily selects a subset of detection bounding boxes, pruning
away boxes that have high IOU (intersection over union) overlap (> thresh)
with already selected boxes. It operates independently for each class for
which scores are provided (via the scores field of the input box_list),
pruning boxes with score less than a provided threshold prior to
applying NMS.
Args:
box_mask_list: np_box_mask_list.BoxMaskList holding N boxes. Must contain a
'scores' field representing detection scores. This scores field is a
tensor that can be 1 dimensional (in the case of a single class) or
2-dimensional, in which case we assume that it takes the
shape [num_boxes, num_classes]. We further assume that this rank is known
statically and that scores.shape[1] is also known (i.e., the number of
classes is fixed and known at graph construction time).
score_thresh: scalar threshold for score (low scoring boxes are removed).
    iou_thresh: scalar threshold for IOU (boxes that have high IOU overlap
      with previously selected boxes are removed).
max_output_size: maximum number of retained boxes per class.
Returns:
a box_mask_list holding M boxes with a rank-1 scores field representing
corresponding scores for each box with scores sorted in decreasing order
and a rank-1 classes field representing a class label for each box.
Raises:
ValueError: if iou_thresh is not in [0, 1] or if input box_mask_list does
not have a valid scores field.
"""
if not 0 <= iou_thresh <= 1.0:
raise ValueError('thresh must be between 0 and 1')
if not isinstance(box_mask_list, np_box_mask_list.BoxMaskList):
raise ValueError('box_mask_list must be a box_mask_list')
if not box_mask_list.has_field('scores'):
raise ValueError('input box_mask_list must have \'scores\' field')
scores = box_mask_list.get_field('scores')
if len(scores.shape) == 1:
scores = np.reshape(scores, [-1, 1])
elif len(scores.shape) == 2:
if scores.shape[1] is None:
raise ValueError('scores field must have statically defined second '
'dimension')
else:
raise ValueError('scores field must be of rank 1 or 2')
num_boxes = box_mask_list.num_boxes()
num_scores = scores.shape[0]
num_classes = scores.shape[1]
if num_boxes != num_scores:
    raise ValueError('Incorrect scores field length: actual (%d) vs '
                     'expected (%d).' % (num_scores, num_boxes))
selected_boxes_list = []
for class_idx in range(num_classes):
box_mask_list_and_class_scores = np_box_mask_list.BoxMaskList(
box_data=box_mask_list.get(),
mask_data=box_mask_list.get_masks())
class_scores = np.reshape(scores[0:num_scores, class_idx], [-1])
box_mask_list_and_class_scores.add_field('scores', class_scores)
box_mask_list_filt = filter_scores_greater_than(
box_mask_list_and_class_scores, score_thresh)
nms_result = non_max_suppression(
box_mask_list_filt,
max_output_size=max_output_size,
iou_threshold=iou_thresh,
score_threshold=score_thresh)
nms_result.add_field(
'classes',
np.zeros_like(nms_result.get_field('scores')) + class_idx)
selected_boxes_list.append(nms_result)
selected_boxes = np_box_list_ops.concatenate(selected_boxes_list)
sorted_boxes = np_box_list_ops.sort_by_field(selected_boxes, 'scores')
return box_list_to_box_mask_list(boxlist=sorted_boxes)
def prune_non_overlapping_masks(box_mask_list1, box_mask_list2, minoverlap=0.0):
"""Prunes the boxes in list1 that overlap less than thresh with list2.
For each mask in box_mask_list1, we want its IOA to be more than minoverlap
with at least one of the masks in box_mask_list2. If it does not, we remove
it. If the masks are not full-size images, the pruning is done based on boxes.
Args:
box_mask_list1: np_box_mask_list.BoxMaskList holding N boxes and masks.
box_mask_list2: np_box_mask_list.BoxMaskList holding M boxes and masks.
minoverlap: Minimum required overlap between boxes, to count them as
overlapping.
Returns:
A pruned box_mask_list with size [N', 4].
"""
intersection_over_area = ioa(box_mask_list2, box_mask_list1) # [M, N] tensor
intersection_over_area = np.amax(intersection_over_area, axis=0) # [N] tensor
keep_bool = np.greater_equal(intersection_over_area, np.array(minoverlap))
keep_inds = np.nonzero(keep_bool)[0]
new_box_mask_list1 = gather(box_mask_list1, keep_inds)
return new_box_mask_list1
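# Sketch of the pruning rule above on a plain IOA matrix (hypothetical values):
# an entry of list1 is kept iff some mask of list2 covers it with IOA >= minoverlap.
def _demo_prune_by_ioa(minoverlap=0.5):
  import numpy as np
  ioa_m_by_n = np.array([[0.9, 0.0, 0.2],
                         [0.1, 0.0, 0.7]])         # rows: list2 masks, cols: list1 masks
  keep = np.amax(ioa_m_by_n, axis=0) >= minoverlap  # [True, False, True]
  return np.nonzero(keep)[0]                        # indices [0, 2] survive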
def concatenate(box_mask_lists, fields=None):
"""Concatenate list of box_mask_lists.
This op concatenates a list of input box_mask_lists into a larger
box_mask_list. It also
handles concatenation of box_mask_list fields as long as the field tensor
shapes are equal except for the first dimension.
Args:
box_mask_lists: list of np_box_mask_list.BoxMaskList objects
fields: optional list of fields to also concatenate. By default, all
fields from the first BoxMaskList in the list are included in the
concatenation.
Returns:
a box_mask_list with number of boxes equal to
      sum([box_mask_list.num_boxes() for box_mask_list in box_mask_lists])
Raises:
ValueError: if box_mask_lists is invalid (i.e., is not a list, is empty, or
contains non box_mask_list objects), or if requested fields are not
contained in all box_mask_lists
"""
if fields is not None:
if 'masks' not in fields:
fields.append('masks')
return box_list_to_box_mask_list(
np_box_list_ops.concatenate(boxlists=box_mask_lists, fields=fields))
def filter_scores_greater_than(box_mask_list, thresh):
"""Filter to keep only boxes and masks with score exceeding a given threshold.
This op keeps the collection of boxes and masks whose corresponding scores are
greater than the input threshold.
Args:
box_mask_list: BoxMaskList holding N boxes and masks. Must contain a
'scores' field representing detection scores.
thresh: scalar threshold
Returns:
a BoxMaskList holding M boxes and masks where M <= N
Raises:
ValueError: if box_mask_list not a np_box_mask_list.BoxMaskList object or
if it does not have a scores field
"""
if not isinstance(box_mask_list, np_box_mask_list.BoxMaskList):
raise ValueError('box_mask_list must be a BoxMaskList')
if not box_mask_list.has_field('scores'):
raise ValueError('input box_mask_list must have \'scores\' field')
scores = box_mask_list.get_field('scores')
if len(scores.shape) > 2:
raise ValueError('Scores should have rank 1 or 2')
if len(scores.shape) == 2 and scores.shape[1] != 1:
raise ValueError('Scores should have rank 1 or have shape '
'consistent with [None, 1]')
high_score_indices = np.reshape(np.where(np.greater(scores, thresh)),
[-1]).astype(np.int32)
return gather(box_mask_list, high_score_indices) | PypiClean |
/CAGMon-0.8.5-py3-none-any.whl/cagmon/echo.py | import os
from gwpy.time import tconvert
__author__ = 'Phil Jung <[email protected]>'
#---------------------------------------------------------------------------------------------------------#
def html_head():
html_head = '''<!doctype html>
<html lang="en">
<head>
<meta charset="utf-8"><link rel="stylesheet" href="./css/style.css">
</head>
<body>
<div class="row Head" id="head_row">
<div class="cell" id="head_cell">
<div id="head_title">CAGMon: Correlation Analysis based on Glitch Monitoring
</div>
</div>
</div>
'''
return html_head
def html_configuration(gst, get, coefficients_trend_stride, filter_type, freq1, freq2, main_channel, mic_alpha, mic_c, sample_rate):
options = ''
if filter_type == 'lowpass':
        options += ', lowpass filter: {}Hz'.format(freq1)
elif filter_type == 'highpass':
options += ', highpass filter: {}Hz'.format(freq1)
elif filter_type == 'bandpass':
options += ', bandpass filter: {}-{}Hz'.format(freq1, freq2)
html_configuration = '''
<div class="row Configuration" id="row">
<div class="cell" id="cell">
<div id="sub_title">Configuration
</div>
<hr id="line"/>
<div id="config_text">
<span>{0} - {1} </span>
<div id="config_text_detail">
<span>Active segment: <a draggable="true" href="./segments/FlagSegment.txt" target="_blank" rel="noopener">txt</a> <a draggable="true" href="./segments/FlagSegment.json" target="_blank" rel="noopener">json</a> </span>
</div>
<div id="config_text_detail">
<span>stride: {2} seconds, sample rate: {8}Hz, data size: {3}{4}</span>
</div>
<div id="config_text_detail">
<span>main channel: {5}</span>
</div>
<div id="config_text_detail">
<span>MICe Alpha: {6}, MICe c: {7}</span>
</div>
</div>
</div>
</div>
'''.format(gst, get, coefficients_trend_stride, int(coefficients_trend_stride*sample_rate), options, main_channel, mic_alpha, mic_c, sample_rate)
return html_configuration
def html_summary(gst, get, coefficients_trend_stride, main_channel, MIC_maxvalues, PCC_maxvalues, Kendall_maxvalues, sorted_MIC_maxvalues):
html_summary = '''
<div class="row Sammury" id="row">
<div class="cell" id="cell">
<div id="sub_title">Summary
</div>
<hr id="line"/>
<div id="slider">
<div class="gjs-lory-frame" id="slider_frame">
<div class="gjs-lory-slides" id="slider_slides">
<div class="gjs-lory-slide" id="slider_slide">
<img id="slide_image" src="{0}"/>
</div>
<div class="gjs-lory-slide" id="slider_slide">
<img id="slide_image" src="{1}"/>
</div>
<div class="gjs-lory-slide" id="slider_slide">
<img id="slide_image" src="{2}"/>
</div>
</div>
</div>
<span class="gjs-lory-prev" id="slider_left_arrow"><svg xmlns="http://www.w3.org/2000/svg" width="50" height="50" viewBox="0 0 501.5 501.5">
<g>
<path fill="#2E435A" d="M302.67 90.877l55.77 55.508L254.575 250.75 358.44 355.116l-55.77 55.506L143.56 250.75z">
</path>
</g>
</svg></span>
<span class="gjs-lory-next" id="slider_right_arrow"><svg xmlns="http://www.w3.org/2000/svg" width="50" height="50" viewBox="0 0 501.5 501.5">
<g>
<path fill="#2E435A" d="M199.33 410.622l-55.77-55.508L247.425 250.75 143.56 146.384l55.77-55.507L358.44 250.75z">
</path>
</g>
</svg></span>
</div>
<div id="summary_legend">*LTMV: lower than the median value
</div>
<hr id="line"/>
<div>
<table id="summary_table">
<tbody>
<tr>
<td id="table_cell_channel">Channel
</td>
<td id="table_cell_values">MICe
</td>
<td id="table_cell_values">MICe median
</td>
<td id="table_cell_values">PCC
</td>
<td id="table_cell_values">PCC median
</td>
<td id="table_cell_values">Kendall
</td>
<td id="table_cell_values">Kendall median
</td>
<td id="table_cell_segment">segment
</td>
</tr>
<tr>
</tr>
</tbody>
</table>
{3}
</div>
<hr id="table_line"/>
</div>
</div>
'''
summary_table = '''
<hr id="table_line"/>
<table id="summary_table">
<tbody>
<tr>
<td id="table_cell_channel">{0}
</td>
<td id="table_cell_values">{1}
</td>
<td id="table_cell_values">{2}
</td>
<td id="table_cell_values">{3}
</td>
<td id="table_cell_values">{4}
</td>
<td id="table_cell_values">{5}
</td>
<td id="table_cell_values">{6}
</td>
<td id="table_cell_segment">{7}
</td>
</tr>
<tr>
</tr>
</tbody>
</table>
'''
tables = ''''''
for max_MIC_info in sorted_MIC_maxvalues:
channel = max_MIC_info[0]
start = max_MIC_info[1]
MIC = round(MIC_maxvalues[channel]['values'][start],2)
MIC_median = round(MIC_maxvalues[channel]['median'],2)
try:
PCC_median = round(PCC_maxvalues[channel]['median'],2)
except KeyError:
PCC_median = 0.
try:
Kendall_median = round(Kendall_maxvalues[channel]['median'],2)
except KeyError:
Kendall_median = 0.
try:
PCC = round(PCC_maxvalues[channel]['values'][start],2)
except KeyError:
PCC = 'LTMV'
try:
Kendall = round(Kendall_maxvalues[channel]['values'][start],2)
except KeyError:
Kendall = 'LTMV'
segment = '{0} - {1}'.format(start, start+coefficients_trend_stride)
table = summary_table.format(channel, MIC, MIC_median, PCC, PCC_median, Kendall, Kendall_median, segment)
tables += table
MIC_coefficient_contribution_plot = './plots/Trend/MICe_Coefficient-Distribution-Trend_{0}-{1}_{2}_{3}.png'.format(int(gst), int(get-gst), main_channel, int(coefficients_trend_stride))
PCC_coefficient_contribution_plot = './plots/Trend/PCC_Coefficient-Distribution-Trend_{0}-{1}_{2}_{3}.png'.format(int(gst), int(get-gst), main_channel, int(coefficients_trend_stride))
Kendall_coefficient_contribution_plot = './plots/Trend/Kendall_Coefficient-Distribution-Trend_{0}-{1}_{2}_{3}.png'.format(int(gst), int(get-gst), main_channel, int(coefficients_trend_stride))
html_summary = html_summary.format(MIC_coefficient_contribution_plot, PCC_coefficient_contribution_plot, Kendall_coefficient_contribution_plot, tables)
return html_summary
def html_details(output_path, gst, get, coefficients_trend_stride, main_channel, MIC_maxvalues, PCC_maxvalues, Kendall_maxvalues, sorted_MIC_maxvalues):
html_details_subtitle = '''
<div class="row Details" id="row">
<div class="cell" id="cell">
<div id="sub_title">Details
</div>
<hr id="line"/>
</div>
</div>
'''
html_details_box ='''
<div class="row Details" id="row_detail">
<div class="cell" id="cell_detail">
<div id="detail_contents">
<span id="detail_text">Datetime: {12}</span>
<div id="detail_text">GPS time: {7}
</div>
<div id="detail_text">Channel: {0}
</div>
<div id="detail_text">MICe: {1}
</div>
<div id="detail_text">PCC: {2}
</div>
<div id="detail_text">Kendall's tau: {3}
</div>
<div id="detail_text">Median of MICe: {4}
</div>
<div id="detail_text">Median of PCC: {5}
</div>
<div id="detail_text">Median of Kendall: {6}
</div>
<div id="detail_text">Other MICe values: <a draggable="true" href="{8}" target="_blank" rel="noopener">txt</a>
</div>
<div id="detail_text">Other PCC values: <a draggable="true" href="{9}" target="_blank" rel="noopener">txt</a>
</div>
<div id="detail_text">Other Kendall values: <a draggable="true" href="{10}" target="_blank" rel="noopener">txt</a>
</div>
</div>
</div>
<div class="cell" id="cell_image">
<div id="slider">
<img id="slide_image" src="{11}"/>
</div>
</div>
</div>
'''
for channel in MIC_maxvalues.keys():
txt_bin = list()
for gps_time in sorted(MIC_maxvalues[channel]['values'].keys()):
value = MIC_maxvalues[channel]['values'][gps_time]
txt_bin.append('{0} {1}'.format(gps_time, value))
f = open('{0}data/MICe_{1}-{2}_{3}.txt'.format(output_path, int(gst), int(get-gst), channel), 'w')
f.write('\n'.join(txt_bin))
f.close()
for channel in PCC_maxvalues.keys():
txt_bin = list()
for gps_time in sorted(PCC_maxvalues[channel]['values'].keys()):
value = PCC_maxvalues[channel]['values'][gps_time]
txt_bin.append('{0} {1}'.format(gps_time, value))
f = open('{0}data/PCC_{1}-{2}_{3}.txt'.format(output_path, int(gst), int(get-gst), channel), 'w')
f.write('\n'.join(txt_bin))
f.close()
for channel in Kendall_maxvalues.keys():
txt_bin = list()
for gps_time in sorted(Kendall_maxvalues[channel]['values'].keys()):
value = Kendall_maxvalues[channel]['values'][gps_time]
txt_bin.append('{0} {1}'.format(gps_time, value))
f = open('{0}data/Kendall_{1}-{2}_{3}.txt'.format(output_path, int(gst), int(get-gst), channel), 'w')
f.write('\n'.join(txt_bin))
f.close()
details = ''''''
for max_MIC_info in sorted_MIC_maxvalues:
channel = max_MIC_info[0]
start = max_MIC_info[1]
MIC = round(MIC_maxvalues[channel]['values'][start],2)
MIC_median = round(MIC_maxvalues[channel]['median'],2)
try:
PCC_median = round(PCC_maxvalues[channel]['median'],2)
except KeyError:
PCC_median = 0.
try:
Kendall_median = round(Kendall_maxvalues[channel]['median'],2)
except KeyError:
Kendall_median = 0.
try:
PCC = round(PCC_maxvalues[channel]['values'][start],2)
except KeyError:
PCC = 'LTMV'
try:
Kendall = round(Kendall_maxvalues[channel]['values'][start],2)
except KeyError:
Kendall = 'LTMV'
datetime = '{0}-{1}'.format(tconvert(start).strftime('%Y-%m-%d %H:%M:%S'), coefficients_trend_stride)
segment = '{0} - {1}'.format(start, start+coefficients_trend_stride)
trend_plot = './plots/Trend/Coefficients-Trend_{0}-{1}_{2}_{3}.png'.format(int(gst), int(get-gst), channel, int(coefficients_trend_stride))
MIC_filelink = './data/MICe_{0}-{1}_{2}.txt'.format(int(gst), int(get-gst), channel)
PCC_filelink = './data/PCC_{0}-{1}_{2}.txt'.format(int(gst), int(get-gst), channel)
Kendall_filelink = './data/Kendall_{0}-{1}_{2}.txt'.format(int(gst), int(get-gst), channel)
box = html_details_box.format(channel, MIC, PCC, Kendall, MIC_median, PCC_median, Kendall_median, segment, MIC_filelink, PCC_filelink, Kendall_filelink, trend_plot, datetime)
details += box
html_details = html_details_subtitle + details
return html_details
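# Illustrative helper (not part of the original module): html_details() above
# also writes one plain-text file per coefficient and channel, e.g.
# <output_path>data/MICe_<gst>-<duration>_<channel>.txt, with one
# "<gps_time> <value>" pair per line. A minimal reader for that layout:
def _demo_read_coefficient_txt(path):
    pairs = []
    with open(path) as txt_file:
        for line in txt_file:
            gps_time, value = line.split()
            pairs.append((float(gps_time), float(value)))
    return pairs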
def html_foot():
html_foot = '''
<div class="row Foot" id="foot_row">
<div class="cell" id="foot_cell">
<div id="foot_text">Designed by Phil Jung in Korea Gravitational Wave Group (KGWG) </div>
<div id="foot_text">e-mail: [email protected] </div>
</div>
</div>
'''
return html_foot
def html_script():
html_script = '''
<script>var items = document.querySelectorAll('#slider');
for (var i = 0, len = items.length; i < len; i++) {
(function(){
var e=this,t="https://cdnjs.cloudflare.com/ajax/libs/lory.js/2.3.4/lory.min.js",l=["0","false"],s="";
s="true"==s?1:parseInt(s,10);
var a={
slidesToScroll:parseInt("1",10),enableMouseEvents:l.indexOf("")>=0?0:1,infinite:!isNaN(s)&&s,rewind:!(l.indexOf("")>=0),slideSpeed:parseInt("300",10),rewindSpeed:parseInt("600",10),snapBackSpeed:parseInt("200",10),ease:"ease",classNameFrame:"gjs-lory-frame",classNameSlideContainer:"gjs-lory-slides",classNamePrevCtrl:"gjs-lory-prev",classNameNextCtrl:"gjs-lory-next"}
,r=function(){
window.sliderLory=lory(e,a)};
if("undefined"==typeof lory){
var n=document.createElement("script");
n.src=t,n.onload=r,document.head.appendChild(n)}
else r()}
.bind(items[i]))();
}
</script>
</body>
    </html>
'''
return html_script
def css_text():
css_text = '''
* {
box-sizing: border-box;
}
body {
margin: 0;
}
.row{
display:flex;
justify-content:flex-start;
align-items:stretch;
flex-wrap:nowrap;
padding:10px;
}
.cell{
min-height:75px;
flex-grow:1;
flex-basis:100%;
}
#head_row{
background-image:linear-gradient(#fdb12a,#fdb12a);
background-repeat:repeat;
background-position:left top;
background-attachment:scroll;
background-size:auto;
margin:0 0 25px 0;
}
#head_cell{
margin:0 10% 0 10%;
min-height:auto;
}
#head_title{
padding:10px;
font-family:Helvetica, sans-serif;
letter-spacing:0;
font-size:30px;
position:relative;
}
#foot_row{
background-image:linear-gradient(#6f6e6c,#6f6e6c);
background-repeat:repeat;
background-position:left top;
background-attachment:scroll;
background-size:auto;
margin:25px 0 0 0;
}
#foot_cell{
margin:0 10% 0 10%;
min-height:auto;
}
#row{
margin:0 10% 0 10%;
}
#cell{
margin:0 0 0 0;
}
#line{
opacity:0.5;
margin:0 0 8px 0;
}
#sub_title{
padding:10px;
font-family:Helvetica, sans-serif;
letter-spacing:0;
font-size:25px;
position:relative;
}
#summary_legend{
padding:2px;
text-align:right;
}
#text{
padding:10px;
font-family:Helvetica, sans-serif;
font-size:18px;
}
#config_text{
padding:15px;
font-family:Helvetica, sans-serif;
font-size:21px;
}
#config_text_detail{
line-height:150%;
}
#cell_detail{
flex-basis:30%;
}
#cell_image{
flex-basis:70%;
}
#row_detail{
margin:0 11% 0.5% 11%;
border:1px solid #cacaca;
border-radius:5px 5px 5px 5px;
}
#summary_table{
width:100%;
}
#table_cell_channel{
width:30%;
font-family:Helvetica, sans-serif;
font-size:18px;
padding:1px 1px 1px 1%;
text-decoration:none;
}
#table_cell_values{
width:8.33%;
font-family:Helvetica, sans-serif;
font-size:18px;
padding:1px 1px 1px 1%;
}
#table_cell_segment{
width:20%;
font-family:Helvetica, sans-serif;
font-size:18px;
padding:1px 1px 1px 1%;
}
#table_line{
opacity:0.2;
margin:0 0 8px 0;
}
#slider{
position:relative;
width:auto;
}
#slider_frame{
width:88%;
margin:0 auto;
position:relative;
overflow:hidden;
white-space:nowrap;
}
#slider_slides{
display:inline-block;
transition-delay:1ms;
}
#slider_slide{
display:inline-block;
position:relative;
color:#fff;
width:100%;
margin-right:0px;
vertical-align:top;
min-height:130px;
white-space:normal;
background-color:rgba(0, 0, 0, 0.1);
background-image:linear-gradient(#ffffff,#ffffff);
background-repeat:repeat;
background-position:left top;
background-attachment:scroll;
background-size:auto;
}
#slide_image{
color:black;
width:100%;
}
#slider_left_arrow{
position:absolute;
display:block;
cursor:pointer;
top:50%;
left:0;
margin-top:-25px;
}
#slider_right_arrow{
position:absolute;
display:block;
cursor:pointer;
top:50%;
right:0;
margin-top:-25px;
}
#detail_contents{
padding:10px;
}
#detail_text{
font-family:Helvetica, sans-serif;
padding:2px;
font-size:18px;
}
#foot_text{
padding:2px;
font-family:Helvetica, sans-serif;
letter-spacing:0;
font-size:15px;
position:relative;
color:#eeeeee;
}
@media (max-width: 768px){
.row{
flex-wrap:wrap;
}
}
'''
return css_text
def make_html(output_path, gst, get, coefficients_trend_stride, filter_type, freq1, freq2, main_channel, mic_alpha, mic_c, sample_rate, MIC_maxvalues, PCC_maxvalues, Kendall_maxvalues, sorted_MIC_maxvalues):
head = html_head()
configuration = html_configuration(gst, get, coefficients_trend_stride, filter_type, freq1, freq2, main_channel, mic_alpha, mic_c, sample_rate)
summary = html_summary(gst, get, coefficients_trend_stride, main_channel, MIC_maxvalues, PCC_maxvalues, Kendall_maxvalues, sorted_MIC_maxvalues)
details = html_details(output_path, gst, get, coefficients_trend_stride, main_channel, MIC_maxvalues, PCC_maxvalues, Kendall_maxvalues, sorted_MIC_maxvalues)
foot = html_foot()
script = html_script()
html = head + configuration + summary + details + foot + script
css = css_text()
with open(output_path + 'index.html', 'w') as html_file:
html_file.write(html)
if not os.path.exists(output_path+'css'):
os.makedirs(output_path+'css')
with open(output_path + 'css/style.css', 'w') as css_file:
css_file.write(css)
print('Saved HTML file') | PypiClean |
/MAVR-0.93.tar.gz/MAVR-0.93/scripts/multiple_alignment/get_specific_positions_from_multiple_alignments.py | __author__ = 'Sergei F. Kliver'
import argparse
from RouToolPa.Routines import MultipleAlignmentRoutines
parser = argparse.ArgumentParser()
parser.add_argument("-i", "--input_dir", action="store", dest="input_dir", required=True,
help="Input directory with alignments")
parser.add_argument("-o", "--output_dir", action="store", dest="output_dir", required=True,
help="Output directory")
parser.add_argument("-r", "--reference_sequence_id", action="store", dest="reference_sequence_id", required=True,
help="Reference sequence id")
parser.add_argument("-p", "--position_file", action="store", dest="position_file", required=True,
help="File with positions to extract")
parser.add_argument("-s", "--alignment_file_suffix", action="store", dest="alignment_file_suffix", default="",
help="Suffix of alignment files. Default: no suffix")
parser.add_argument("-f", "--format", action="store", dest="format", default="fasta",
help="Format of alignments")
parser.add_argument("-g", "--gap_symbol", action="store", dest="gap_symbol", default="-",
help="Gap symbol. Default - '-'")
parser.add_argument("-t", "--type", action="store", dest="type", default="nucleotide",
help="Alignment type. Allowed: nucleotide(default), codon, protein")
parser.add_argument("-l", "--flank_length", action="store", dest="flank_length", default=0, type=int,
help="Flank length. Default: 0, i.e no flanks will be included in the output file")
args = parser.parse_args()
MultipleAlignmentRoutines.get_specific_positions_for_multiple_files(args.input_dir,
args.position_file,
args.reference_sequence_id,
args.output_dir,
alignment_file_suffix=args.alignment_file_suffix,
format=args.format,
gap_symbol=args.gap_symbol,
verbose=True,
alignment_type=args.type,
flank_length=args.flank_length) | PypiClean |
/OASYS1-WONDER-1.0.45.tar.gz/OASYS1-WONDER-1.0.45/orangecontrib/wonder/widgets/structural/ow_phases_gsasii.py |
import sys
from PyQt5.QtWidgets import QMessageBox
from orangewidget import gui as orangegui
from orangecontrib.wonder.widgets.gui.ow_generic_parameter_widget import ParameterBox
from orangecontrib.wonder.widgets.gui.ow_generic_phases_widget import OWGenericWidget, OWGenericPhases
from orangecontrib.wonder.util.gui_utility import gui
from oasys.widgets import congruence
from orangecontrib.wonder.util.fit_utilities import Symmetry
from orangecontrib.wonder.fit.parameters.measured_data.phase_gsasii import GSASIIPhase
from orangecontrib.wonder.fit.functions.gsasii_functions import gsasii_load_reflections
class OWGSASIIPhases(OWGenericPhases):
name = "GSAS-II Phases"
description = "Phases description"
icon = "icons/gsas2.png"
priority = 1.21
def __init__(self):
super().__init__()
def get_max_height(self):
return 650
def get_phase_box_instance(self, index, phase_tab):
return PhaseBox(widget=self,
parent=phase_tab,
index = index,
a = self.a[index],
a_fixed = self.a_fixed[index],
a_has_min = self.a_has_min[index],
a_min = self.a_min[index],
a_has_max = self.a_has_max[index],
a_max = self.a_max[index],
a_function = self.a_function[index],
a_function_value = self.a_function_value[index],
symmetry = self.symmetry[index],
cif_file = self.cif_file[index],
formula = self.formula[index],
intensity_scale_factor = self.intensity_scale_factor[index],
intensity_scale_factor_fixed = self.intensity_scale_factor_fixed[index],
intensity_scale_factor_has_min = self.intensity_scale_factor_has_min[index],
intensity_scale_factor_min = self.intensity_scale_factor_min[index],
intensity_scale_factor_has_max = self.intensity_scale_factor_has_max[index],
intensity_scale_factor_max = self.intensity_scale_factor_max[index],
intensity_scale_factor_function = self.intensity_scale_factor_function[index],
intensity_scale_factor_function_value= self.intensity_scale_factor_function_value[index],
phase_name = self.phase_name[index])
def get_empty_phase_box_instance(self, index, phase_tab):
return PhaseBox(widget=self, parent=phase_tab, index=index)
def check_congruence(self):
pass
def set_phases(self):
self.fit_global_parameters.measured_dataset.set_phases([self.phases_box_array[index].get_phase() for index in range(len(self.phases_box_array))])
measured_dataset = self.fit_global_parameters.measured_dataset
incident_radiations = measured_dataset.incident_radiations
for diffraction_pattern_index in range(measured_dataset.get_diffraction_patterns_number()):
incident_radiation = incident_radiations[0 if len(incident_radiations) == 1 else diffraction_pattern_index]
diffraction_pattern = measured_dataset.diffraction_patterns[diffraction_pattern_index]
line_profile = measured_dataset.line_profiles[diffraction_pattern_index]
line_profile.generate_additional_parameters()
for phase_index in range(self.fit_global_parameters.measured_dataset.get_phases_number()):
gsasii_phase = measured_dataset.phases[phase_index]
gsasii_reflections_list = gsasii_load_reflections(gsasii_phase.cif_file,
incident_radiation.wavelength.value,
diffraction_pattern.get_diffraction_point(0).twotheta,
diffraction_pattern.get_diffraction_point(-1).twotheta)
line_profile.set_additional_parameters_of_phase(phase_index=phase_index,
additional_parameters=gsasii_reflections_list)
self.phases_box_array[phase_index].set_gsasii_result(gsasii_reflections_list)
class PhaseBox(ParameterBox):
use_structure = 1
def __init__(self,
widget=None,
parent=None,
index=0,
a=0.0,
a_fixed=0,
a_has_min=0,
a_min=0.0,
a_has_max=0,
a_max=0.0,
a_function=0,
a_function_value="",
symmetry=2,
cif_file="",
formula="",
intensity_scale_factor=1.0,
intensity_scale_factor_fixed=0,
intensity_scale_factor_has_min=0,
intensity_scale_factor_min=0.0,
intensity_scale_factor_has_max=0,
intensity_scale_factor_max=0.0,
intensity_scale_factor_function=0,
intensity_scale_factor_function_value="",
phase_name=""):
super(PhaseBox, self).__init__(widget=widget,
parent=parent,
index=index,
a=a,
a_fixed=a_fixed,
a_has_min=a_has_min,
a_min=a_min,
a_has_max=a_has_max,
a_max=a_max,
a_function=a_function,
a_function_value=a_function_value,
symmetry=symmetry,
cif_file=cif_file,
formula=formula,
intensity_scale_factor=intensity_scale_factor,
intensity_scale_factor_fixed=intensity_scale_factor_fixed,
intensity_scale_factor_has_min=intensity_scale_factor_has_min,
intensity_scale_factor_min=intensity_scale_factor_min,
intensity_scale_factor_has_max=intensity_scale_factor_has_max,
intensity_scale_factor_max=intensity_scale_factor_max,
intensity_scale_factor_function=intensity_scale_factor_function,
intensity_scale_factor_function_value=intensity_scale_factor_function_value,
phase_name=phase_name)
def get_height(self):
return 470
def init_fields(self, **kwargs):
self.a = kwargs["a"]
self.a_fixed = kwargs["a_fixed"]
self.a_has_min = kwargs["a_has_min"]
self.a_min = kwargs["a_min"]
self.a_has_max = kwargs["a_has_max"]
self.a_max = kwargs["a_max"]
self.a_function = kwargs["a_function"]
self.a_function_value = kwargs["a_function_value"]
self.symmetry = kwargs["symmetry"]
self.cif_file = kwargs["cif_file"]
self.formula = kwargs["formula"]
self.intensity_scale_factor = kwargs["intensity_scale_factor"]
self.intensity_scale_factor_fixed = kwargs["intensity_scale_factor_fixed"]
self.intensity_scale_factor_has_min = kwargs["intensity_scale_factor_has_min"]
self.intensity_scale_factor_min = kwargs["intensity_scale_factor_min"]
self.intensity_scale_factor_has_max = kwargs["intensity_scale_factor_has_max"]
self.intensity_scale_factor_max = kwargs["intensity_scale_factor_max"]
self.intensity_scale_factor_function = kwargs["intensity_scale_factor_function"]
self.intensity_scale_factor_function_value = kwargs["intensity_scale_factor_function_value"]
self.phase_name = kwargs["phase_name"]
def init_gui(self, container):
gui.lineEdit(container, self, "phase_name", "Phase Name (will appear in tabs and plots)", labelWidth=260, valueType=str, callback=self.widget.dump_phase_name)
self.cb_symmetry = orangegui.comboBox(container, self, "symmetry", label="Symmetry", items=Symmetry.tuple(),
callback=self.set_symmetry, orientation="horizontal")
OWGenericWidget.create_box_in_widget(self, container, "a", "a [nm]", add_callback=True, min_value=0.0,
min_accepted=False, trim=5)
orangegui.separator(container)
structure_box = gui.widgetBox(container,
"", orientation="vertical",
width=self.CONTROL_AREA_WIDTH)
self.structure_box_1 = gui.widgetBox(structure_box,
"", orientation="vertical",
width=self.CONTROL_AREA_WIDTH - 5, height=90)
file_box = gui.widgetBox(self.structure_box_1, "", orientation="horizontal", width=self.CONTROL_AREA_WIDTH-10)
self.le_cif_file = gui.lineEdit(file_box, self, value="cif_file", valueType=str, label="CIF File", labelWidth=50, callback=self.widget.dump_cif_file)
orangegui.button(file_box, self, "...", callback=self.open_folders)
gui.lineEdit(self.structure_box_1, self, "formula", "Chemical Formula", labelWidth=110, valueType=str,
callback=self.widget.dump_formula)
OWGenericWidget.create_box_in_widget(self, self.structure_box_1, "intensity_scale_factor", "I0",
add_callback=True, min_value=0.0, min_accepted=False, trim=5)
text_area_box = gui.widgetBox(structure_box, "Calculation Result", orientation="vertical", height=165, width=self.CONTROL_AREA_WIDTH - 10)
self.text_area = gui.textArea(height=125, width=self.CONTROL_AREA_WIDTH - 30, readOnly=True)
self.text_area.setText("")
text_area_box.layout().addWidget(self.text_area)
self.is_on_init = False
def open_folders(self):
self.cif_file=gui.selectFileFromDialog(self,
self.cif_file,
start_directory=self.widget.working_directory)
self.le_cif_file.setText(self.cif_file)
def set_symmetry(self):
if not GSASIIPhase.is_cube(self.cb_symmetry.currentText()):
QMessageBox.critical(self, "Error",
"Only Cubic Systems are supported",
QMessageBox.Ok)
self.symmetry = 2
if not self.is_on_init: self.widget.dump_symmetry()
def callback_a(self):
if not self.is_on_init: self.widget.dump_a()
def callback_intensity_scale_factor(self):
if not self.is_on_init: self.widget.dump_intensity_scale_factor()
def get_basic_parameter_prefix(self):
return GSASIIPhase.get_parameters_prefix()
def set_data(self, phase):
OWGenericWidget.populate_fields_in_widget(self, "a", phase.a)
OWGenericWidget.populate_fields_in_widget(self, "intensity_scale_factor", phase.intensity_scale_factor)
self.cif_file = phase.cif_file
self.formula = phase.formula
        symmetries = Symmetry.tuple()
        for index in range(0, len(symmetries)):
            if symmetries[index] == phase.symmetry:
                self.symmetry = index
self.set_symmetry()
self.phase_name = phase.name
def get_phase(self):
phase = GSASIIPhase.init_cube(a0=OWGenericWidget.get_fit_parameter_from_widget(self, "a", self.get_parameters_prefix()),
symmetry=self.cb_symmetry.currentText(),
cif_file=self.cif_file,
formula=congruence.checkEmptyString(self.formula, "Chemical Formula"),
intensity_scale_factor=OWGenericWidget.get_fit_parameter_from_widget(self, "intensity_scale_factor", self.get_parameters_prefix()),
name=self.phase_name,
progressive=self.get_parameter_progressive())
return phase
def set_gsasii_result(self, gsasii_reflection_list):
self.text_area.clear()
text = "h k l 2\u03b8 m |F|^2 I\n" + \
"-----------------------------------------------------\n"
for reflection in gsasii_reflection_list.get_reflections():
text += str(reflection) + "\n"
self.text_area.setText(text)
from PyQt5.QtWidgets import QApplication
if __name__ == "__main__":
a = QApplication(sys.argv)
ow = OWGSASIIPhases()
ow.show()
a.exec_()
ow.saveSettings() | PypiClean |
/CleanAdminDjango-1.5.3.1.tar.gz/CleanAdminDjango-1.5.3.1/django/contrib/admindocs/utils.py | "Misc. utility functions/classes for admin documentation generator."
import re
from email.parser import HeaderParser
from email.errors import HeaderParseError
from django.utils.safestring import mark_safe
from django.core.urlresolvers import reverse
from django.utils.encoding import force_bytes
try:
import docutils.core
import docutils.nodes
import docutils.parsers.rst.roles
except ImportError:
docutils_is_available = False
else:
docutils_is_available = True
def trim_docstring(docstring):
"""
Uniformly trims leading/trailing whitespace from docstrings.
Based on http://www.python.org/peps/pep-0257.html#handling-docstring-indentation
"""
if not docstring or not docstring.strip():
return ''
# Convert tabs to spaces and split into lines
lines = docstring.expandtabs().splitlines()
    # Per PEP 257, the first line is excluded when measuring the common
    # indentation, since it sits directly after the opening quotes.
    indents = [len(line) - len(line.lstrip()) for line in lines[1:] if line.lstrip()]
    indent = min(indents) if indents else 0
trimmed = [lines[0].lstrip()] + [line[indent:].rstrip() for line in lines[1:]]
return "\n".join(trimmed).strip()
def parse_docstring(docstring):
"""
Parse out the parts of a docstring. Returns (title, body, metadata).
"""
docstring = trim_docstring(docstring)
parts = re.split(r'\n{2,}', docstring)
title = parts[0]
if len(parts) == 1:
body = ''
metadata = {}
else:
parser = HeaderParser()
try:
metadata = parser.parsestr(parts[-1])
except HeaderParseError:
metadata = {}
body = "\n\n".join(parts[1:])
else:
metadata = dict(metadata.items())
if metadata:
body = "\n\n".join(parts[1:-1])
else:
body = "\n\n".join(parts[1:])
return title, body, metadata
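# Worked example (illustrative): the first block becomes the title, the last
# block is parsed as RFC 822 style headers when possible, and anything in
# between is the body.
#
#   >>> parse_docstring("Do a thing.\n\nLonger explanation.\n\nstatus: beta")
#   ('Do a thing.', 'Longer explanation.', {'status': 'beta'})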
def parse_rst(text, default_reference_context, thing_being_parsed=None):
"""
Convert the string from reST to an XHTML fragment.
"""
overrides = {
'doctitle_xform' : True,
        'initial_header_level' : 3,
"default_reference_context" : default_reference_context,
"link_base" : reverse('django-admindocs-docroot').rstrip('/')
}
if thing_being_parsed:
thing_being_parsed = force_bytes("<%s>" % thing_being_parsed)
parts = docutils.core.publish_parts(text, source_path=thing_being_parsed,
destination_path=None, writer_name='html',
settings_overrides=overrides)
return mark_safe(parts['fragment'])
#
# reST roles
#
ROLES = {
'model' : '%s/models/%s/',
'view' : '%s/views/%s/',
'template' : '%s/templates/%s/',
'filter' : '%s/filters/#%s',
'tag' : '%s/tags/#%s',
}
def create_reference_role(rolename, urlbase):
def _role(name, rawtext, text, lineno, inliner, options=None, content=None):
if options is None: options = {}
if content is None: content = []
node = docutils.nodes.reference(rawtext, text, refuri=(urlbase % (inliner.document.settings.link_base, text.lower())), **options)
return [node], []
docutils.parsers.rst.roles.register_canonical_role(rolename, _role)
def default_reference_role(name, rawtext, text, lineno, inliner, options=None, content=None):
if options is None: options = {}
if content is None: content = []
context = inliner.document.settings.default_reference_context
node = docutils.nodes.reference(rawtext, text, refuri=(ROLES[context] % (inliner.document.settings.link_base, text.lower())), **options)
return [node], []
if docutils_is_available:
docutils.parsers.rst.roles.register_canonical_role('cmsreference', default_reference_role)
docutils.parsers.rst.roles.DEFAULT_INTERPRETED_ROLE = 'cmsreference'
for name, urlbase in ROLES.items():
create_reference_role(name, urlbase) | PypiClean |
/Flask_BS4-5.3.1.0-py3-none-any.whl/flask_bs4/forms.py | from dominate import tags
from markupsafe import Markup
from wtforms.widgets.core import html_params
from .internals import xmlattr
def _add_description(field, **kwargs):
if field.description:
_attributes = {
"id": field.name + "Help",
"class": "form-text",
}
return f'<div { xmlattr(_attributes) }>{ field.description }</div>'
return ''
def _add_error_message(field_errors):
if field_errors:
return f'<div class="invalid-feedback">{ " ".join(field_errors) }</div>'
return ''
def _wrap_form(form,
action='',
button_map=None,
enctype=None,
extra_classes=[],
form_type=None,
horizontal_columns=None,
id=None,
method='post',
novalidate=False,
role='form',
render_kw={}):
_attributes = {
"action": action,
"method": method,
"id": id,
"class": "form " + " ".join(extra_classes),
"role": role,
"enctype": enctype if enctype else "",
**render_kw
}
return f'<form { xmlattr(_attributes) } { "novalidate" if novalidate else "" }>{ form }</form>'
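# Illustrative usage (assuming the sibling ``xmlattr`` helper renders the
# attribute dict as key="value" pairs): wrapping pre-rendered field markup in
# a <form> tag.
def _demo_wrap_form():
    html = _wrap_form('<fields/>', action='/login', id='login-form',
                      extra_classes=['w-50'])
    # -> roughly: <form action="/login" method="post" id="login-form"
    #             class="form w-50" role="form" enctype="" >...</form>
    return html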
def _wrap_field(field, **kwargs):
root = tags.div()
_root_classes = ['mb-3']
_field_classes = ['form-control']
_field_descripton = Markup(_add_description(field, **kwargs))
_field_errors = Markup(_add_error_message(field.errors))
_form_type = kwargs.get('form_type', 'basic')
_col1_class = ['form-label']
_col2_class = ['']
if field.type == 'SelectField':
_field_classes = ['form-select']
if field.errors:
_field_classes.append('is-invalid')
_field_label = field.label(class_=" ".join(_col1_class))
if _form_type in ['basic']:
root.add(_field_label)
root.add(field(class_=" ".join(_field_classes)))
root.add(_field_errors)
root.add(_field_descripton)
if _form_type in ['horizontal']:
_root_classes.append('row')
_cols = kwargs.get('horizontal_columns', ('lg', 2, 10))
_col1_class = [f'col-{ _cols[0] }-{ _cols[1] }', 'col-form-label']
_col2_class = [f'col-{ _cols[0] }-{ _cols[2] }']
_field_label = field.label(class_=" ".join(_col1_class))
_field_wrap = tags.div(_class=" ".join(_col2_class))
_field_wrap.add(field(class_=" ".join(_field_classes)))
_field_wrap.add(_field_errors)
_field_wrap.add(_field_descripton)
root.add(_field_label)
root.add(_field_wrap)
if _form_type in ['floating']:
_root_classes.append('form-floating')
root.add(field(class_=" ".join(_field_classes),
placeholder="placeholder"))
root.add(_field_label)
root.add(_field_errors)
root.add(_field_descripton)
root['class'] = " ".join(_root_classes)
return root
def _wrap_boolean(field, **kwargs):
root = tags.div()
wrap = tags.div(_class='form-check')
hwrap = tags.div()
_root_classes = ['mb-3']
_field_classes = ['form-check-input']
_label_classes = ['form-check-label']
_field_descripton = Markup(_add_description(field, **kwargs))
_field_errors = Markup(_add_error_message(field.errors))
_form_type = kwargs.get('form_type', 'basic')
if field.errors:
_field_classes.append('is-invalid')
if _form_type in ['horizontal']:
_cols = kwargs.get('horizontal_columns', ('lg', 2, 10))
_col1_class = f'offset-{ _cols[0] }-{ _cols[1] }'
_col2_class = f'col-{ _cols[0] }-{ _cols[2] }'
_root_classes.append('row')
hwrap['class'] = ' '.join([_col1_class, _col2_class])
wrap.add(field(class_=' '.join(_field_classes)))
wrap.add(field.label(class_=' '.join(_label_classes)))
wrap.add(_field_errors)
wrap.add(_field_descripton)
hwrap.add(wrap)
root.add(hwrap)
root['class'] = ' '.join(_root_classes)
return root
def _wrap_radio(field, **kwargs):
root = tags.div()
legend = tags.label(field.label.text)
wrapper = tags.div()
_root_classes = ['mb-3']
_legend_classes = ['form-label', 'pt-0']
_wrapper_classes = []
_field_descripton = Markup(_add_description(field, **kwargs))
_field_errors = Markup(_add_error_message(field.errors))
if field.errors:
_wrapper_classes.append('is-invalid')
_form_type = kwargs.get('form_type', 'basic')
if _form_type in ['horizontal']:
_cols = kwargs.get('horizontal_columns', ('lg', 2, 10))
_col1_class = f'col-{ _cols[0] }-{ _cols[1] }'
_col2_class = f'col-{ _cols[0] }-{ _cols[2] }'
_root_classes.append('row')
_legend_classes.append(_col1_class)
_wrapper_classes.append(_col2_class)
for key, value in field.choices:
item = tags.div(_class='form-check')
_label = tags.label(
value,
_for=key,
_class='form-check-label',
)
_field = tags._input(
type='radio',
name=field.name,
id=key,
value=key,
_class='form-check-input',
)
if key == field.data:
_field['checked'] = ''
item.add(_field)
item.add(_label)
wrapper.add(item)
wrapper.add(_field_errors)
wrapper.add(_field_descripton)
legend['class'] = ' '.join(_legend_classes)
wrapper['class'] = ' '.join(_wrapper_classes)
root['class'] = ' '.join(_root_classes)
root.add(legend)
root.add(wrapper)
return root
def _wrap_file(field, **kwargs):
_form_type = kwargs.pop('form_type', 'basic')
if _form_type in ['floating']:
return _wrap_field(field, form_type='basic', **kwargs)
else:
return _wrap_field(field, form_type=_form_type, **kwargs)
def _wrap_submit(field, **kwargs):
rv = ''
_form_type = kwargs.get('form_type', 'basic')
_cols = kwargs.get('horizontal_columns', ('lg', 2, 10))
_btn_map = kwargs.get('button_map', {'submit': 'primary'})
_col1 = f'col-{_cols[0]}-{_cols[1]}'
_col2 = f'col-{_cols[0]}-{_cols[2]}'
rv += f'<div class="mb-3 { "row" if _form_type == "horizontal" else "" }">'
rv += f'<div class="{ _col1 if _form_type == "horizontal" else "" }"></div>'
rv += f'<div class="{ _col2 if _form_type == "horizontal" else "" }">'
rv += field(class_=f'btn btn-{ _btn_map.get(field.name) }').unescape()
rv += f'</div>'
rv += _add_description(field, **kwargs)
rv += f'</div>'
return rv
def _wrap_csrf(field):
return field()
def _wrap_formfield(form, **kwargs):
form_fields = ''
_enctype = kwargs.pop('enctype', None)
for field in form:
        # Render every field, including the CSRF token field.
form_fields += render_field(field, **kwargs)
if field.type in ['FileField', 'MultipleFileField']:
_enctype = _enctype or 'multipart/form-data'
return Markup(form_fields)
def render_form(form, **kwargs):
form_fields = ''
_enctype = kwargs.pop('enctype', None)
for field in form:
form_fields += render_field(field, **kwargs)
if field.type in ['FileField', 'MultipleFileField']:
_enctype = _enctype or 'multipart/form-data'
return Markup(_wrap_form(form_fields, enctype=_enctype, **kwargs))
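# Illustrative usage from a Jinja template (hypothetical LoginForm supplied by
# Flask-WTF; parameter names follow the signatures above):
#
#   {{ render_form(form, form_type='horizontal',
#                  horizontal_columns=('lg', 2, 10),
#                  button_map={'submit': 'primary'}) }}
#
# Each field is dispatched through render_field() below, and the form enctype
# switches to multipart/form-data automatically when a file field is present.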
def render_field(field, **kwargs):
form_field = ''
if field.type == 'BooleanField':
form_field = _wrap_boolean(field, **kwargs)
elif field.type == 'RadioField':
form_field = _wrap_radio(field, **kwargs)
elif field.type == 'SubmitField':
form_field = _wrap_submit(field, **kwargs)
elif field.type == 'CSRFTokenField':
form_field = _wrap_csrf(field)
elif field.type == 'FormField':
form_field = _wrap_formfield(field.form, **kwargs)
elif field.type == 'FileField':
form_field = _wrap_file(field, **kwargs)
elif field.type == 'FieldList':
form_field = _wrap_formfield(field.entries, **kwargs)
else:
form_field = _wrap_field(field, **kwargs)
return Markup(form_field) | PypiClean |
/CNFgen-0.9.2-py3-none-any.whl/cnfgen/clihelpers/graph_helpers.py | import argparse
from cnfgen.families.coloring import GraphColoringFormula
from cnfgen.families.coloring import EvenColoringFormula
from cnfgen.families.dominatingset import DominatingSet
from cnfgen.families.dominatingset import Tiling
from cnfgen.families.graphisomorphism import GraphIsomorphism
from cnfgen.families.graphisomorphism import GraphAutomorphism
from cnfgen.families.subgraph import SubgraphFormula
from cnfgen.families.subgraph import CliqueFormula
from cnfgen.families.subgraph import BinaryCliqueFormula
from cnfgen.families.subgraph import RamseyWitnessFormula
from cnfgen.clitools import ObtainSimpleGraph, positive_int, nonnegative_int, make_graph_doc
from cnfgen.clihelpers.formula_helpers import FormulaHelper
class KColorCmdHelper(FormulaHelper):
"""Command line helper for k-color formula
"""
name = 'kcolor'
@staticmethod
def setup_command_line(parser):
"""Setup the command line options for k-color formula
Arguments:
- `parser`: parser to load with options.
"""
parser.usage = "usage:\n {0} [-h|--help] k G".format(parser.prog)
parser.description = """The formula encodes the fact that the graph G has a k-coloring.
This means that it is possible to assign one among the k colors to
that each vertex of the graph such that no two adjacent vertices get
the same color.
positional arguments:
k number of available colors
G a simple undirected graph (see 'cnfgen --help-graph')
optional arguments:
--help, -h show this help message and exit
"""
parser.add_argument('k', type=positive_int, action='store')
parser.add_argument('G', action=ObtainSimpleGraph)
@staticmethod
def build_formula(args, formula_class):
"""Build a k-colorability formula according to the arguments
Arguments:
- `args`: command line options
"""
return GraphColoringFormula(args.G, args.k,
formula_class=formula_class)
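# Illustrative usage (graph specifications follow 'cnfgen --help-graph'):
#
#   cnfgen kcolor 3 gnd 10 4      # 3-colorability of a random 4-regular graph
#
# or programmatically, with a hypothetical graph object G:
#
#   F = GraphColoringFormula(G, 3)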
class ECCmdHelper(FormulaHelper):
name = 'ec'
@staticmethod
def setup_command_line(parser):
parser.usage = "usage:\n {0} [-h|--help] G".format(parser.prog)
parser.description = """The formula is defined on a graph G and claims that it is possible
to split the edges of the graph in two parts, so that each vertex has
an equal number of incident edges in each part.
The formula is well defined as long as all vertices have even degree
(i.e. each connected component has an Eulerian circuit). The formula
is satisfiable if and only if there is an even number of edges in each
connected component (i.e. each such circuit has even length).
The formula originate from the paper 'Locality and Hard SAT-instances'
by Klas Markstrom (2006).
positional arguments:
G a simple undirected graph (see 'cnfgen --help-graph')
optional arguments:
--help, -h show this help message and exit
"""
parser.add_argument('G', action=ObtainSimpleGraph)
@staticmethod
def build_formula(args, formula_class):
return EvenColoringFormula(args.G,
formula_class=formula_class)
class DominatingSetCmdHelper(FormulaHelper):
"""Command line helper for k-dominating set
"""
name = 'domset'
@staticmethod
def setup_command_line(parser):
"""Setup the command line options for dominating set formula
Arguments:
- `parser`: parser to load with options.
"""
parser.usage = """usage:
{0} [-h|--help] [-a|--alternative] d G""".format(parser.prog)
parser.description = """The formula encodes the fact that the graph G has a dominating set
of size d. This means that it is possible to pick at most d vertices
in G so that all remaining vertices have distance at most one
from them.
positional arguments:
d size of the dominating set
G a simple undirected graph (see 'cnfgen --help-graph')
optional arguments:
--help, -h show this help message and exit
--alternative, -a produces a provably hard version (default: false)
"""
parser.add_argument('--alternative',
'-a',
action='store_true',
default=False)
parser.add_argument('d',
type=positive_int,
action='store')
parser.add_argument(
'G', action=ObtainSimpleGraph)
@staticmethod
def build_formula(args, formula_class):
"""Build the k-dominating set formula
Arguments:
- `args`: command line options
"""
return DominatingSet(args.G, args.d,
alternative=args.alternative,
formula_class=formula_class)
class TilingCmdHelper(FormulaHelper):
"""Command line helper for tiling
"""
name = 'tiling'
@staticmethod
def setup_command_line(parser):
"""Setup the command line options for tiling formula
Arguments:
- `parser`: parser to load with options.
"""
parser.usage = """usage:\n {0} [-h|--help] G""".format(parser.prog)
parser.description = """The formula encodes the fact that the graph G has a tiling.
This means that it is possible to pick a subset of vertices D so that
all vertices have distance at most one from exactly one vertex in D.
positional arguments:
G a simple undirected graph (see 'cnfgen --help-graph')
optional arguments:
--help, -h show this help message and exit
"""
parser.add_argument('G', action=ObtainSimpleGraph)
@staticmethod
def build_formula(args, formula_class):
"""Build the tiling formula
Arguments:
- `args`: command line options
"""
return Tiling(args.G,formula_class=formula_class)
iso_description = """The formula takes one or two graphs as input.
{0} G1 --- test if G1 has nontrivial automorphisms
{0} G1 -e G2 --- test if G1 and G2 are isomorphic
where G1 and G2 are simple graph (see 'cnfgen --help-graph')
examples:
{0} grid 3 3
{0} complete 4 -e empty 4 plantclique 4
{0} first.gml -e second.gml
{0} gnm 10 5
optional arguments:
--help, -h show this help message and exit
"""
class GIsoCmdHelper(FormulaHelper):
"""Command line helper for Graph Isomorphism formula
"""
name = 'iso'
@staticmethod
def setup_command_line(parser):
"""Setup the command line options for graph isomorphism formula
Arguments:
- `parser`: parser to load with options.
"""
parser.usage = 'usage:\n {} [-h] G1 [-e G2]'.format(parser.prog)
parser.description = iso_description.format(parser.prog)
parser.add_argument('G',action=ObtainSimpleGraph)
parser.add_argument('-e', metavar='G2',action=ObtainSimpleGraph)
@staticmethod
def build_formula(args, formula_class):
G = args.G
        # argparse stores the '-e' graph under dest 'e'; 'G2' above is only its metavar
        if args.e is not None:
            G2 = args.e
            return GraphIsomorphism(G, G2,
                                    formula_class=formula_class)
        else:
            return GraphAutomorphism(G,
                                     formula_class=formula_class)
class KCliqueCmdHelper(FormulaHelper):
"""Command line helper for k-clique formula
"""
name = 'kclique'
@staticmethod
def setup_command_line(parser):
"""Setup the command line options for k-clique formula
"""
parser.usage = "usage:\n {0} [-h|--help] k G".format(parser.prog)
parser.description = """The formula is satiafiable if and only if graph G contains
a clique of size at least k, i.e. a set of k distinct vertices so that
every pair of them are connected by an edge.
positional arguments:
k size of the clique to be found
G a simple undirected graph (see 'cnfgen --help-graph')
optional arguments:
--help, -h show this help message and exit
--no-symmetry-breaking do not break symmetries by enforcing the
solution to be in increasing order (default: on)
"""
parser.add_argument('k', type=nonnegative_int, action='store')
parser.add_argument('G', action=ObtainSimpleGraph)
parser.add_argument('--no-symmetry-breaking',
action='store_false',
default=True,
dest='symmetrybreaking')
@staticmethod
def build_formula(args, formula_class):
"""Build a k-clique formula according to the arguments
Arguments:
- `args`: command line options
"""
return CliqueFormula(args.G, args.k,
args.symmetrybreaking,
formula_class=formula_class)
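# Illustrative invocations (graph specifications as in 'cnfgen --help-graph'):
#
#   cnfgen kclique 3 gnd 10 4     # is there a triangle in a random 4-regular graph?
#   cnfgen kclique 5 complete 5   # satisfiable: K5 itself is a 5-clique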
class BinaryKCliqueCmdHelper(FormulaHelper):
"""Command line helper for k-clique formula
"""
name = 'kcliquebin'
@staticmethod
def setup_command_line(parser):
"""Setup the command line options for k-clique formula
Arguments:
- `parser`: parser to load with options.
"""
parser.usage = "usage:\n {0} [-h|--help] k G".format(parser.prog)
parser.description = """The formula is satiafiable if and only if graph G contains a clique
of size at least k, i.e. a set of k distinct vertices so that every
pair of them are connected by an edge. The encoding is different from
the 'kclique' formula in the sense that every clique element is
indexed by a binary string of log(|V(G)|) variables.
positional arguments:
k size of the clique to be found
G a simple undirected graph (see 'cnfgen --help-graph')
optional arguments:
--help, -h show this help message and exit
"""
parser.add_argument('k', type=nonnegative_int, action='store')
parser.add_argument('G', action=ObtainSimpleGraph)
@staticmethod
def build_formula(args, formula_class):
"""Build a k-clique formula according to the arguments
Arguments:
- `args`: command line options
"""
return BinaryCliqueFormula(args.G, args.k,
formula_class=formula_class)
class RWCmdHelper(FormulaHelper):
"""Command line helper for ramsey graph formula
"""
name = 'ramlb'
@staticmethod
def setup_command_line(parser):
"""Setup the command line options for ramsey witness formula
Arguments:
- `parser`: parser to load with options.
"""
parser.usage = "usage:\n {0} [-h|--help] k s G".format(parser.prog)
parser.description = """The formula is satiafiable when graph G contains either a clique of
size at least k, or an independent set of size s. Notice that any
graph with r(k,s) vertices or more must contain one or the other.
Therefore the formula is unsatisfiable only for a graph G such that
r(k,s) > |V(G)|.
positional arguments:
k size of the clique to be found
s size of the independent set to be found
G a simple undirected graph (see 'cnfgen --help-graph')
optional arguments:
--help, -h show this help message and exit
"""
parser.add_argument('k',
type=nonnegative_int,
action='store')
parser.add_argument('s',
type=nonnegative_int,
action='store')
parser.add_argument(
'G',
action=ObtainSimpleGraph)
@staticmethod
def build_formula(args, formula_class):
"""Build a formula to check that a graph is a ramsey number lower bound
Arguments:
- `args`: command line options
"""
return RamseyWitnessFormula(args.G, args.k, args.s,
formula_class=formula_class)
subgraph_description = """The formula takes two graphs: a main <graph>
and a candidate <subgraph>, and claims that
the latter is indeed a subgraph of the former.
examples:
{0} -G grid 4 4 -H grid 2 2
{0} -G gnd 10 4 -H complete 5 (decides whether there is a 5-clique)
{0} -G large.gml -H small.dot
positional arguments:
-G <graph> main graph (see \'cnfgen --help-graph\')
-H <subgraph> candidate subgraph (see \'cnfgen --help-graph\')
optional arguments:
--help, -h show this help message and exit
"""
class SubGraphCmdHelper(FormulaHelper):
"""Command line helper for Graph Isomorphism formula
"""
name = 'subgraph'
@staticmethod
def setup_command_line(parser):
"""Setup the command line options for graph isomorphism formula
Arguments:
- `parser`: parser to load with options.
"""
parser.usage = '{} [-h|--help] -G <graph> -H <subgraph>'.format(parser.prog)
parser.description = subgraph_description.format(parser.prog)
parser.add_argument('-G',
metavar='<graph>',
required=True,
help=argparse.SUPPRESS,
action=ObtainSimpleGraph)
parser.add_argument('-H',
metavar='<subgraph>',
required=True,
help=argparse.SUPPRESS,
action=ObtainSimpleGraph)
@staticmethod
def build_formula(args, formula_class):
"""Build a subgraph formula according to the arguments
Arguments:
- `args`: command line options
"""
return SubgraphFormula(args.G, args.H,
induced=False,
symbreak=False,
formula_class=formula_class) | PypiClean |
/JAVA_CLASS_GENERATOR-1.tar.gz/JAVA_CLASS_GENERATOR-1/JAVA_CLASS_GENERATOR/java_class_generator.py | import sys
import os
# Creating necessary functions
def clear():
# type: () -> None
if sys.platform.startswith('win'):
os.system('cls') # For Windows System
else:
os.system('clear') # For Linux System
def capitalize_first(string: str) -> str:
if len(string) == 0:
return ""
elif len(string) == 1:
return string[0].capitalize()
else:
return string[0].capitalize() + string[1::]
def generate_attributes(attribute_names: list, attribute_types: list) -> str:
result: str = "" # initial value
for i in range(min(len(attribute_names), len(attribute_types))):
result += """
private """ + str(attribute_types[i]) + """ """ + str(attribute_names[i]) + """;
"""
return result
def generate_constructor(class_name: str, parameters: list, parameter_types: list) -> str:
parameter_initialization: str = "" # initial value
for parameter in parameters:
parameter_initialization += """
this.""" + str(parameter) + """ = """ + str(parameter) + """;
"""
between_brackets: str = "" # initial value
for i in range(len(parameters)):
between_brackets += str(parameter_types[i]) + " " + str(parameters[i])
if i < len(parameters) - 1:
between_brackets += ", "
result: str = """
public """ + str(class_name) + """(""" + str(between_brackets) + """){
""" + str(parameter_initialization) + """
}
"""
return result
def generate_getter(attribute_name: str, attribute_type: str) -> str:
return """
public """ + str(attribute_type) + """ get""" + capitalize_first(str(attribute_name)) + """() {
return """ + str(attribute_name) + """;
}
"""
def generate_setter(attribute_name: str, attribute_type: str) -> str:
return """
public void set""" + capitalize_first(str(attribute_name)) + """(""" + str(attribute_type) + """ """ \
+ str(attribute_name) + """){
this.""" + str(attribute_name) + """ = """ + str(attribute_name) + """;
"""
def generate_method(method_name: str, return_type: str, parameters: list, parameter_types: list) -> str:
between_brackets: str = "" # initial value
for i in range(len(parameters)):
between_brackets += str(parameter_types[i]) + " " + str(parameters[i])
if i < len(parameters) - 1:
between_brackets += ", "
return """
public """ + str(return_type) + """ """ + str(method_name) + """(""" + str(between_brackets) + """){
        throw new UnsupportedOperationException(\"Method not implemented\");
}"""
# Creating main function used to run the application.
def main():
"""
This main function is used to run the application.
:return: None
"""
print("Welcome to 'Java Class Generator' by 'DigitalCreativeApkDev'.")
print("This application allows you to easily generate the template of a Java class you want to write!")
print("Enter 'Y' for yes.")
print("Enter anything else for no.")
continue_using: str = input("Do you want to continue using the application 'Java Class Generator'? ")
while continue_using == "Y":
# Clearing the command line window
clear()
class_name: str = input("Please enter the name of the class you want to write: ")
script: str = """
class """ + str(class_name) + """{
"""
attributes: list = [] # initial value
attribute_types: list = [] # initial value
num_attributes: int = int(input("How many attributes do you want in your class (at least 0)? "))
while num_attributes < 0:
num_attributes = int(input("Sorry, invalid input! "
"How many attributes do you want in your class (at least 0)? "))
for i in range(num_attributes):
attribute_name: str = input("Please enter the name of the attribute you want to add: ")
while attribute_name in attributes:
attribute_name = input("Sorry! That attribute is already used! Please enter another name: ")
attribute_type: str = input("Please enter the type of the attribute '" + str(attribute_name) + "': ")
attributes.append(attribute_name)
attribute_types.append(attribute_type)
script += generate_attributes(attributes, attribute_types)
script += generate_constructor(class_name, [], [])
script += generate_constructor(class_name, attributes, attribute_types)
for i in range(len(attributes)):
script += generate_getter(attributes[i], attribute_types[i])
script += generate_setter(attributes[i], attribute_types[i])
num_methods: int = int(input("How many methods do you want to create in '" + str(class_name) + "' class? "))
for i in range(num_methods):
method_name: str = input("Please enter the name of the method: ")
return_type: str = input("Please enter the return type of the method: ")
parameters: list = []
parameter_types: list = [] # initial value
num_parameters: int = int(input("How many parameters do you want in the method '"
+ str(method_name) + "' (at least 0)? "))
while num_parameters < 0:
num_parameters = int(input("Sorry, invalid input! How many parameters do you want in the method '"
+ str(method_name) + "' (at least 0)? "))
for k in range(num_parameters):
parameter_name: str = input("Please enter the name of the parameter: ")
parameter_type: str = input("Please enter the type of the parameter: ")
parameters.append(parameter_name)
parameter_types.append(parameter_type)
script += generate_method(method_name, return_type, parameters, parameter_types)
script += """
}"""
f = open(str(class_name) + ".java", "w")
f.write(script)
f.close()
# Clearing the command line window
clear()
print("Your class is written in the file '" + str(class_name) + ".java'!")
print("Enter 'Y' for yes.")
print("Enter anything else for no.")
continue_using = input("Do you want to continue using the application 'Java Class Generator'? ")
sys.exit()
if __name__ == '__main__':
main() | PypiClean |
/Financial_Graph_Lib-1.0.4-py3-none-any.whl/fglib/kagilib.py | import pandas as pd
from fglib import atrlib
import numpy as np
def kagi(df):
kagi_break = atrlib.brick_size(df)
    d, o, c, ko, kc, color, vol = [], [], [], [], [], [], []
ko.append(df["open"][0])
d.append(df["date"][0])
o.append(df["open"][0])
c.append(df["close"][0])
vol.append(df['volume'][0])
leng = len(ko)
i=0
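    # Scan forward from the first row until the close price has moved past
    # the kagi_break threshold relative to the initial open; the direction of
    # that first breakout fixes the color of the first kagi line.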
if(ko[leng-1]+kagi_break>df["close"][i]):
i = 0
color.append("green")
while(ko[leng-1]+kagi_break>df["close"][i]):
i= i+1
elif(ko[leng-1]-kagi_break<df["close"][i]):
i = 0
color.append("red")
while(ko[leng-1]-kagi_break<df["close"][i]):
i= i+1
kc.append(df["close"][i])
volume = 0.0
j = i+1
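    # Main loop: extend the current kagi line while the close keeps moving in
    # its direction, and start a new line (recording the accumulated volume)
    # once the close reverses by more than kagi_break.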
while(j<len(df)):
volume += df['volume'][j]
leng = len(ko)
if(kc[leng-1]>ko[leng-1]):
if(df["close"][j]>kc[leng-1]):
kc[leng-1] = df["close"][j]
elif(df["close"][j]<kc[leng-1]-kagi_break):
ko.append(kc[leng-1])
d.append(df["date"][j])
vol.append(volume)
volume = 0.0
kc.append(df["close"][j])
else:
if(df["close"][j]<kc[leng-1]):
kc[leng-1] = df["close"][j]
elif(df["close"][j]>kc[leng-1]+kagi_break):
ko.append(kc[leng-1])
d.append(df["date"][j])
vol.append(volume)
volume = 0.0
kc.append(df["close"][j])
j = j+1
data = pd.DataFrame(d,columns=["date"])
data["open"] = ko
data["close"] = kc
data['volume'] = vol
x=np.arange(0,len(data))
height,low,high = [],[],[]
for i in x:
if data['close'][i] >data['open'][i]:
height.append(data['close'][i] - data['open'][i])
high.append(data['close'][i])
low.append(data['open'][i])
else:
height.append(data['open'][i] - data['close'][i])
high.append(data['open'][i])
low.append(data['close'][i])
data["height"] = height
data["low"] =low
data["high"] = high
x=np.arange(1,len(data))
for i in x:
if data['close'][i] >data['open'][i-1]:
color.append('green')
else:
color.append('red')
data["color"] = color
return data | PypiClean |
/BIA_OBS-1.0.3.tar.gz/BIA_OBS-1.0.3/BIA/static/dist/node_modules/fast-glob/out/providers/filters/deep.js | "use strict";
Object.defineProperty(exports, "__esModule", { value: true });
const utils = require("../../utils");
const partial_1 = require("../matchers/partial");
class DeepFilter {
constructor(_settings, _micromatchOptions) {
this._settings = _settings;
this._micromatchOptions = _micromatchOptions;
}
getFilter(basePath, positive, negative) {
const matcher = this._getMatcher(positive);
const negativeRe = this._getNegativePatternsRe(negative);
return (entry) => this._filter(basePath, entry, matcher, negativeRe);
}
_getMatcher(patterns) {
return new partial_1.default(patterns, this._settings, this._micromatchOptions);
}
_getNegativePatternsRe(patterns) {
const affectDepthOfReadingPatterns = patterns.filter(utils.pattern.isAffectDepthOfReadingPattern);
return utils.pattern.convertPatternsToRe(affectDepthOfReadingPatterns, this._micromatchOptions);
}
_filter(basePath, entry, matcher, negativeRe) {
if (this._isSkippedByDeep(basePath, entry.path)) {
return false;
}
if (this._isSkippedSymbolicLink(entry)) {
return false;
}
const filepath = utils.path.removeLeadingDotSegment(entry.path);
if (this._isSkippedByPositivePatterns(filepath, matcher)) {
return false;
}
return this._isSkippedByNegativePatterns(filepath, negativeRe);
}
_isSkippedByDeep(basePath, entryPath) {
/**
* Avoid unnecessary depth calculations when it doesn't matter.
*/
if (this._settings.deep === Infinity) {
return false;
}
return this._getEntryLevel(basePath, entryPath) >= this._settings.deep;
}
_getEntryLevel(basePath, entryPath) {
const entryPathDepth = entryPath.split('/').length;
if (basePath === '') {
return entryPathDepth;
}
const basePathDepth = basePath.split('/').length;
return entryPathDepth - basePathDepth;
}
_isSkippedSymbolicLink(entry) {
return !this._settings.followSymbolicLinks && entry.dirent.isSymbolicLink();
}
_isSkippedByPositivePatterns(entryPath, matcher) {
return !this._settings.baseNameMatch && !matcher.match(entryPath);
}
_isSkippedByNegativePatterns(entryPath, patternsRe) {
return !utils.pattern.matchAny(entryPath, patternsRe);
}
}
exports.default = DeepFilter; | PypiClean |
/ORG.asm-1.0.3.tar.gz/ORG.asm-1.0.3/doc/sphinx/source/install.rst | Installing |orgasm|
===================
Availability of |orgasm|
........................
|Orgasm| is open source and protected by the CeCILL 2.1 license
(`http://www.cecill.info/licences/Licence_CeCILL_V2.1-en.html <http://www.cecill.info/licences/Licence_CeCILL_V2.1-en.html>`_).
|Orgasm| is deposited on the Python Package Index (PyPI : `https://pypi.python.org/pypi/ORG.asm`_)
and all the sources can be downloaded from the `metabarcoding.org <http://metabarcoding.org>`_ gitlab server
(`https://git.metabarcoding.org/org-asm/org-asm`_).
Prerequisites
.............
To install the |orgasm|, you need the following software installed on your
system:
* Python 3.5 (installed by default on most ``Unix`` systems, available from
`the Python website <http://www.python.org/>`_)
* ``gcc`` (installed by default on most ``Unix`` systems, available from the
GNU sites dedicated to `GCC <https://www.gnu.org/software/gcc/>`_ and
`GMake <https://www.gnu.org/software/make/>`_)
On a linux system
^^^^^^^^^^^^^^^^^
You have to take care that the Python development packages are installed.
On MacOSX
^^^^^^^^^
The C compiler and all the other compilation tools are included in the `XCode <https://itunes.apple.com/fr/app/xcode/id497799835?mt=12>`_
application, which is not installed by default. Python3 is not installed by default either. You have to install a complete distribution
of Python, which you can download as a `MacOSX package from the Python website <https://www.python.org/downloads/>`_.
Developer command line tools can also be installed using the following command line in a UNIX terminal
.. code-block:: bash
xcode-select --install
From the Mojave version of MacOSX onwards, the C headers have to be installed using the following command:
.. code-block:: bash
open /Library/Developer/CommandLineTools/Packages/macOS_SDK_headers_for_macOS_10.14.pkg
Downloading and installing |orgasm|
...................................
The |orgasm| is downloaded and installed using the :download:`get-orgasm.py <../../../get_orgasm/get-orgasm.py>` script.
This is a user-level installation that does not need administrator privileges.
Once downloaded, move the file :download:`get-orgasm.py <../../../get_orgasm/get-orgasm.py>` into the directory where you want to install
the |orgasm|. From a Unix terminal you must now run the command:
.. code-block:: bash
python3 get-orgasm.py
The script will create a new directory, in the place where you run it, in which the whole
|orgasm| will be installed. No system privileges are required, and your system will not
be altered in any way by the |orgasm| installation.
The newly created directory is named ``ORG.asm-VERSION``, where ``VERSION`` is substituted by the
latest available version number.
Inside the newly created directory the whole |orgasm| is installed. Next to this directory
there is a shell script named ``orgasm``. Running this script activates the |orgasm|
by reconfiguring your Unix environment.
.. code-block:: bash
./orgasm
Once activated, you can deactivate the |orgasm| by typing the command ``exit``.
.. code-block:: bash
exit
ORG.asm are no more activated, Bye...
=====================================
System level installation
.........................
To install the |orgasm| at the system level you can follow one of two options:
- copy the ``orgasm`` script into a usual directory for installed programs, like ``/usr/local/bin``,
  but never move the ``ORG.asm`` directory itself after the installation by
  :download:`get-orgasm.py <../../../get_orgasm/get-orgasm.py>`.
- The other solution is to add the ``export/bin`` directory located in the ``ORG.asm`` directory
  to the ``PATH`` environment variable, as shown in the example below.
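  For instance, with a ``bash`` shell, you can add a line like the following
  to your ``~/.bash_profile`` (the actual path depends on where you installed
  the |orgasm|):

  .. code-block:: bash

     export PATH="/path/to/ORG.asm-VERSION/export/bin:$PATH"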
Retrieving the sources of |orgasm|
..................................
If you want to compile the |orgasm| by yourself, you will need to install the same
prerequisites:
.. code-block:: bash
> pip3 install -U pip
> pip3 install -U sphinx
> pip3 install -U cython
Moreover, you need to install a git client (a list of clients is available from the `GIT website <https://git-scm.com/downloads>`_).
Then you can download the sources:
.. code-block:: bash
> git clone https://git.metabarcoding.org/org-asm/org-asm.git
This command will create a new directory called ``org-asm``.
Compiling and installing |orgasm|
.................................
From the directory where you retrieved the sources, execute the following commands:
.. code-block:: bash
> cd org-asm
> python3 setup.py --serenity install
Once installed, you can test your installation by running the commands of the
:doc:`tutorials <./mitochondrion>`.
| PypiClean |
/Electrum-CHI-3.3.8.tar.gz/Electrum-CHI-3.3.8/packages/dns/rdata.py |
# Copyright (C) 2001-2017 Nominum, Inc.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose with or without fee is hereby granted,
# provided that the above copyright notice and this permission notice
# appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND NOMINUM DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL NOMINUM BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT
# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
"""DNS rdata."""
from io import BytesIO
import base64
import binascii
import dns.exception
import dns.name
import dns.rdataclass
import dns.rdatatype
import dns.tokenizer
import dns.wiredata
from ._compat import xrange, string_types, text_type
try:
import threading as _threading
except ImportError:
import dummy_threading as _threading
_hex_chunksize = 32
def _hexify(data, chunksize=_hex_chunksize):
"""Convert a binary string into its hex encoding, broken up into chunks
of chunksize characters separated by a space.
"""
line = binascii.hexlify(data)
return b' '.join([line[i:i + chunksize]
for i
in range(0, len(line), chunksize)]).decode()
_base64_chunksize = 32
def _base64ify(data, chunksize=_base64_chunksize):
"""Convert a binary string into its base64 encoding, broken up into chunks
of chunksize characters separated by a space.
"""
line = base64.b64encode(data)
return b' '.join([line[i:i + chunksize]
for i
in range(0, len(line), chunksize)]).decode()
__escaped = bytearray(b'"\\')
def _escapify(qstring):
"""Escape the characters in a quoted string which need it."""
if isinstance(qstring, text_type):
qstring = qstring.encode()
if not isinstance(qstring, bytearray):
qstring = bytearray(qstring)
text = ''
for c in qstring:
if c in __escaped:
text += '\\' + chr(c)
elif c >= 0x20 and c < 0x7F:
text += chr(c)
else:
text += '\\%03d' % c
return text
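# For instance, _escapify(b'a"b\x07') returns 'a\\"b\\007': quotes and
# backslashes are backslash-escaped, printable ASCII passes through, and any
# other byte is rendered as a backslash followed by its three-digit decimal
# value.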
def _truncate_bitmap(what):
"""Determine the index of greatest byte that isn't all zeros, and
return the bitmap that contains all the bytes less than that index.
"""
for i in xrange(len(what) - 1, -1, -1):
if what[i] != 0:
return what[0: i + 1]
return what[0:1]
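# For instance, _truncate_bitmap(bytearray(b'\x20\x01\x00\x00')) returns
# bytearray(b'\x20\x01'): trailing zero bytes are dropped, but at least the
# first byte is always kept.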
class Rdata(object):
"""Base class for all DNS rdata types."""
__slots__ = ['rdclass', 'rdtype']
def __init__(self, rdclass, rdtype):
"""Initialize an rdata.
*rdclass*, an ``int`` is the rdataclass of the Rdata.
*rdtype*, an ``int`` is the rdatatype of the Rdata.
"""
self.rdclass = rdclass
self.rdtype = rdtype
def covers(self):
"""Return the type a Rdata covers.
DNS SIG/RRSIG rdatas apply to a specific type; this type is
returned by the covers() function. If the rdata type is not
SIG or RRSIG, dns.rdatatype.NONE is returned. This is useful when
creating rdatasets, allowing the rdataset to contain only RRSIGs
of a particular type, e.g. RRSIG(NS).
Returns an ``int``.
"""
return dns.rdatatype.NONE
def extended_rdatatype(self):
"""Return a 32-bit type value, the least significant 16 bits of
which are the ordinary DNS type, and the upper 16 bits of which are
the "covered" type, if any.
Returns an ``int``.
"""
return self.covers() << 16 | self.rdtype
def to_text(self, origin=None, relativize=True, **kw):
"""Convert an rdata to text format.
Returns a ``text``.
"""
raise NotImplementedError
def to_wire(self, file, compress=None, origin=None):
"""Convert an rdata to wire format.
Returns a ``binary``.
"""
raise NotImplementedError
def to_digestable(self, origin=None):
"""Convert rdata to a format suitable for digesting in hashes. This
is also the DNSSEC canonical form.
Returns a ``binary``.
"""
f = BytesIO()
self.to_wire(f, None, origin)
return f.getvalue()
def validate(self):
"""Check that the current contents of the rdata's fields are
valid.
If you change an rdata by assigning to its fields,
it is a good idea to call validate() when you are done making
changes.
Raises various exceptions if there are problems.
Returns ``None``.
"""
dns.rdata.from_text(self.rdclass, self.rdtype, self.to_text())
def __repr__(self):
covers = self.covers()
if covers == dns.rdatatype.NONE:
ctext = ''
else:
ctext = '(' + dns.rdatatype.to_text(covers) + ')'
return '<DNS ' + dns.rdataclass.to_text(self.rdclass) + ' ' + \
dns.rdatatype.to_text(self.rdtype) + ctext + ' rdata: ' + \
str(self) + '>'
def __str__(self):
return self.to_text()
def _cmp(self, other):
"""Compare an rdata with another rdata of the same rdtype and
rdclass.
Return < 0 if self < other in the DNSSEC ordering, 0 if self
== other, and > 0 if self > other.
"""
our = self.to_digestable(dns.name.root)
their = other.to_digestable(dns.name.root)
if our == their:
return 0
elif our > their:
return 1
else:
return -1
def __eq__(self, other):
if not isinstance(other, Rdata):
return False
if self.rdclass != other.rdclass or self.rdtype != other.rdtype:
return False
return self._cmp(other) == 0
def __ne__(self, other):
if not isinstance(other, Rdata):
return True
if self.rdclass != other.rdclass or self.rdtype != other.rdtype:
return True
return self._cmp(other) != 0
def __lt__(self, other):
if not isinstance(other, Rdata) or \
self.rdclass != other.rdclass or self.rdtype != other.rdtype:
return NotImplemented
return self._cmp(other) < 0
def __le__(self, other):
if not isinstance(other, Rdata) or \
self.rdclass != other.rdclass or self.rdtype != other.rdtype:
return NotImplemented
return self._cmp(other) <= 0
def __ge__(self, other):
if not isinstance(other, Rdata) or \
self.rdclass != other.rdclass or self.rdtype != other.rdtype:
return NotImplemented
return self._cmp(other) >= 0
def __gt__(self, other):
if not isinstance(other, Rdata) or \
self.rdclass != other.rdclass or self.rdtype != other.rdtype:
return NotImplemented
return self._cmp(other) > 0
def __hash__(self):
return hash(self.to_digestable(dns.name.root))
@classmethod
def from_text(cls, rdclass, rdtype, tok, origin=None, relativize=True):
raise NotImplementedError
@classmethod
def from_wire(cls, rdclass, rdtype, wire, current, rdlen, origin=None):
raise NotImplementedError
def choose_relativity(self, origin=None, relativize=True):
"""Convert any domain names in the rdata to the specified
relativization.
"""
class GenericRdata(Rdata):
"""Generic Rdata Class
This class is used for rdata types for which we have no better
implementation. It implements the DNS "unknown RRs" scheme.
"""
__slots__ = ['data']
def __init__(self, rdclass, rdtype, data):
super(GenericRdata, self).__init__(rdclass, rdtype)
self.data = data
def to_text(self, origin=None, relativize=True, **kw):
return r'\# %d ' % len(self.data) + _hexify(self.data)
@classmethod
def from_text(cls, rdclass, rdtype, tok, origin=None, relativize=True):
token = tok.get()
if not token.is_identifier() or token.value != r'\#':
raise dns.exception.SyntaxError(
r'generic rdata does not start with \#')
length = tok.get_int()
chunks = []
        while True:
token = tok.get()
if token.is_eol_or_eof():
break
chunks.append(token.value.encode())
hex = b''.join(chunks)
data = binascii.unhexlify(hex)
if len(data) != length:
raise dns.exception.SyntaxError(
'generic rdata hex data has wrong length')
return cls(rdclass, rdtype, data)
def to_wire(self, file, compress=None, origin=None):
file.write(self.data)
@classmethod
def from_wire(cls, rdclass, rdtype, wire, current, rdlen, origin=None):
return cls(rdclass, rdtype, wire[current: current + rdlen])
_rdata_modules = {}
_module_prefix = 'dns.rdtypes'
_import_lock = _threading.Lock()
def get_rdata_class(rdclass, rdtype):
def import_module(name):
with _import_lock:
mod = __import__(name)
components = name.split('.')
for comp in components[1:]:
mod = getattr(mod, comp)
return mod
mod = _rdata_modules.get((rdclass, rdtype))
rdclass_text = dns.rdataclass.to_text(rdclass)
rdtype_text = dns.rdatatype.to_text(rdtype)
rdtype_text = rdtype_text.replace('-', '_')
if not mod:
        mod = _rdata_modules.get((dns.rdataclass.ANY, rdtype))
if not mod:
try:
mod = import_module('.'.join([_module_prefix,
rdclass_text, rdtype_text]))
_rdata_modules[(rdclass, rdtype)] = mod
except ImportError:
try:
mod = import_module('.'.join([_module_prefix,
'ANY', rdtype_text]))
_rdata_modules[(dns.rdataclass.ANY, rdtype)] = mod
except ImportError:
mod = None
if mod:
cls = getattr(mod, rdtype_text)
else:
cls = GenericRdata
return cls
def from_text(rdclass, rdtype, tok, origin=None, relativize=True):
"""Build an rdata object from text format.
This function attempts to dynamically load a class which
implements the specified rdata class and type. If there is no
class-and-type-specific implementation, the GenericRdata class
is used.
Once a class is chosen, its from_text() class method is called
with the parameters to this function.
If *tok* is a ``text``, then a tokenizer is created and the string
is used as its input.
*rdclass*, an ``int``, the rdataclass.
*rdtype*, an ``int``, the rdatatype.
*tok*, a ``dns.tokenizer.Tokenizer`` or a ``text``.
*origin*, a ``dns.name.Name`` (or ``None``), the
origin to use for relative names.
*relativize*, a ``bool``. If true, name will be relativized to
the specified origin.
Returns an instance of the chosen Rdata subclass.
"""
if isinstance(tok, string_types):
tok = dns.tokenizer.Tokenizer(tok)
cls = get_rdata_class(rdclass, rdtype)
if cls != GenericRdata:
# peek at first token
token = tok.get()
tok.unget(token)
if token.is_identifier() and \
token.value == r'\#':
#
# Known type using the generic syntax. Extract the
# wire form from the generic syntax, and then run
# from_wire on it.
#
rdata = GenericRdata.from_text(rdclass, rdtype, tok, origin,
relativize)
return from_wire(rdclass, rdtype, rdata.data, 0, len(rdata.data),
origin)
return cls.from_text(rdclass, rdtype, tok, origin, relativize)
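# Hedged usage sketch (illustrative only, kept as a comment so nothing runs
# at import time):
#
#     import dns.rdata, dns.rdataclass, dns.rdatatype
#     rd = dns.rdata.from_text(dns.rdataclass.IN, dns.rdatatype.A, '127.0.0.1')
#     rd.to_text()   # -> '127.0.0.1'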
def from_wire(rdclass, rdtype, wire, current, rdlen, origin=None):
"""Build an rdata object from wire format
This function attempts to dynamically load a class which
implements the specified rdata class and type. If there is no
class-and-type-specific implementation, the GenericRdata class
is used.
Once a class is chosen, its from_wire() class method is called
with the parameters to this function.
*rdclass*, an ``int``, the rdataclass.
*rdtype*, an ``int``, the rdatatype.
*wire*, a ``binary``, the wire-format message.
*current*, an ``int``, the offset in wire of the beginning of
the rdata.
*rdlen*, an ``int``, the length of the wire-format rdata
*origin*, a ``dns.name.Name`` (or ``None``). If not ``None``,
then names will be relativized to this origin.
Returns an instance of the chosen Rdata subclass.
"""
wire = dns.wiredata.maybe_wrap(wire)
cls = get_rdata_class(rdclass, rdtype)
return cls.from_wire(rdclass, rdtype, wire, current, rdlen, origin)
class RdatatypeExists(dns.exception.DNSException):
"""DNS rdatatype already exists."""
supp_kwargs = {'rdclass', 'rdtype'}
fmt = "The rdata type with class {rdclass} and rdtype {rdtype} " + \
"already exists."
def register_type(implementation, rdtype, rdtype_text, is_singleton=False,
rdclass=dns.rdataclass.IN):
"""Dynamically register a module to handle an rdatatype.
*implementation*, a module implementing the type in the usual dnspython
way.
*rdtype*, an ``int``, the rdatatype to register.
*rdtype_text*, a ``text``, the textual form of the rdatatype.
*is_singleton*, a ``bool``, indicating if the type is a singleton (i.e.
RRsets of the type can have only one member.)
*rdclass*, the rdataclass of the type, or ``dns.rdataclass.ANY`` if
it applies to all classes.
"""
existing_cls = get_rdata_class(rdclass, rdtype)
if existing_cls != GenericRdata:
raise RdatatypeExists(rdclass=rdclass, rdtype=rdtype)
_rdata_modules[(rdclass, rdtype)] = implementation
dns.rdatatype.register_type(rdtype, rdtype_text, is_singleton) | PypiClean |
/OMEMO-1.0.2.tar.gz/OMEMO-1.0.2/omemo/backend.py | from abc import ABC, abstractmethod
from typing import Any, Optional, Tuple
from .bundle import Bundle
from .message import Content, EncryptedKeyMaterial, PlainKeyMaterial, KeyExchange
from .session import Session
from .types import OMEMOException
__all__ = [ # pylint: disable=unused-variable
"Backend",
"BackendException",
"DecryptionFailed",
"KeyExchangeFailed",
"TooManySkippedMessageKeys"
]
class BackendException(OMEMOException):
"""
Parent type for all exceptions specific to :class:`Backend`.
"""
class DecryptionFailed(BackendException):
"""
Raised by various methods of :class:`Backend` in case of backend-specific failures during decryption.
"""
class KeyExchangeFailed(BackendException):
"""
Raised by :meth:`Backend.build_session_active` and :meth:`Backend.build_session_passive` in case of an
error during the processing of a key exchange for session building. Known error conditions are:
    * The bundle does not contain any pre keys (active session building)
    * The signature of the signed pre key could not be verified (active session building)
    * An unknown (signed) pre key was referred to (passive session building)
Additional backend-specific error conditions might exist.
"""
class TooManySkippedMessageKeys(BackendException):
"""
Raised by :meth:`Backend.decrypt_key_material` if a message skips more message keys than allowed.
"""
class Backend(ABC):
"""
The base class for all backends. A backend is a unit providing the functionality of a certain OMEMO
version to the core library.
Warning:
Make sure to call :meth:`__init__` from your subclass to configure per-message and per-session skipped
message key DoS protection thresholds, and respect those thresholds when decrypting key material using
:meth:`decrypt_key_material`.
Note:
Most methods can raise :class:`~omemo.storage.StorageException` in addition to those exceptions
listed explicitly.
Note:
All usages of "identity key" in the public API refer to the public part of the identity key pair in
Ed25519 format. Otherwise, "identity key pair" is explicitly used to refer to the full key pair.
Note:
For backend implementors: as part of your backend implementation, you are expected to subclass various
abstract base classes like :class:`~omemo.session.Session`, :class:`~omemo.message.Content`,
:class:`~omemo.message.PlainKeyMaterial`, :class:`~omemo.message.EncryptedKeyMaterial` and
:class:`~omemo.message.KeyExchange`. Whenever any of these abstract base types appears in a method
signature of the :class:`Backend` class, what's actually meant is an instance of your respective
subclass. This is not correctly expressed through the type system, since I couldn't think of a clean
way to do so. Adding generics for every single of these types seemed not worth the effort. For now,
the recommended way to deal with this type inaccuray is to assert the types of the affected method
parameters, for example::
async def store_session(self, session: Session) -> Any:
assert isinstance(session, MySessionImpl)
...
Doing so tells mypy how to deal with the situation. These assertions should never fail.
Note:
For backend implementors: you can access the identity key pair at any time via
:meth:`omemo.identity_key_pair.IdentityKeyPair.get`.
"""
def __init__(
self,
max_num_per_session_skipped_keys: int = 1000,
max_num_per_message_skipped_keys: Optional[int] = None
) -> None:
"""
Args:
max_num_per_session_skipped_keys: The maximum number of skipped message keys to keep around per
session. Once the maximum is reached, old message keys are deleted to make space for newer
ones. Accessible via :attr:`max_num_per_session_skipped_keys`.
max_num_per_message_skipped_keys: The maximum number of skipped message keys to accept in a single
message. When set to ``None`` (the default), this parameter defaults to the per-session
maximum (i.e. the value of the ``max_num_per_session_skipped_keys`` parameter). This parameter
may only be 0 if the per-session maximum is 0, otherwise it must be a number between 1 and the
per-session maximum. Accessible via :attr:`max_num_per_message_skipped_keys`.
"""
if max_num_per_message_skipped_keys == 0 and max_num_per_session_skipped_keys != 0:
raise ValueError(
"The number of allowed per-message skipped keys must be nonzero if the number of per-session"
" skipped keys to keep is nonzero."
)
        if (max_num_per_message_skipped_keys or 0) > max_num_per_session_skipped_keys:
raise ValueError(
"The number of allowed per-message skipped keys must not be greater than the number of"
" per-session skipped keys to keep."
)
self.__max_num_per_session_skipped_keys = max_num_per_session_skipped_keys
self.__max_num_per_message_skipped_keys = max_num_per_session_skipped_keys if \
max_num_per_message_skipped_keys is None else max_num_per_message_skipped_keys
@property
def max_num_per_session_skipped_keys(self) -> int:
"""
Returns:
The maximum number of skipped message keys to keep around per session.
"""
return self.__max_num_per_session_skipped_keys
@property
def max_num_per_message_skipped_keys(self) -> int:
"""
Returns:
The maximum number of skipped message keys to accept in a single message.
"""
return self.__max_num_per_message_skipped_keys
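    # Hedged illustration (not part of the library): a concrete backend would
    # typically forward its own limits to this constructor, e.g.
    #
    #     class MyBackend(Backend):
    #         def __init__(self) -> None:
    #             super().__init__(max_num_per_session_skipped_keys=1000,
    #                              max_num_per_message_skipped_keys=25)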
@property
@abstractmethod
def namespace(self) -> str:
"""
Returns:
The namespace provided/handled by this backend implementation.
"""
@abstractmethod
async def load_session(self, bare_jid: str, device_id: int) -> Optional[Session]:
"""
Args:
bare_jid: The bare JID the device belongs to.
device_id: The id of the device.
Returns:
The session associated with the device, or `None` if such a session does not exist.
Warning:
Multiple sessions for the same device can exist in memory, however only one session per device can
exist in storage. Which one of the in-memory sessions is persisted in storage is controlled by
calling the :meth:`store_session` method.
"""
@abstractmethod
async def store_session(self, session: Session) -> Any:
"""
Store a session, overwriting any previously stored session for the bare JID and device id this session
belongs to.
Args:
session: The session to store.
Returns:
Anything, the return value is ignored.
Warning:
Multiple sessions for the same device can exist in memory, however only one session per device can
exist in storage. Which one of the in-memory sessions is persisted in storage is controlled by
calling this method.
"""
@abstractmethod
async def build_session_active(
self,
bare_jid: str,
device_id: int,
bundle: Bundle,
plain_key_material: PlainKeyMaterial
) -> Tuple[Session, EncryptedKeyMaterial]:
"""
Actively build a session.
Args:
bare_jid: The bare JID the device belongs to.
device_id: The id of the device.
bundle: The bundle containing the public key material of the other device required for active
session building.
plain_key_material: The key material to encrypt for the recipient as part of the initial key
exchange/session initiation.
Returns:
The newly built session, the encrypted key material and the key exchange information required by
the other device to complete the passive part of session building. The
:attr:`~omemo.session.Session.initiation` property of the returned session must return
:attr:`~omemo.session.Initiation.ACTIVE`. The :attr:`~omemo.session.Session.key_exchange` property
of the returned session must return the information required by the other party to complete its
part of the key exchange.
Raises:
KeyExchangeFailed: in case of failure related to the key exchange required for session building.
Warning:
This method may be called for a device which already has a session. In that case, the original
session must remain in storage and must remain loadable via :meth:`load_session`. Only upon
calling :meth:`store_session`, the old session must be overwritten with the new one. In summary,
multiple sessions for the same device can exist in memory, while only one session per device can
exist in storage, which can be controlled using the :meth:`store_session` method.
"""
@abstractmethod
async def build_session_passive(
self,
bare_jid: str,
device_id: int,
key_exchange: KeyExchange,
encrypted_key_material: EncryptedKeyMaterial
) -> Tuple[Session, PlainKeyMaterial]:
"""
Passively build a session.
Args:
bare_jid: The bare JID the device belongs to.
device_id: The id of the device.
key_exchange: Key exchange information for the passive session building.
encrypted_key_material: The key material to decrypt as part of the initial key exchange/session
initiation.
Returns:
The newly built session and the decrypted key material. Note that the pre key used to initiate
this session must somehow be associated with the session, such that :meth:`hide_pre_key` and
:meth:`delete_pre_key` can work.
Raises:
KeyExchangeFailed: in case of failure related to the key exchange required for session building.
DecryptionFailed: in case of backend-specific failures during decryption of the initial message.
Warning:
This method may be called for a device which already has a session. In that case, the original
session must remain in storage and must remain loadable via :meth:`load_session`. Only upon
calling :meth:`store_session`, the old session must be overwritten with the new one. In summary,
multiple sessions for the same device can exist in memory, while only one session per device can
exist in storage, which can be controlled using the :meth:`store_session` method.
"""
@abstractmethod
async def encrypt_plaintext(self, plaintext: bytes) -> Tuple[Content, PlainKeyMaterial]:
"""
Encrypt some plaintext symmetrically.
Args:
plaintext: The plaintext to encrypt symmetrically.
Returns:
The encrypted plaintext aka content, as well as the key material needed to decrypt it.
"""
@abstractmethod
async def encrypt_empty(self) -> Tuple[Content, PlainKeyMaterial]:
"""
        Encrypt an empty message for the sole purpose of session management/ratchet forwarding/key material
transportation.
Returns:
The symmetrically encrypted empty content, and the key material needed to decrypt it.
"""
@abstractmethod
async def encrypt_key_material(
self,
session: Session,
plain_key_material: PlainKeyMaterial
) -> EncryptedKeyMaterial:
"""
Encrypt some key material asymmetrically using the session.
Args:
session: The session to encrypt the key material with.
plain_key_material: The key material to encrypt asymmetrically for each recipient.
Returns:
The encrypted key material.
"""
@abstractmethod
async def decrypt_plaintext(self, content: Content, plain_key_material: PlainKeyMaterial) -> bytes:
"""
Decrypt some symmetrically encrypted plaintext.
Args:
content: The content to decrypt. Not empty, i.e. :attr:`Content.empty` will return ``False``.
plain_key_material: The key material to decrypt with.
Returns:
The decrypted plaintext.
Raises:
DecryptionFailed: in case of backend-specific failures during decryption.
"""
@abstractmethod
async def decrypt_key_material(
self,
session: Session,
encrypted_key_material: EncryptedKeyMaterial
) -> PlainKeyMaterial:
"""
Decrypt some key material asymmetrically using the session.
Args:
session: The session to decrypt the key material with.
encrypted_key_material: The encrypted key material.
Returns:
The decrypted key material
Raises:
TooManySkippedMessageKeys: if the number of message keys skipped by this message exceeds the upper
limit enforced by :attr:`max_num_per_message_skipped_keys`.
DecryptionFailed: in case of backend-specific failures during decryption.
Warning:
Make sure to respect the values of :attr:`max_num_per_session_skipped_keys` and
:attr:`max_num_per_message_skipped_keys`.
Note:
When the maximum number of skipped message keys for this session, given by
:attr:`max_num_per_session_skipped_keys`, is exceeded, old skipped message keys are deleted to
make space for new ones.
"""
@abstractmethod
async def signed_pre_key_age(self) -> int:
"""
Returns:
The age of the signed pre key, i.e. the time elapsed since it was last rotated, in seconds.
"""
@abstractmethod
async def rotate_signed_pre_key(self) -> Any:
"""
Rotate the signed pre key. Keep the old signed pre key around for one additional rotation period, i.e.
until this method is called again.
Returns:
Anything, the return value is ignored.
"""
@abstractmethod
async def hide_pre_key(self, session: Session) -> bool:
"""
Hide a pre key from the bundle returned by :meth:`get_bundle` and pre key count returned by
:meth:`get_num_visible_pre_keys`, but keep the pre key for cryptographic operations.
Args:
session: A session that was passively built using :meth:`build_session_passive`. Use this session
                to identify the pre key to hide.
Returns:
Whether the pre key was hidden. If the pre key doesn't exist (e.g. because it has already been
deleted), or was already hidden, do not throw an exception, but return `False` instead.
"""
@abstractmethod
async def delete_pre_key(self, session: Session) -> bool:
"""
Delete a pre key.
Args:
session: A session that was passively built using :meth:`build_session_passive`. Use this session
                to identify the pre key to delete.
Returns:
Whether the pre key was deleted. If the pre key doesn't exist (e.g. because it has already been
deleted), do not throw an exception, but return `False` instead.
"""
@abstractmethod
async def delete_hidden_pre_keys(self) -> Any:
"""
Delete all pre keys that were previously hidden using :meth:`hide_pre_key`.
Returns:
Anything, the return value is ignored.
"""
@abstractmethod
async def get_num_visible_pre_keys(self) -> int:
"""
Returns:
The number of visible pre keys available. The number returned here should match the number of pre
keys included in the bundle returned by :meth:`get_bundle`.
"""
@abstractmethod
async def generate_pre_keys(self, num_pre_keys: int) -> Any:
"""
Generate and store pre keys.
Args:
num_pre_keys: The number of pre keys to generate.
Returns:
Anything, the return value is ignored.
"""
@abstractmethod
async def get_bundle(self, bare_jid: str, device_id: int) -> Bundle:
"""
Args:
bare_jid: The bare JID of this XMPP account, to be included in the bundle.
device_id: The id of this device, to be included in the bundle.
Returns:
The bundle containing public information about the cryptographic state of this backend.
Warning:
Do not include pre keys hidden by :meth:`hide_pre_key` in the bundle!
"""
@abstractmethod
async def purge(self) -> Any:
"""
Remove all data related to this backend from the storage.
Returns:
Anything, the return value is ignored.
"""
@abstractmethod
async def purge_bare_jid(self, bare_jid: str) -> Any:
"""
Delete all data corresponding to an XMPP account.
Args:
bare_jid: Delete all data corresponding to this bare JID.
Returns:
Anything, the return value is ignored.
""" | PypiClean |
/IdracRedfishSupport-0.0.8.tar.gz/IdracRedfishSupport-0.0.8/ExportThermalHistoryREDFISH.py |
import argparse
import getpass
import json
import logging
import re
import requests
import sys
import time
import warnings
from datetime import datetime
from pprint import pprint
warnings.filterwarnings("ignore")
parser=argparse.ArgumentParser(description="Python script using Redfish API with OEM extension to export server thermal history to a supported network share. NOTE: export locally is not supported for this OEM action.")
parser.add_argument('-ip',help='iDRAC IP address', required=False)
parser.add_argument('-u', help='iDRAC username', required=False)
parser.add_argument('-p', help='iDRAC password. If you do not pass in argument -p, script will prompt to enter user password which will not be echoed to the screen.', required=False)
parser.add_argument('-x', help='Pass in X-Auth session token for executing Redfish calls. All Redfish calls will use X-Auth token instead of username/password', required=False)
parser.add_argument('--ssl', help='SSL cert verification for all Redfish calls, pass in value \"true\" or \"false\". By default, this argument is not required and script ignores validating SSL cert for all Redfish calls.', required=False)
parser.add_argument('--script-examples', action="store_true", help='Prints script examples')
parser.add_argument('--shareip', help='Pass in IP address of the network share', required=False)
parser.add_argument('--sharetype', help='Pass in share type of the network share. Supported values are NFS and CIFS', required=False)
parser.add_argument('--sharename', help='Pass in network share name', required=False)
parser.add_argument('--username', help='Pass in CIFS username. This argument is only required when using CIFS share.', required=False)
parser.add_argument('--password', help='Pass in CIFS username password. This argument is only required when using CIFS share.', required=False)
parser.add_argument('--workgroup', help='Pass in workgroup of your CIFS network share. This argument is optional', required=False)
parser.add_argument('--filename', help='Pass in unique file name string for exporting thermal history file', required=False)
parser.add_argument('--filetype', help='Exported file type, supported values are XML or CSV', required=False)
args=vars(parser.parse_args())
logging.basicConfig(format='%(message)s', stream=sys.stdout, level=logging.INFO)
def script_examples():
print("""\n- ExportThermalHistoryREDFISH.py -ip 192.168.0.120 -u root -p calvin --shareip 192.168.0.130 --ssl True --sharetype CIFS --sharename cifs_share_vm --username administrator --password pass --filename export_thermal_history_R640.xml --filetype XML, this example will validate SSL cert for all Redfish calls, export server thermal history in XML file format to a CIFS share.
\n- ExportThermalHistoryREDFISH.py -ip 192.168.0.120 --shareip 192.168.0.130 --sharetype NFS --sharename /nfs --filename R740_thermal.xml --filetype xml -x 25342b24713cbaeaf9568ab14770z11w, this example uses iDRAC X-auth token session to export thermal history to NFS share.
\n- ExportThermalHistoryREDFISH.py -ip 192.168.0.120 -u root --shareip 192.168.0.130 --ssl True --sharetype CIFS --sharename cifs_share_vm --username administrator --password pass --filename export_thermal_history_R640.xml --filetype XML, this example will first prompt to enter iDRAC user password (will not be returned to the screen), validate SSL cert for all Redfish calls, export server thermal history in XML file format to a CIFS share.
\n- ExportThermalHistoryREDFISH.py -ip 192.168.0.120 -u root -p calvin --shareip 192.168.0.130 --sharetype NFS --sharename /nfs --filename export_thermal_history_R640.xml --filetype CSV, this example will export thermal history in CSV file format to NFS share.""")
sys.exit(0)
def check_supported_idrac_version():
if args["x"]:
response = requests.get('https://%s/redfish/v1/Dell/Systems/System.Embedded.1/DellMetricService' % idrac_ip, verify=verify_cert, headers={'X-Auth-Token': args["x"]})
else:
response = requests.get('https://%s/redfish/v1/Dell/Systems/System.Embedded.1/DellMetricService' % idrac_ip, verify=verify_cert, auth=(idrac_username, idrac_password))
data = response.json()
if response.status_code == 401:
logging.warning("\n- WARNING, status code %s returned, check your iDRAC username/password is correct or iDRAC user has correct privileges to execute Redfish commands" % response.status_code)
sys.exit(0)
if response.status_code != 200:
logging.warning("\n- WARNING, GET command failed to check supported iDRAC version, status code %s returned" % response.status_code)
sys.exit(0)
def export_thermal_history():
global job_id
url = 'https://%s/redfish/v1/Dell/Systems/System.Embedded.1/DellMetricService/Actions/DellMetricService.ExportThermalHistory' % (idrac_ip)
method = "ExportThermalHistory"
payload={}
if args["shareip"]:
payload["IPAddress"] = args["shareip"]
if args["sharetype"]:
payload["ShareType"] = args["sharetype"]
if args["sharename"]:
payload["ShareName"] = args["sharename"]
if args["filename"]:
payload["FileName"] = args["filename"]
if args["filetype"]:
payload["FileType"] = args["filetype"].upper()
if args["username"]:
payload["Username"] = args["username"]
if args["password"]:
payload["Password"] = args["password"]
if args["workgroup"]:
payload["Workgroup"] = args["workgroup"]
if args["x"]:
headers = {'content-type': 'application/json', 'X-Auth-Token': args["x"]}
response = requests.post(url, data=json.dumps(payload), headers=headers, verify=verify_cert)
else:
headers = {'content-type': 'application/json'}
response = requests.post(url, data=json.dumps(payload), headers=headers, verify=verify_cert, auth=(idrac_username,idrac_password))
data = response.json()
if response.status_code == 202:
logging.info("\n- PASS, POST command passed for %s method, status code 202 returned" % method)
else:
logging.error("\n- ERROR, POST command failed for %s method, status code is %s" % (method, response.status_code))
data = response.json()
logging.error("\n- POST command failure results:\n %s" % data)
sys.exit(0)
try:
job_id = response.headers['Location'].split("/")[-1]
except:
logging.error("- ERROR, unable to find job ID in headers POST response, headers output is:\n%s" % response.headers)
sys.exit(0)
logging.info("- PASS, job ID %s successfuly created for %s method\n" % (job_id, method))
def loop_job_status():
"""
Job ID returned from DellLCService.ExportHWInventory action, this will loop checking the job status until marked completed.
"""
start_time = datetime.now()
while True:
if args["x"]:
response = requests.get('https://%s/redfish/v1/Managers/iDRAC.Embedded.1/Jobs/%s' % (idrac_ip, job_id), verify=verify_cert, headers={'X-Auth-Token': args["x"]})
else:
response = requests.get('https://%s/redfish/v1/Managers/iDRAC.Embedded.1/Jobs/%s' % (idrac_ip, job_id), verify=verify_cert,auth=(idrac_username, idrac_password))
current_time = (datetime.now() - start_time)
if response.status_code != 200:
logging.error("\n- FAIL, Command failed to check job status, return code is %s" % response.status_code)
logging.error("Extended Info Message: {0}".format(response.json()))
sys.exit(0)
data = response.json()
if str(current_time)[0:7] >= "0:05:00":
logging.error("\n- FAIL: Timeout of 5 minutes has been hit, script stopped\n")
sys.exit(0)
elif "Fail" in data['Message'] or "fail" in data['Message'] or data['JobState'] == "Failed" or "Unable" in data['Message']:
logging.error("- FAIL: job ID %s failed, failed message is: %s" % (job_id, data['Message']))
sys.exit(0)
elif data['JobState'] == "Completed":
if data['Message'] == "The command was successful":
logging.info("\n--- PASS, Final Detailed Job Status Results ---\n")
else:
logging.error("\n--- FAIL, Final Detailed Job Status Results ---\n")
for i in data.items():
pprint(i)
break
else:
logging.info("- INFO, job state not marked completed, current job status is running, polling again")
time.sleep(2)
if __name__ == "__main__":
if args["script_examples"]:
script_examples()
if args["ip"] or args["ssl"] or args["u"] or args["p"] or args["x"]:
idrac_ip = args["ip"]
idrac_username = args["u"]
if args["p"]:
idrac_password = args["p"]
if not args["p"] and not args["x"] and args["u"]:
idrac_password = getpass.getpass("\n- Argument -p not detected, pass in iDRAC user %s password: " % args["u"])
if args["ssl"]:
if args["ssl"].lower() == "true":
verify_cert = True
elif args["ssl"].lower() == "false":
verify_cert = False
else:
verify_cert = False
else:
verify_cert = False
check_supported_idrac_version()
else:
logging.error("\n- FAIL, invalid argument values or not all required parameters passed in. See help text or argument --script-examples for more details.")
sys.exit(0)
if args["filename"] and args["filetype"] and args["shareip"] and args["sharetype"]:
export_thermal_history()
loop_job_status()
else:
logging.error("\n- FAIL, invalid argument values or not all required parameters passed in. See help text or argument --script-examples for more details.") | PypiClean |
/CNFgen-0.9.2-py3-none-any.whl/cnfgen/graphs.py | import os
import io
import random
from io import StringIO
import copy
from bisect import bisect_right, bisect_left
import networkx
from cnfgen.localtypes import positive_int, non_negative_int
__all__ = [
"readGraph", "writeGraph",
"Graph", "DirectedGraph", "BipartiteGraph",
"supported_graph_formats",
"bipartite_random_left_regular", "bipartite_random_regular",
"bipartite_random_m_edges", "bipartite_random",
"dag_complete_binary_tree", "dag_pyramid", "dag_path"
]
#################################################################
# Import third party code
#################################################################
class BipartiteEdgeList():
"""Edge list for bipartite graphs"""
def __init__(self, B):
self.B = B
def __len__(self):
return self.B.number_of_edges()
def __contains__(self, t):
return len(t) == 2 and self.B.has_edge(t[0], t[1])
def __iter__(self):
for u in range(1, self.B.left_order() + 1):
yield from ((u, v) for v in self.B.right_neighbors(u))
class GraphEdgeList():
"""Edge list for bipartite graphs"""
def __init__(self, G):
self.G = G
def __len__(self):
return self.G.number_of_edges()
def __contains__(self, t):
return len(t) == 2 and self.G.has_edge(t[0], t[1])
def __iter__(self):
n = self.G.number_of_vertices()
G = self.G
for u in range(1, n):
pos = bisect_right(G.adjlist[u], u)
while pos < len(G.adjlist[u]):
v = G.adjlist[u][pos]
yield (u, v)
pos += 1
class DirectedEdgeList():
"""Edge list for bipartite graphs"""
def __init__(self, D, sort_by_predecessors=True):
self.D = D
self.sort_by_pred = sort_by_predecessors
def __len__(self):
return self.D.number_of_edges()
def __contains__(self, t):
return len(t) == 2 and self.D.has_edge(t[0], t[1])
def __iter__(self):
n = self.D.number_of_vertices()
if self.sort_by_pred:
successors = self.D.succ
for src in range(1, n+1):
for dest in successors[src]:
yield (src, dest)
else:
predecessors = self.D.pred
for dest in range(1, n+1):
for src in predecessors[dest]:
yield (src, dest)
class BaseGraph():
"""Base class for graphs"""
def is_dag(self):
"""Test whether the graph is directed acyclic
This is not a full test. It only checks that all directed edges (u,v)
have that u < v."""
raise NotImplementedError
def is_directed(self):
"Test whether the graph is directed"
raise NotImplementedError
def is_multigraph(self):
"Test whether the graph can have multi-edges"
return False
def is_bipartite(self):
"Test whether the graph is a bipartite object"
return False
def order(self):
return self.number_of_vertices()
def vertices(self):
return range(1, self.number_of_vertices()+1)
def number_of_vertices(self):
raise NotImplementedError
def number_of_edges(self):
raise NotImplementedError
def has_edge(self, u, v):
raise NotImplementedError
def add_edge(self, u, v):
raise NotImplementedError
def add_edges_from(self, edges):
for u, v in edges:
self.add_edge(u, v)
def edges(self):
raise NotImplementedError
def __len__(self):
return self.number_of_vertices()
def to_networkx(self):
"""Convert the graph TO a networkx object."""
raise NotImplementedError
@classmethod
def from_networkx(cls, G):
"""Create a graph object from a networkx graph"""
raise NotImplementedError
@classmethod
def normalize(cls, G):
"""Guarantees a cnfgen graph object"""
raise NotImplementedError
@classmethod
def supported_file_formats(cls):
"""File formats supported for graph I/O"""
raise NotImplementedError
@classmethod
def graph_type_name(cls):
"""File formats supported for graph I/O"""
raise NotImplementedError
@classmethod
def from_file(cls, fileorname, fileformat=None):
"""Load the graph from a file
The file format is either indicated in the `fileformat` variable or, if
that is `None`, or from the extension of the filename.
Parameters
-----------
fileorname: str or file-like object
the input file from which the graph is read. If it is a string
then the graph is read from a file with that string as
filename. Otherwise if the fileorname is a file object (or
a text stream), the graph is read from there.
Input files are assumed to be UTF-8 by default (for some
formats it is actually ascii)
fileformat: string, optional
The file format that the parser should expect to receive.
See also :py:func:`cnfgen.graph.supported_formats`. By default
it tries to autodetect it from the file name extension (when applicable)."""
# Reduce to the case of filestream
if isinstance(fileorname, str):
with open(fileorname, 'r', encoding='utf-8') as file_handle:
return cls.from_file(file_handle, fileformat)
# Discover and test file format
fileformat = guess_fileformat(fileorname, fileformat)
allowed = cls.supported_file_formats()
typename = cls.graph_type_name()
if fileformat not in allowed:
raise ValueError(
"Invalid file type."
" For {} graphs we support {}".format(typename,
allowed))
# Read file
return readGraph(fileorname, typename, fileformat)
class Graph(BaseGraph):
def is_dag(self):
return False
def is_directed(self):
return False
def __init__(self, n, name=None):
non_negative_int(n, 'n')
self.n = n
self.m = 0
self.adjlist = [[] for i in range(n+1)]
self.edgeset = set()
if name is None:
self.name = "a simple graph with {} vertices".format(n)
else:
self.name = name
def add_edge(self, u, v):
if not (1 <= u <= self.n and 1 <= v <= self.n and u != v):
raise ValueError(
"u,v must be distinct, between 1 and the number of nodes")
if (u, v) in self.edgeset:
return
u, v = min(u, v), max(u, v)
pos = bisect_right(self.adjlist[u], v)
self.adjlist[u].insert(pos, v)
pos = bisect_right(self.adjlist[v], u)
self.adjlist[v].insert(pos, u)
self.m += 1
self.edgeset.add((u, v))
self.edgeset.add((v, u))
def update_vertex_number(self, new_value):
"""Raises the number of vertices to `new_value`"""
non_negative_int(new_value, 'new_value')
for _ in range(self.n,new_value):
self.adjlist.append([])
self.n = max(self.n, new_value)
def remove_edge(self,u,v):
if not self.has_edge(u,v):
return
self.edgeset.remove((u,v))
self.edgeset.remove((v,u))
self.adjlist[u].remove(v)
self.adjlist[v].remove(u)
self.m -= 1
def has_edge(self, u, v):
return (u, v) in self.edgeset
def vertices(self):
return range(1, self.n+1)
def edges(self):
"""Outputs all edges in the graph"""
return GraphEdgeList(self)
def number_of_vertices(self):
return self.n
def number_of_edges(self):
return self.m
def to_networkx(self):
G = networkx.Graph()
G.add_nodes_from(range(1, self.n+1))
G.add_edges_from(self.edges())
return G
def neighbors(self, u):
"""Outputs the neighbors of vertex `u`
The sequence of neighbors is guaranteed to be sorted.
"""
if not(1 <= u <= self.n):
raise ValueError("vertex u not in the graph")
yield from self.adjlist[u]
def degree(self, u):
if not(1 <= u <= self.n):
raise ValueError("vertex u not in the graph")
return len(self.adjlist[u])
@classmethod
def from_networkx(cls, G):
if not isinstance(G, networkx.Graph):
raise ValueError('G is expected to be of type networkx.Graph')
G = normalize_networkx_labels(G)
C = cls(G.order())
C.add_edges_from(G.edges())
try:
C.name = G.name
except AttributeError:
C.name = '<unknown graph>'
return C
@classmethod
def graph_type_name(cls):
"""Simple graphs are laleled as 'simple'"""
return 'simple'
@classmethod
def supported_file_formats(cls):
"""File formats supported for simple graph I/O"""
# Check that DOT is a supported format
if has_dot_library():
return ['kthlist', 'gml', 'dot', 'dimacs']
else:
return ['kthlist', 'gml', 'dimacs']
@classmethod
def null_graph(cls):
return cls(0, 'the null graph')
@classmethod
def empty_graph(cls, n):
return cls(n, 'the empty graph of order '+str(n))
@classmethod
def complete_graph(cls, n):
G = cls(n, 'the complete graph of order '+str(n))
for u in range(1, n):
for v in range(u+1, n+1):
G.add_edge(u, v)
return G
@classmethod
def star_graph(cls, n):
G = cls(n+1, 'the star graph with {} arms'.format(n))
for u in range(1, n+1):
G.add_edge(u, n+1)
return G
@classmethod
    def normalize(cls, G, varname='G'):
"""Guarantees a cnfgen.graphs.Graph object
If the given graph `G` is a networkx.Graph object, this method
produces a CNFgen simple graph object, relabeling vertices so that
vertices are labeled as numbers from 1 to `n`, where `n` is the number
of vertices in `G`. If the vertices in the original graph have some
kind of order, the order is preserved.
If `G` is already a `cnfgen.graphs.Graph` object, nothing is done.
Parameters
----------
cls: a class
G : networkx.Graph or cnfgen.Graph
the graph to normalize/check
varname: str
the variable name, for error messages (default: 'G')
"""
typemsg = "type of argument '{}' must be either networx.Graph or cnfgen.Graph"
conversionmsg = "cannot convert '{}' into a cnfgen.Graph object"
if not isinstance(G, (Graph, networkx.Graph)):
raise TypeError(typemsg.format(varname))
if isinstance(G, Graph):
return G
try:
G2 = cls.from_networkx(G)
return G2
except AttributeError:
raise ValueError(conversionmsg.format(varname))
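# Illustrative sketch: building a small simple graph with the Graph API above.
def _example_triangle():
    """Build the triangle graph and collect each vertex's sorted neighbors."""
    G = Graph(3, 'a triangle')
    G.add_edge(1, 2)
    G.add_edge(2, 3)
    G.add_edge(1, 3)
    return {u: list(G.neighbors(u)) for u in G.vertices()}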
class DirectedGraph(BaseGraph):
def is_dag(self):
"""Is the graph acyclic?
The vertices in the graph are assumed to be topologically sorted,
therefore this function just determines whether there are edges going
backward with respect to this order, which can be done in O(1) because
edges can be added and not removed."""
return self.still_a_dag
def is_directed(self):
return True
def __init__(self, n, name='a simple directed graph'):
non_negative_int(n, 'n')
self.n = n
self.m = 0
self.edgeset = set()
self.still_a_dag = True
self.pred = [[] for i in range(n+1)]
self.succ = [[] for i in range(n+1)]
if name is None:
self.name = "a directed graph with {} vertices".format(n)
else:
self.name = name
def add_edge(self, src, dest):
        if not (1 <= src <= self.n and 1 <= dest <= self.n):
            raise ValueError(
                "src,dest must be between 1 and the number of nodes")
if self.has_edge(src, dest):
return
if src >= dest:
self.still_a_dag = False
pos = bisect_right(self.pred[dest], src)
self.pred[dest].insert(pos, src)
pos = bisect_right(self.succ[src], dest)
self.succ[src].insert(pos, dest)
self.m += 1
self.edgeset.add((src, dest))
def has_edge(self, src, dest):
"""True if graph contains directed edge (src,dest)"""
return (src, dest) in self.edgeset
def vertices(self):
return range(1, self.n+1)
def edges(self):
return DirectedEdgeList(self)
def edges_ordered_by_successors(self):
return DirectedEdgeList(self, sort_by_predecessors=False)
def number_of_vertices(self):
return self.n
def number_of_edges(self):
return self.m
def to_networkx(self):
G = networkx.DiGraph()
G.add_nodes_from(range(1, self.n+1))
G.add_edges_from(self.edges())
return G
def predecessors(self, u):
"""Outputs the predecessors of vertex `u`
The sequence of predecessors is guaranteed to be sorted."""
if not(1 <= u <= self.n):
raise ValueError("vertex u not in the graph")
yield from self.pred[u]
def successors(self, u):
"""Outputs the successors of vertex `u`
The sequence of successors is guaranteed to be sorted."""
if not(1 <= u <= self.n):
raise ValueError("vertex u not in the graph")
yield from self.succ[u]
def in_degree(self, u):
if not(1 <= u <= self.n):
raise ValueError("vertex u not in the graph")
return len(self.pred[u])
def out_degree(self, v):
if not(1 <= v <= self.n):
raise ValueError("vertex v not in the graph")
return len(self.succ[v])
@classmethod
def from_networkx(cls, G):
if not isinstance(G, networkx.DiGraph):
raise ValueError('G is expected to be of type networkx.DiGraph')
G = normalize_networkx_labels(G)
C = cls(G.order())
C.add_edges_from(G.edges())
try:
C.name = G.name
except AttributeError:
C.name = '<unknown graph>'
return C
@classmethod
def graph_type_name(cls):
"""Directed graphs are laleled as 'digraph'"""
return 'digraph'
@classmethod
def supported_file_formats(cls):
"""File formats supported for directed graph I/O"""
if has_dot_library():
return ['kthlist', 'gml', 'dot', 'dimacs']
else:
return ['kthlist', 'gml', 'dimacs']
@classmethod
def normalize(cls, G, varname='G'):
"""Guarantees a cnfgen.graphs.DirerctedGraph object
If the given graph `G` is a networkx.DiGraph object, this method
produces a CNFgen directed graph object, relabeling vertices so that
vertices are labeled as numbers from 1 to `n`, where `n` is the number
of vertices in `G`. If the vertices in the original graph have some
kind of order, the order is preserved.
        If all edges go from lower vertices to higher vertices, with respect
        to the labeling, then the graph is considered a directed acyclic
        graph (DAG).
If `G` is already a `cnfgen.graphs.DirectedGraph` object, nothing is done.
Parameters
----------
cls: a class
G : networkx.DiGraph or cnfgen.DirectedGraph
the graph to normalize/check
varname: str
the variable name, for error messages (default: 'G')
"""
typemsg = "type of argument '{}' must be either networx.DiGraph or cnfgen.DirectedGraph"
conversionmsg = "cannot convert '{}' into a cnfgen.DirectedGraph object"
if not isinstance(G, (DirectedGraph, networkx.DiGraph)):
raise TypeError(typemsg.format(varname))
if isinstance(G, DirectedGraph):
return G
try:
G2 = cls.from_networkx(G)
return G2
except AttributeError:
raise ValueError(conversionmsg.format(varname))
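# Illustrative sketch: a DirectedGraph whose edges all go from lower to higher
# indices is recognized as a DAG in O(1), as explained in is_dag above.
def _example_small_dag():
    D = DirectedGraph(3)
    D.add_edge(1, 2)
    D.add_edge(2, 3)
    return D.is_dag()  # True: no edge goes backward w.r.t. the vertex order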
class BaseBipartiteGraph(BaseGraph):
"""Base class for bipartite graphs"""
def __init__(self, L, R, name=None):
non_negative_int(L, 'L')
non_negative_int(R, 'R')
self.lorder = L
self.rorder = R
if name is None:
self.name = 'a bipartite graph with ({},{}) vertices'.format(L, R)
else:
self.name = name
def is_bipartite(self):
return True
def number_of_vertices(self):
return self.lorder + self.rorder
def edges(self):
return BipartiteEdgeList(self)
def left_order(self):
return self.lorder
def right_order(self):
return self.rorder
def left_degree(self, v):
return len(self.left_neighbors(v))
def right_degree(self, u):
return len(self.right_neighbors(u))
def left_neighbors(self, v):
raise NotImplementedError
def right_neighbors(self, u):
raise NotImplementedError
def parts(self):
return range(1, self.lorder + 1), range(1, self.rorder + 1)
def to_networkx(self):
G = networkx.Graph()
n, m = self.lorder, self.rorder
G.add_nodes_from(range(1, n+1), bipartite=0)
G.add_nodes_from(range(n+1, m+n+1), bipartite=1)
G.add_edges_from((u, v+n) for (u, v) in self.edges())
G.name = self.name
return G
class BipartiteGraph(BaseBipartiteGraph):
def __init__(self, L, R, name=None):
non_negative_int(L, 'L')
non_negative_int(R, 'R')
BaseBipartiteGraph.__init__(self, L, R, name)
self.ladj = {}
self.radj = {}
self.edgeset = set()
def has_edge(self, u, v):
return (u, v) in self.edgeset
def add_edge(self, u, v):
"""Add an edge to the graph.
- multi-edges are not allowed
        - neighbors of a vertex are kept in numeric order
Examples
--------
>>> G = BipartiteGraph(3,5)
>>> G.add_edge(2,3)
>>> G.add_edge(2,2)
>>> G.add_edge(2,3)
>>> G.right_neighbors(2)
[2, 3]
"""
if not (1 <= u <= self.lorder and 1 <= v <= self.rorder):
raise ValueError("Invalid choice of vertices")
if (u, v) in self.edgeset:
return
if u not in self.ladj:
self.ladj[u] = []
if v not in self.radj:
self.radj[v] = []
pv = bisect_right(self.ladj[u], v)
pu = bisect_right(self.radj[v], u)
self.ladj[u].insert(pv, v)
self.radj[v].insert(pu, u)
self.edgeset.add((u, v))
def number_of_edges(self):
return len(self.edgeset)
def right_neighbors(self, u):
"""Outputs the neighbors of a left vertex `u`
The sequence of neighbors is guaranteed to be sorted."""
if not (1 <= u <= self.lorder):
raise ValueError("Invalid choice of vertex")
return self.ladj.get(u, [])[:]
def left_neighbors(self, v):
"""Outputs the neighbors of right vertex `u`
The sequence of neighbors is guaranteed to be sorted."""
if not (1 <= v <= self.rorder):
raise ValueError("Invalid choice of vertex")
return self.radj.get(v, [])[:]
@classmethod
def from_networkx(cls, G):
"""Convert a :py:class:`networkx.Graph` into a :py:class:`cnfgen.graphs.BipartiteGraph`
In order to convert a :py:class:`networkx.Graph` object `G`,
it is necessary that all nodes in `G` have the property
`bipartite` set to either `0` or `1`.
If this is not the case, or if there are edges between the two
parts, :py:class:`ValueError` is raised.
Example
-------
>>> G = networkx.bipartite.complete_bipartite_graph(5,7)
>>> B = BipartiteGraph.from_networkx(G)
>>> print(B.order())
12
>>> print(B.left_order())
5
>>> print(B.has_edge(2,3))
True
"""
if not isinstance(G, networkx.Graph):
raise ValueError('G is expected to be of type networkx.Graph')
side = [[], []]
index = [{}, {}]
for u in G.nodes():
try:
color = G.nodes[u]['bipartite']
assert color in ['0', 0, '1', 1]
except (KeyError, AssertionError):
raise ValueError(
"Node {} lacks the 'bipartite' property set to 0 or 1".format(u))
side[int(color)].append(u)
B = cls(len(side[0]), len(side[1]))
index[0] = {u: i for (i, u) in enumerate(side[0], start=1)}
index[1] = {v: i for (i, v) in enumerate(side[1], start=1)}
for u, v in G.edges():
ucolor = 0 if (u in index[0]) else 1
vcolor = 1 if (v in index[1]) else 0
if ucolor == vcolor:
raise ValueError(
"Edge ({},{}) across the bipartition".format(u, v))
iu, iv = index[ucolor][u], index[vcolor][v]
if ucolor == 0:
B.add_edge(iu, iv)
else:
B.add_edge(iv, iu)
try:
B.name = G.name
except AttributeError:
B.name = '<unknown graph>'
return B
@classmethod
def graph_type_name(cls):
"""Bipartite graphs are laleled as 'bipartite'"""
return 'bipartite'
@classmethod
def supported_file_formats(cls):
"""File formats supported for bipartite graph I/O"""
if has_dot_library():
return ['kthlist', 'gml', 'dot', 'matrix']
else:
return ['kthlist', 'gml', 'matrix']
@classmethod
def normalize(cls, G, varname='G'):
"""Guarantees a cnfgen.graphs.BipartiteGraph object
If the given graph `G` is a networkx.Graph object with a bipartition,
this method produces a CNFgen bipartite graph object, relabeling
        vertices so that vertices of each side are labeled as numbers from 1
to `n` and 1 to `m` respectively, where `n` and `m` are the numbers of
vertices in `G` on the left and right side, respectively. If the
vertices in the original graph have some kind of order, the order
is preserved.
If `G` is already a `cnfgen.graphs.BipartiteGraph` object, nothing is done.
"""
typemsg = "type of argument '{}' must be either networx.Graph or cnfgen.BipartiteGraph"
conversionmsg = "cannot convert '{}' to a bipartite graph: inconsistent 'bipartite' labeling"
if not isinstance(G, (BipartiteGraph, networkx.Graph)):
raise TypeError(typemsg.format(varname))
if isinstance(G, BipartiteGraph):
return G
try:
G2 = cls.from_networkx(G)
return G2
except AttributeError:
raise ValueError(conversionmsg.format(varname))
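# Illustrative sketch: converting a networkx bipartite graph with from_networkx.
# The nodes of complete_bipartite_graph already carry the required
# 'bipartite' attribute set to 0 or 1.
def _example_bipartite_conversion():
    H = networkx.bipartite.complete_bipartite_graph(2, 3)
    B = BipartiteGraph.from_networkx(H)
    return B.left_order(), B.right_order(), B.number_of_edges()  # (2, 3, 6)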
class CompleteBipartiteGraph(BipartiteGraph):
def __init__(self, L, R):
BipartiteGraph.__init__(self, L, R)
self.name = 'Complete bipartite graph with ({},{}) vertices'.format(
L, R)
def has_edge(self, u, v):
return (1 <= u <= self.lorder and 1 <= v <= self.rorder)
def add_edge(self, u, v):
pass
def number_of_edges(self):
return self.lorder * self.rorder
def right_neighbors(self, u):
return range(1, self.rorder + 1)
def left_neighbors(self, v):
return range(1, self.lorder + 1)
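# Illustrative sketch: CompleteBipartiteGraph stores no edge set at all;
# adjacency queries are answered implicitly from the two side sizes.
def _example_complete_bipartite():
    K = CompleteBipartiteGraph(2, 2)
    return K.number_of_edges(), K.has_edge(1, 2)  # (4, True)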
def has_dot_library():
"""Test the presence of pydot
"""
try:
# newer version of networkx
from networkx import nx_pydot
import pydot
del pydot
return True
except ImportError:
pass
return False
#################################################################
# Graph reader/writer
#################################################################
def guess_fileformat(fileorname, fileformat=None):
"""Guess the file format for the file or filename """
if fileformat is not None:
return fileformat
try:
if isinstance(fileorname, str):
name = fileorname
else:
name = fileorname.name
return os.path.splitext(name)[-1][1:]
except (AttributeError, ValueError, IndexError):
raise ValueError(
"Cannot guess a file format from arguments. Please specify the format manually.")
def _process_graph_io_arguments(iofile, graph_type, file_format, multi_edges):
"""Test if the argument for the graph I/O functions make sense"""
# Check the file
if not isinstance(iofile, io.TextIOBase) and \
not isinstance(iofile, io.IOBase) and \
not isinstance(iofile, StringIO):
raise ValueError(
"The IO stream \"{}\" does not correspond to a file".format(
iofile))
# Check the graph type specification
if graph_type not in ['dag', 'digraph', 'simple', 'bipartite']:
raise ValueError("The graph type must be one of " +
list(_graphformats.keys()))
if multi_edges:
raise NotImplementedError("Multi edges not supported yet")
elif graph_type in ["dag", "digraph"]:
grtype = DirectedGraph
elif graph_type == "simple":
grtype = Graph
elif graph_type == "bipartite":
grtype = BipartiteGraph
else:
raise RuntimeError(
"Unknown graph type argument: {}".format(graph_type))
# Check/discover file format specification
if file_format == 'autodetect':
try:
extension = os.path.splitext(iofile.name)[-1][1:]
except AttributeError:
raise ValueError(
"Cannot guess a file format from an IO stream with no name. Please specify the format manually."
)
if extension not in grtype.supported_file_formats():
raise ValueError("Cannot guess a file format for {} graphs from the extension of \"{}\". Please specify the format manually.".
format(graph_type, iofile.name))
else:
file_format = extension
elif file_format not in grtype.supported_file_formats():
raise ValueError(
"For {} graphs we only support these formats: {}".format(
graph_type, grtype.supported_file_formats()))
return (grtype, file_format)
def normalize_networkx_labels(G):
"""Relabel all vertices as integer starting from 1"""
# Normalize GML file. All nodes are integers starting from 1
try:
G = networkx.convert_node_labels_to_integers(
G, first_label=1, ordering='sorted')
except TypeError:
# Ids cannot be sorted natively
G = networkx.convert_node_labels_to_integers(
G, first_label=1, ordering='default')
return G
def readGraph(input_file,
graph_type,
file_format='autodetect',
multi_edges=False):
"""Read a Graph from file
In the case of "bipartite" type, the graph obtained is of
:py:class:`cnfgen.graphs.BipartiteGraph`.
In the case of "simple" type, the graph is obtained of
:py:class:`cnfgen.graphs.Graph`.
In the case of "dag" or "directed" type, the graph obtained is of
:py:class:`cnfgen.graphs.DirectedGraph`.
The supported file formats are enumerated by the respective class method
``supported_file_formats``
In the case of "dag" type, the graph read in input must have
increasing edges, in the sense that all edges must be such that
the source has lower identifier than the sink. (I.e. the numeric
identifiers of the vertices are a topological order for the
graph)
Parameters
-----------
input_file: str or file-like object
the input file from which the graph is read. If it is a string
then the graph is read from a file with that string as
filename. Otherwise if the input_file is a file object (or
a text stream), the graph is read from there.
Input files are assumed to be UTF-8 by default.
graph_type: string in {"simple","digraph","dag","bipartite"}
file_format: string, optional
The file format that the parser should expect to receive.
        See also the method :py:meth:`supported_file_formats`. By default
it tries to autodetect it from the file name extension (when applicable).
    multi_edges: bool, optional
        are multiple edges allowed in the graph? By default this is not allowed.
Returns
-------
a graph object
one type among Graph, DirectedGraph, BipartiteGraph
Raises
------
    ValueError
        raised when ``input_file`` is neither a file object nor
        a string, or when ``graph_type`` and ``file_format`` are
        invalid choices.
IOError
it is impossible to read the ``input_file``
See Also
--------
writeGraph, is_dag, has_bipartition
"""
if multi_edges:
raise NotImplementedError("Multi edges not supported yet")
# file name instead of file object
if isinstance(input_file, str):
with open(input_file, 'r', encoding='utf-8') as file_handle:
return readGraph(file_handle, graph_type, file_format, multi_edges)
graph_class, file_format = _process_graph_io_arguments(input_file,
graph_type,
file_format,
multi_edges)
if file_format == 'dot':
# This is a workaround. In theory a broken dot file should
# cause a pyparsing.ParseError but the dot_reader used by
# networkx seems to mismanage that and to cause a TypeError
#
try:
G = networkx.nx_pydot.read_dot(input_file)
try:
            # work around for a weird parse error in pydot, which
            # adds an additional vertex '\\n' in the graph.
G.remove_node('\\n')
except networkx.exception.NetworkXError:
pass
G = graph_class.normalize(G)
except TypeError:
raise ValueError('Parse Error in dot file')
elif file_format == 'gml':
# Networkx's GML reader expects to read from ascii encoded
# binary file. We could have sent the data to a temporary
# binary buffer but for some reasons networkx's GML reader
# function is poorly written and does not like such buffers.
# It turns out we can pass the data as a list of
# encoded ascii lines.
#
# The 'id' field in the vertices are supposed to be an integer
# and will be used as identifiers for the vertices in Graph
# object too.
#
try:
G = networkx.read_gml((line.encode('ascii')
for line in input_file), label='id')
G = graph_class.normalize(G)
except networkx.NetworkXError as errmsg:
raise ValueError("[Parse error in GML input] {} ".format(errmsg))
except UnicodeEncodeError as errmsg:
raise ValueError(
"[Non-ascii chars in GML file] {} ".format(errmsg))
elif file_format == 'kthlist' and graph_type == 'bipartite':
G = _read_bipartite_kthlist(input_file)
elif file_format == 'kthlist' and graph_type != 'bipartite':
G = _read_nonbipartite_kthlist(input_file, graph_class)
elif file_format == 'dimacs':
G = _read_graph_dimacs_format(input_file, graph_class)
elif file_format == 'matrix':
G = _read_graph_matrix_format(input_file)
else:
raise RuntimeError(
"[Internal error] Format {} not implemented".format(file_format))
if graph_type == "dag" and not G.is_dag():
raise ValueError(
"[Input error] Graph must be explicitly acyclic (src->dest edges where src<dest)")
return G
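# Illustrative sketch: parsing a tiny simple graph from an in-memory DIMACS
# stream, so no file on disk is needed.
def _example_read_dimacs():
    text = io.StringIO("c tiny graph\np edge 3 2\ne 1 2\ne 2 3\n")
    return readGraph(text, 'simple', 'dimacs')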
def writeGraph(G, output_file, graph_type, file_format='autodetect'):
"""Write a graph to a file
Parameters
-----------
G : BaseGraph
output_file: file object
the output file to which the graph is written. If it is a string
then the graph is written to a file with that string as
filename. Otherwise if ``output_file`` is a file object (or
a text stream), the graph is written there.
The file is written in UTF-8 by default.
graph_type: string in {"simple","digraph","dag","bipartite"}
see also :py:func:`cnfgen.graph.supported_formats`
file_format: string, optional
The file format that the parser should expect to receive.
See also :py:func:`cnfgen.graph.supported_formats`. By default
it tries to autodetect it from the file name extension (when applicable).
Returns
-------
None
Raises
------
    ValueError
        raised when ``output_file`` is neither a file object nor
        a string, or when ``graph_type`` and ``file_format`` are
        invalid choices.
IOError
it is impossible to write on the ``output_file``
See Also
--------
readGraph
"""
if not isinstance(G, BaseGraph):
raise TypeError("G must be a cnfgen.graphs.BaseGraph")
# file name instead of file object
if isinstance(output_file, str):
with open(output_file, 'w', encoding='utf-8') as file_handle:
return writeGraph(G, file_handle, graph_type, file_format)
_, file_format = _process_graph_io_arguments(output_file, graph_type,
file_format, False)
if file_format == 'dot':
G = G.to_networkx()
networkx.nx_pydot.write_dot(G, output_file)
elif file_format == 'gml':
# Networkx's GML writer expects to write to an ascii encoded
# binary file. Thus we need to let Networkx write to
# a temporary binary ascii encoded buffer and then convert the
# content before sending it to the output file.
tempbuffer = io.BytesIO()
G = G.to_networkx()
networkx.write_gml(G, tempbuffer)
print(tempbuffer.getvalue().decode('ascii'), file=output_file)
elif file_format == 'kthlist' and graph_type != 'bipartite':
_write_graph_kthlist_nonbipartite(G, output_file)
elif file_format == 'kthlist' and graph_type == 'bipartite':
_write_graph_kthlist_bipartite(G, output_file)
elif file_format == 'dimacs':
_write_graph_dimacs_format(G, output_file)
elif file_format == 'matrix':
_write_graph_matrix_format(G, output_file)
else:
raise RuntimeError(
"[Internal error] Format {} not implemented".format(file_format))
#
# In-house parsers
#
def _kthlist_parse(inputfile):
"""Read a graph from file, and produce the datas.
First yeild (#vertex,first comment line)
Then generates a sequence of (s,target,lineno)
Raises:
ValueError is parsing fails for some reason
"""
# vertex number
size = -1
name = ""
for i, l in enumerate(inputfile.readlines()):
# first non empty comment line is the graph name
# must be before the graph size
if l[0] == 'c':
if size < 0 and len(name) == 0 and len(l[2:].strip()) != 0:
name += l[2:]
continue
# empty line
if len(l.strip()) == 0:
continue
if ':' not in l:
# vertex number spec
if size >= 0:
raise ValueError(
"Line {} contains a second spec directive.".format(i))
try:
size = int(l.strip())
if size < 0:
raise ValueError
except ValueError:
raise ValueError(
"Non negative number expected at line {}.".format(i))
yield (size, name)
continue
# Load edges from this line
left, right = l.split(':')
try:
left = int(left.strip())
right = [int(s) for s in right.split()]
except ValueError:
raise ValueError("Non integer vertex ID at line {}.".format(i))
if len(right) < 1 or right[-1] != 0:
raise ValueError("Line {} must end with 0.".format(i))
if left < 1 or left > size:
raise ValueError(
"Vertex ID out of range [1,{}] at line {}.".format(size, i))
right.pop()
if len([x for x in right if x < 1 or x > size]) > 0:
raise ValueError(
"Vertex ID out of range [1,{}] at line {}.".format(size, i))
yield left, right, i
def _read_bipartite_kthlist(inputfile):
"""Read a bipartite graph from file, in the KTH reverse adjacency lists format.
    Assumes the adjacency list is given in order.
    - vertices are listed in increasing order
    - if bipartite, only the adjacency list of the left side must be
      given, no list for a vertex of the right side is allowed.
Parameters
----------
inputfile : file object
file handle of the input
Raises
------
ValueError
Error parsing the file
"""
# vertex number
parser = _kthlist_parse(inputfile)
size, name = next(parser)
bipartition_ambiguous = [1, size]
edges = {}
previous = 0
for left, right, lineno in parser:
if left <= previous:
raise ValueError(
"Vertex at line {} is smaller than the previous one.".format(lineno))
# Check the bi-coloring on both side
if left > bipartition_ambiguous[1]:
raise ValueError(
"Bipartition violation al line {}. Vertex {} cannot be on the left side."
.format(lineno, left))
bipartition_ambiguous[0] = max(bipartition_ambiguous[0], left + 1)
for v in right:
if v < bipartition_ambiguous[0]:
raise ValueError(
"Bipartition violation. Invalid edge ({},{}) at line {}."
.format(left, v, lineno))
bipartition_ambiguous[1] = min(bipartition_ambiguous[1], v - 1)
# after vertices, add the edges
edges[left] = right
# fix the bipartition
    # unassigned vertices go to the right side
L = bipartition_ambiguous[0]-1
R = size - bipartition_ambiguous[0]+1
G = BipartiteGraph(L, R, name)
for u in edges:
for v in edges[u]:
G.add_edge(u, v - L)
if size != G.number_of_vertices():
raise ValueError("{} vertices expected. Got {} instead.".format(
size, G.number_of_vertices()))
return G
def _read_nonbipartite_kthlist(inputfile, graph_class):
"""Read a graph from file, in the KTH reverse adjacency lists format.
Only for simple and directed graph
    Assumes the adjacency list is given in order.
    - vertices are listed in increasing order
    - if directed graph the adjacency list specifies incoming neighbours
- if DAG, the graph must be given in topological order source->sink
Parameters
----------
inputfile : file object
file handle of the input
graph_class: class
either Graph or DirectedGraph
Raises
------
ValueError
Error parsing the file
"""
assert graph_class in [Graph, DirectedGraph]
# vertex number
parser = _kthlist_parse(inputfile)
size, name = next(parser)
G = graph_class(size, name)
previous = 0
for succ, predecessors, lineno in parser:
if succ <= previous:
raise ValueError(
"Vertex at line {} is smaller than the previous one.".format(lineno))
# after vertices, add the edges
for v in predecessors:
G.add_edge(v, succ)
previous = succ
if size != G.order():
raise ValueError("{} vertices expected. Got {} instead.".format(
size, G.order()))
return G
def _read_graph_dimacs_format(inputfile, graph_class):
"""Read a graph simple from file, in the DIMACS edge format.
Parameters
----------
inputfile : file object
file handle of the input
graph_class: class object
either Graph or DirectedGraph
"""
assert graph_class in [Graph, DirectedGraph]
G = None
name = ''
n = -1
m = -1
m_cnt = 0
# is the input topologically sorted?
for i, l in enumerate(inputfile.readlines()):
        l = l.strip()
        # skip empty lines, which would otherwise crash the indexing below
        if len(l) == 0:
            continue
        # add the comment to the header
        if l[0] == 'c':
name += l[2:]
continue
# parse spec line
if l[0] == 'p':
if G is not None:
raise ValueError(
"[Syntax error] " +
"Line {} contains a second spec line.".format(i+1))
_, fmt, nstr, mstr = l.split()
if fmt != 'edge':
raise ValueError("[Input error] " +
"Dimacs \'edge\' format expected at line {}.".format(i+1))
n = int(nstr)
m = int(mstr)
G = graph_class(n, name)
continue
# parse spec line
if l[0] == 'e':
if G is None:
raise ValueError("[Input error] " +
"Edge before preamble at line".format(i))
m_cnt += 1
_, v, w = l.split()
try:
G.add_edge(int(v), int(w))
except ValueError:
raise ValueError("[Syntax error] " +
"Line {} syntax error: edge must be 'e u v' where u, v are vertices".format(i))
if m != m_cnt:
raise ValueError("[Syntax error] " +
"{} edges were expected.".format(m))
return G
def _read_graph_matrix_format(inputfile):
"""Read a bipartite graph from file, in the adjiacency matrix format.
This is an example of an adjacency matrix for a bipartite graph
with 9 vertices on one side and 15 on the another side.
.. 9 15
1 1 0 1 0 0 0 1 0 0 0 0 0 0 0
0 1 1 0 1 0 0 0 1 0 0 0 0 0 0
0 0 1 1 0 1 0 0 0 1 0 0 0 0 0
0 0 0 1 1 0 1 0 0 0 1 0 0 0 0
0 0 0 0 1 1 0 1 0 0 0 1 0 0 0
0 0 0 0 0 1 1 0 1 0 0 0 1 0 0
0 0 0 0 0 0 1 1 0 1 0 0 0 1 0
0 0 0 0 0 0 0 1 1 0 1 0 0 0 1
1 0 0 0 0 0 0 0 1 1 0 1 0 0 0
Parameters
----------
inputfile: file object
the file containing the graph specification
Returns
-------
G : BipartiteGraph
"""
def scan_integer(inputfile):
num_buffer = []
line_cnt = 0
while True:
if len(num_buffer) == 0:
line = inputfile.readline()
if len(line) == 0:
return
line_cnt += 1
tokens = line.split()
if len(tokens) == 0 or tokens[0][0] == '#':
continue # comment line
try:
num_buffer.extend((int(lit), line_cnt) for lit in tokens)
except ValueError:
raise ValueError("[Syntax error] " +
"Line {} contains a non numeric entry.".
format(line_cnt))
yield num_buffer.pop(0)
scanner = scan_integer(inputfile)
try:
n = next(scanner)[0]
m = next(scanner)[0]
G = BipartiteGraph(n, m)
G.name = ''
# read edges
for i in range(1, n + 1):
for j in range(1, m + 1):
(b, l) = next(scanner)
if b == 1:
G.add_edge(i, j)
elif b == 0:
pass
else:
raise ValueError(
"[Input error at line {}] Only 0 or 1 are allowed".
format(l))
except StopIteration:
raise ValueError("[Input error] Unexpected end of the matrix")
    # check that there is no more data
try:
(b, l) = next(scanner)
raise ValueError(
"[Input error at line {}] There are more than {}x{} entries".
format(l, n, m))
except StopIteration:
pass
return G
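# Illustrative sketch: the matrix reader above applied to a 2x3 bipartite
# adjacency matrix given as an in-memory stream.
def _example_read_matrix():
    text = io.StringIO("2 3\n1 0 1\n0 1 0\n")
    B = _read_graph_matrix_format(text)
    return B.number_of_edges()  # 3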
#
# In-house graph writers
#
def _write_graph_kthlist_nonbipartite(G, output_file):
"""Wrire a graph to a file, in the KTH reverse adjacency lists format.
Parameters
----------
G : Graph or DirectGraph
the graph to write on file
output_file : file object
file handle of the output
"""
assert isinstance(G, (Graph, DirectedGraph))
print("c {}".format(G.name), file=output_file)
print("{}".format(G.order()), file=output_file)
from io import StringIO
output = StringIO()
for v in G.vertices():
if G.is_directed():
nbors = G.predecessors(v)
else:
nbors = G.neighbors(v)
output.write(str(v) + " :")
output.write("".join([' '+str(i) for i in nbors]))
output.write(" 0\n")
print(output.getvalue(), file=output_file)
def _write_graph_kthlist_bipartite(G, output_file):
"""Wrire a bipartite graph to a file,
in the KTH reverse adjacency lists format.
Parameters
----------
G : BipartiteGraph
the graph to write on file
output_file : file object
file handle of the output
"""
assert isinstance(G, BipartiteGraph)
print("c {}".format(G.name), file=output_file)
print("{}".format(G.order()), file=output_file)
from io import StringIO
output = StringIO()
U, _ = G.parts()
offset = len(U)
for u in U:
output.write(str(u) + " :")
output.write("".join([' '+str(v + offset)
for v in G.right_neighbors(u)]))
output.write(" 0\n")
print(output.getvalue(), file=output_file)
def _write_graph_dimacs_format(G, output_file):
"""Wrire a graph to a file, in DIMACS format.
Parameters
----------
G : Graph or DirectGraph
the graph to write on file
output_file : file object
file handle of the output
"""
assert isinstance(G, (Graph, DirectedGraph))
print("c {}".format(G.name).strip(), file=output_file)
n = G.number_of_vertices()
m = G.number_of_edges()
print("p edge {} {}".format(n, m), file=output_file)
for v, w in G.edges():
print("e {} {}".format(v, w), file=output_file)
def _write_graph_matrix_format(G, output_file):
"""Wrire a graph to a file, in \"matrix\" format.
Parameters
----------
G : BipartiteGraph
the graph to write in output
output_file : file object
file handle of the output
"""
assert isinstance(G, BipartiteGraph)
print("{} {}".format(G.left_order(), G.right_order()),
file=output_file)
L, R = G.parts()
for u in L:
adj_row = []
for v in R:
if G.has_edge(u, v):
adj_row.append("1")
else:
adj_row.append("0")
print(" ".join(adj_row), file=output_file)
#
# Bipartite graph generator
# (we do not want to use networkx)
#
def bipartite_random_left_regular(l, r, d, seed=None):
"""Returns a random bipartite graph with constant left degree.
Each vertex on the left side has `d` neighbors on the right side,
picked uniformly at random without repetition.
Each vertex in the graph has an attribute `bipartite` which is 0
for the vertices on the left side and 1 for the vertices on the
right side.
Parameters
----------
l : int
vertices on the left side
r : int
vertices on the right side
d : int
degree on the left side.
seed : hashable object
seed the random generator
Returns
-------
BipartiteGraph
Raises
------
ValueError
        unless ``l``, ``r`` and ``d`` are non-negative.
"""
import random
if seed is not None:
random.seed(seed)
if l < 0 or r < 0 or d < 0:
raise ValueError(
"bipartite_random_left_regular(l,r,d) needs l,r,d >=0.")
G = BipartiteGraph(l, r)
G.name = "bipartite_random_left_regular({},{},{})".format(l, r, d)
d = min(r, d)
L, R = G.parts()
for u in L:
for v in sorted(random.sample(R, d)):
G.add_edge(u, v)
return G
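# Illustrative sketch: with a fixed seed, every left vertex ends up with
# exactly d neighbors on the right side.
def _example_left_regular():
    G = bipartite_random_left_regular(4, 6, 2, seed=42)
    L, _ = G.parts()
    return [len(G.right_neighbors(u)) for u in L]  # [2, 2, 2, 2]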
def bipartite_random_m_edges(L, R, m, seed=None):
"""Returns a random bipartite graph with M edges
Build a random bipartite graph with :math:`L` left vertices,
:math:`R` right vertices and :math:`m` edges sampled at random
without repetition.
Parameters
----------
L : int
vertices on the left side
R : int
vertices on the right side
m : int
number of edges.
seed : hashable object
seed the random generator
Returns
-------
BipartiteGraph
Raises
------
ValueError
        unless ``L``, ``R`` >= 1 and 0 <= ``m`` <= ``L*R``.
"""
import random
if seed is not None:
random.seed(seed)
if L < 1 or R < 1 or m < 0 or m > L * R:
raise ValueError(
"bipartite_random_m_edges(L,R,m) needs L, R >= 1, 0<=m<=L*R")
G = BipartiteGraph(L, R)
G.name = "bipartite_random_m_edges({},{},{})".format(L, R, m)
U, V = G.parts()
    if m > L * R // 3:
        # Sampling strategy (dense): materialize all candidate edges,
        # since random.sample requires a sequence, not a generator
        E = [(u, v) for u in U for v in V]
        for u, v in random.sample(E, m):
G.add_edge(u, v)
else:
# Sampling strategy (sparse)
count = 0
while count < m:
u = random.randint(1, L)
v = random.randint(1, R)
if not G.has_edge(u, v):
G.add_edge(u, v)
count += 1
assert G.number_of_edges() == m
return G
def bipartite_random(L, R, p, seed=None):
"""Returns a random bipartite graph with independent edges
Build a random bipartite graph with :math:`L` left vertices,
:math:`R` right vertices, where each edge is sampled independently
with probability :math:`p`.
Parameters
----------
L : int
vertices on the left side
R : int
vertices on the right side
p : float
probability to pick an edge
seed : hashable object
seed the random generator
Returns
-------
BipartiteGraph
Raises
------
ValueError
        unless ``L``, ``R`` >= 1 and 0 <= ``p`` <= 1.
"""
import random
if seed is not None:
random.seed(seed)
if L < 1 or R < 1 or p < 0 or p > 1:
raise ValueError(
"bipartite_random_graph(L,R,p) needs L, R >= 1, p in [0,1]")
G = BipartiteGraph(L, R)
G.name = "bipartite_random_graph({},{},{})".format(L, R, p)
U, V = G.parts()
for u in U:
for v in V:
if random.random() <= p:
G.add_edge(u, v)
return G
def bipartite_shift(N, M, pattern=None):
"""Returns a bipartite graph where edges are a fixed shifted sequence.
The graph has :math:`N` vertices on the left (numbered from
:math:`1` to :math:`N`), and :math:`M` vertices on the right
(numbered from :math:`1` to :math:`M`),
    Each vertex :math:`v` on the left side has edges to vertices
    :math:`v+d_1`, :math:`v+d_2`, :math:`v+d_3`, ..., where the vertex
    indices on the right side wrap around over :math:`[1..M]`.
    Notice that this construction does not produce multi-edges even if
    two offsets end up on the same right vertex.
Parameters
----------
N : int
vertices on the left side
M : int
vertices on the right side
pattern : list(int)
        pattern of neighbor offsets (default: no edges)
Returns
-------
BipartiteGraph
Raises
------
    ValueError
        if ``N`` or ``M`` is smaller than one.
    """
    if pattern is None:
        pattern = []
    if N < 1 or M < 1:
        raise ValueError("bipartite_shift(N,M,pattern) needs N,M >= 1.")
G = BipartiteGraph(N, M)
G.name = "bipartite_shift_regular({},{},{})".format(N, M, pattern)
L, R = G.parts()
    pattern = sorted(pattern)  # avoid mutating the caller's list
for u in L:
for offset in pattern:
G.add_edge(u, 1 + (u - 1 + offset) % M)
return G
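# Illustrative sketch: with pattern [1, 2], left vertex u is joined to right
# vertices u+1 and u+2 (mod M).
def _example_shift():
    G = bipartite_shift(3, 4, [1, 2])
    return sorted(G.right_neighbors(1))  # [2, 3]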
def bipartite_random_regular(l, r, d, seed=None):
"""Returns a random bipartite graph with constant degree on both sides.
    The graph is d-regular on the left side and regular on the right
    side, so d*l / r must be an integer.
Parameters
----------
l : int
vertices on the left side
r : int
vertices on the right side
d : int
degree of vertices at the left side
seed : hashable object
seed of random generator
Returns
-------
BipartiteGraph
Raises
------
ValueError
        if one among ``l``, ``r`` and ``d`` is negative or
        if ``r`` does not divide ``l*d``
References
----------
[1] http://...
"""
import random
if seed is not None:
random.seed(seed)
if l < 0 or r < 0 or d < 0:
raise ValueError("bipartite_random_regular(l,r,d) needs l,r,d >=0.")
if (l * d) % r != 0:
raise ValueError(
"bipartite_random_regular(l,r,d) needs r to divid l*d.")
G = BipartiteGraph(l, r)
G.name = "bipartite_random_regular({},{},{})".format(l, r, d)
L, R = G.parts()
A = list(L) * d
B = list(R) * (l * d // r)
assert len(B) == l * d
for i in range(l * d):
# Sample an edge, do not add it if it existed
# We expect to sample at most d^2 edges
for retries in range(3 * d * d):
ea = random.randint(i, l * d - 1)
eb = random.randint(i, l * d - 1)
if not G.has_edge(A[ea], B[eb]):
G.add_edge(A[ea], B[eb])
A[i], A[ea] = A[ea], A[i]
B[i], B[eb] = B[eb], B[i]
break
else:
# Sampling takes too long, maybe no good edge exists
failure = True
for ea in range(i, l * d):
for eb in range(i, l * d):
if not G.has_edge(A[ea], B[eb]):
failure = False
break
if not failure:
break
if failure:
return bipartite_random_regular(l, r, d)
return G
def dag_pyramid(height):
"""Generates the pyramid DAG
Vertices are indexed from the bottom layer, starting from index 1
Parameters
----------
height : int
the height of the pyramid graph (>=0)
Returns
-------
cnfgen.graphs.DirectedGraph
Raises
------
ValueError
"""
if height < 0:
raise ValueError("The height of the tree must be >= 0")
n = (height+1)*(height+2) // 2 # number of vertices
D = DirectedGraph(n, 'Pyramid of height {}'.format(height))
# edges
leftsrc = 1
dest = height+2
for layer in range(1, height+1):
for i in range(1, height-layer+2):
D.add_edge(leftsrc, dest)
D.add_edge(leftsrc+1, dest)
leftsrc += 1
dest += 1
leftsrc += 1
return D
def dag_complete_binary_tree(height):
"""Generates the complete binary tree DAG
Vertices are indexed from the bottom layer, starting from index 1
Parameters
----------
height : int
the height of the tree
Returns
-------
cnfgen.graphs.DirectedGraph
Raises
------
ValueError
"""
if height < 0:
raise ValueError("The height of the tree must be >= 0")
# vertices plus 1
N = 2 * (2**height)
name = 'Complete binary tree of height {}'.format(height)
D = DirectedGraph(N-1, name)
# edges
leftsrc = 1
for dest in range(N // 2 + 1, N):
D.add_edge(leftsrc, dest)
D.add_edge(leftsrc+1, dest)
leftsrc += 2
return D
def dag_path(length):
"""Generates a directed path DAG
Vertices are indexed from 1..length+1
Parameters
----------
length : int
the length of the path
Returns
-------
cnfgen.graphs.DirectedGraph
Raises
------
ValueError
"""
if length < 0:
raise ValueError("The lenght of the path must be >= 0")
name = 'Directed path of length {}'.format(length)
D = DirectedGraph(length+1, name)
# edges
for i in range(1, length+1):
D.add_edge(i, i + 1)
return D
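# Illustrative sketch: the DAG generators above index vertices from 1, and all
# their edges point from lower to higher indices.
def _example_dag_path():
    P = dag_path(2)
    return P.number_of_vertices(), P.is_dag()  # (3, True)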
def split_random_edges(G,k, seed=None):
    """Split k random edges of G
    If :math:`G` is a simple graph, it picks k random edges (and fails
    if there are not enough of them), and splits each such edge in two,
    adding a new vertex in the middle.
    Parameters
    ----------
    G : Graph
        a graph with at least :math:`k` edges
    k : int
        the number of edges to sample
seed : hashable object
seed of random generator
Example
-------
>>> G = Graph(5)
>>> G.add_edges_from([(1,4),(4,5),(2,4),(2,3)])
>>> G.number_of_edges()
4
>>> split_random_edges(G,2)
>>> G.number_of_edges()
6
>>> G.number_of_vertices()
7
"""
if seed is not None:
random.seed(seed)
if not isinstance(G,Graph):
raise TypeError("Edge splitting is only implemented for simple graphs")
non_negative_int(k,'k')
if k > G.number_of_edges():
raise ValueError("The graph does not have {} edges.".format(k))
tosplit = random.sample(list(G.edges()),k)
nv = G.number_of_vertices()
G.update_vertex_number(nv+k)
x = nv + 1
for u,v in tosplit:
G.remove_edge(u,v)
G.add_edge(u,x)
G.add_edge(x,v)
x += 1
def add_random_missing_edges(G, m, seed=None):
"""Add m random missing edges to G
If :math:`G` is not complete and has at least :math:`m` missing
edges, :math:`m` of them are sampled and added to the graph.
Parameters
----------
G : Graph
a graph with at least :math:`m` missing edges
m : int
the number of missing edges to sample
seed : hashable object
seed of random generator
Raises
------
ValueError
if :math:`G` doesn't have :math:`m` missing edges
RuntimeError
Sampling failure in the sparse case
"""
if seed is not None:
random.seed(seed)
if m < 0:
raise ValueError("You can only sample a non negative number of edges.")
total_number_of_edges = None
if G.is_bipartite():
Left, Right = G.parts()
total_number_of_edges = len(Left) * len(Right)
def edge_sampler():
u = random.sample(Left, 1)[0]
v = random.sample(Right, 1)[0]
return (u, v)
def available_edges():
return [(u, v) for u in Left for v in Right if not G.has_edge(u, v)]
else:
V = G.number_of_vertices()
        total_number_of_edges = V * (V - 1) // 2
def edge_sampler():
return random.sample(range(1, V+1), 2)
def available_edges():
result = []
for u in range(1, V):
for v in range(u+1, V+1):
if not G.has_edge(u, v):
result.append((u, v))
return result
# How many edges we want in the end?
goal = G.number_of_edges() + m
if goal > total_number_of_edges:
raise ValueError(
"The graph does not have {} missing edges to sample.".format(m))
# Sparse case: sample and retry
for _ in range(10 * m):
if G.number_of_edges() >= goal:
break
u, v = edge_sampler()
if not G.has_edge(u, v):
G.add_edge(u, v)
if G.number_of_edges() < goal:
# Very unlikely case: sampling process failed and the solution
# is to use the sampling process tailored for denser graph, so
# that a correct result is guaranteed. This requires
# generating all available edges
for u, v in random.sample(available_edges(),
goal - G.number_of_edges()):
G.add_edge(u, v)
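# Illustrative sketch: topping up an empty 4-vertex simple graph with three
# random edges (seeded for reproducibility).
def _example_add_missing_edges():
    G = Graph(4)
    add_random_missing_edges(G, 3, seed=7)
    return G.number_of_edges()  # 3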
def supported_graph_formats():
"""File formats supported for graph I/O
Given as a dictionary that maps graph types to the respective
supported formats.
E.g. 'dag' -> ['dimacs', 'kthlist']
"""
return {'simple': Graph.supported_file_formats(),
'digraph': DirectedGraph.supported_file_formats(),
'dag': DirectedGraph.supported_file_formats(),
'bipartite': BipartiteGraph.supported_file_formats()} | PypiClean |
# File: DesignSpark.ESDK-23.2.1-py3-none-any.whl/DesignSpark/ESDK/MAIN.py
import smbus2
import toml
import threading
import re
import subprocess
import pkg_resources
import imp
import inspect
import os
import RPi.GPIO as GPIO
from gpsdclient import GPSDClient
from . import AppLogger
from . import MAIN, THV, CO2, PM2, NO2, NRD, FDH
possibleModules = {
"THV": 0x44,
"CO2": 0x62,
"PM2": 0x69,
"NO2": 0x40,
"NRD": 0x60,
"FDH": 0x5D
}
moduleTypeDict = {
'THV': THV,
'CO2': CO2,
'PM2': PM2,
'NO2': NO2,
"NRD": NRD,
"FDH": FDH
}
# GPIOs used for board features
SENSOR_3V3_EN = 7
SENSOR_5V_EN = 16
BUZZER_PIN = 26
GPIO_LIST = [SENSOR_3V3_EN, SENSOR_5V_EN, BUZZER_PIN]
strip_unicode = re.compile("([^-_a-zA-Z0-9!@#%&=,/'\";:~`\$\^\*\(\)\+\[\]\.\{\}\|\?\<\>\\]+|[^\s]+)")
class ModMAIN:
""" This class handles the ESDK mainboard, and it's various features.
:param config: A dictionary containing configuration data with a minimum of:
.. code-block:: text
{
"esdk":{
"GPS":False
}
}
:type config: dict
:param debug: Debug logging control, defaults to False
:type debug: bool, optional
:param loggingLevel: One of 'off', 'error' or 'full' to control file logging, defaults to 'full'
:type loggingLevel: str, optional
:param pluginDir: A string value containing a file path to a plugin directory, defaults to None
:type pluginDir: str, optional
"""
def __init__(self, config, debug=False, loggingLevel='full', pluginDir=None):
self.logger = AppLogger.getLogger(__name__, debug, loggingLevel)
try:
self.bus = smbus2.SMBus(1)
GPIO.setmode(GPIO.BCM)
GPIO.setup(GPIO_LIST, GPIO.OUT)
self.buzzer_pwm = GPIO.PWM(BUZZER_PIN, 1000)
except Exception as e:
raise e
self.moduleNames = []
self.sensorModules = {}
self.sensorData = {}
self.configDict = config
self.location = {}
self.gpsStatus = {"gpsStatus": {}}
self.pluginDir = pluginDir
self.pluginsModuleList = []
self.plugins = []
self._parseConfig()
def _parseConfig(self):
""" Parse config when mainboard initialised """
if 'ESDK' in self.configDict:
if 'gps' in self.configDict['ESDK']:
if self.configDict['ESDK']['gps'] is not None:
if self.configDict['ESDK']['gps'] == True:
self.logger.info("GPS is enabled")
self.gps = GPSDClient(host="localhost")
gpsHandlerThreadHandle = threading.Thread(target=self._gpsHandlerThread, daemon=True)
gpsHandlerThreadHandle.name = "gpsHandlerThread"
gpsHandlerThreadHandle.start()
def _gpsHandlerThread(self):
""" Thread for polling GPS module. """
self.logger.debug("Started GPS handling thread")
while True:
try:
for result in self.gps.dict_stream():
if result["class"] == "TPV":
self.location['lat'] = result.get("lat", "n/a")
self.location['lon'] = result.get("lon", "n/a")
self.logger.debug("GPS location {}".format(self.location))
self.gpsStatus['gpsStatus'].update({'mode': result.get("mode", 0)})
if result["class"] == "SKY":
satellitesList = result.get("satellites", "")
satellitesUsedCount = 0
for satellite in satellitesList:
if satellite['used']:
satellitesUsedCount = satellitesUsedCount + 1
self.gpsStatus['gpsStatus'].update({'satellitesUsed': satellitesUsedCount})
except Exception as e:
self.logger.error("Error getting GPS location, reason: {}".format(e))
def _probeModules(self):
""" Probes I2C bus to attempt to find sensor modules. """
self.moduleNames.clear()
self.logger.debug("Starting module probe")
for module, addr in possibleModules.items():
try:
# ADC used on NO2 board is an annoying edge case, does not seemingly acknowledge 0x0
if module != "NO2":
self.bus.write_byte(addr, 0)
self.moduleNames.append(module)
else:
# Instead issue reset command, and check for an acknowledgement
self.bus.write_byte(addr, 0x06)
self.moduleNames.append(module)
except Exception as e:
# Ignore any that fail - the modules aren't present on the bus
pass
self.logger.info("Found modules {}".format(self.moduleNames))
def getLocation(self):
""" Returns a dictionary containing GPS location, or configuration file location if GPS is disabled.
:return: A dictionary containing:
.. code-block:: text
{
"lat":0.0,
"lon":0.0
}
:rtype: dict
"""
if self.configDict['ESDK']['gps'] == False or self.configDict['ESDK']['gps'] is None:
self.location['lat'] = self.configDict['ESDK']['latitude']
self.location['lon'] = self.configDict['ESDK']['longitude']
return self.location
if self.configDict['ESDK']['gps'] == True:
if "lat" and "lon" in self.location:
return self.location
else:
return {}
def getGPSStatus(self):
""" Returns a dictionary containing GPS status.
:return: A dictionary containing:
.. code-block:: text
{
"gpsStatus":{
"mode":0,
"satellites":13,
"satellitesUsed":5
}
}
:rtype: dict
"""
return self.gpsStatus
def createModules(self):
""" Discovers and instantiates module objects for use with ``readAllModules()``. """
self._probeModules()
self.logger.debug("Creating module objects")
for moduleName in self.moduleNames:
try:
if moduleName == "THV":
self.sensorModules[moduleName] = moduleTypeDict[moduleName].ModTHV()
if moduleName == "CO2":
self.sensorModules[moduleName] = moduleTypeDict[moduleName].ModCO2()
if moduleName == "PM2":
self.sensorModules[moduleName] = moduleTypeDict[moduleName].ModPM2()
if moduleName == "NO2":
if "NO2" in self.configDict:
sensitivityCode = self.configDict["NO2"]["sensitivity"]
self.sensorModules[moduleName] = moduleTypeDict[moduleName].ModNO2(sensitivity=sensitivityCode)
else:
raise Exception("No NO2 module configuration provided")
if moduleName == "NRD":
self.sensorModules[moduleName] = moduleTypeDict[moduleName].ModNRD()
if moduleName == "FDH":
self.sensorModules[moduleName] = moduleTypeDict[moduleName].ModFDH()
except Exception as e:
self.logger.error("Could not create module {}, reason: {}".format(moduleName, e))
def readAllModules(self):
""" Reads all sensor modules and returns a dictionary containing sensor data. """
try:
for name, module in self.sensorModules.items():
self.logger.debug("Trying to read sensor {}".format(name))
data = module.readSensors()
if data != -1:
self.sensorData.update(data)
except Exception as e:
self.logger.error("Could not read module {}, reason: {}".format(name, e))
# Read loaded plugins
try:
for plugin in self.plugins:
pluginName = plugin.__class__.__name__
self.logger.debug("Trying to read plugin {}".format(pluginName))
try:
data = plugin.readSensors()
if data != -1:
self.sensorData.update(data)
except Exception as e:
self.logger.error("Could not read plugin {}, reason: {}".format(pluginName, e))
except Exception as e:
self.logger.error("Error handling plugins, reason: {}".format(e))
self.logger.debug("Sensor data {}".format(self.sensorData))
return self.sensorData
def getSerialNumber(self):
""" Returns a dictionary containing the Raspberry Pi serial number.
:return: A dictionary containing:
.. code-block:: text
{
"serialNumber":"RPI0123456789"
}
:rtype: dict
"""
try:
serialNumber = {}
with open('/sys/firmware/devicetree/base/serial-number') as f:
serialNumber['hardwareId'] = "RPI{}".format(strip_unicode.sub('',f.read()))
self.logger.info("Hardware ID is {}".format(serialNumber['hardwareId']))
return serialNumber
except Exception as e:
self.logger.error("Could not retrieve serial number, reason: {}".format(e))
return -1
def getModuleVersion(self):
""" Returns a dictionary containing ESDK module version.
:return: A dictionary containing:
.. code-block:: text
{
"moduleVerson":"0.0.1"
}
:rtype: dict
"""
return {"moduleVersion": pkg_resources.get_distribution('DesignSpark.ESDK').version}
def getUndervoltageStatus(self):
""" Returns a dictionary containing the Raspberry Pi throttle status and code.
:return: A dictionary containing (throttle_state is optional, and only populated should a nonzero code exist)
.. code-block:: text
{
"throttle_state":{
"code":0,
"throttle_state":""
}
}
:rtype: dict
"""
try:
            cmdOutput = subprocess.run(["vcgencmd", "get_throttled"], capture_output=True)
            # split on '=' rather than strip(), which would also eat trailing hex digits such as 'd' or 'e'
            statusData = cmdOutput.stdout.decode('ascii').strip().split("=")[1]
code = int(statusData, 16)
status = {"code": code}
response = {"throttle_state": status}
if statusData == "0x0":
return response
statusBits = [[0, "Under_Voltage detected"],
[1, "Arm frequency capped"],
[2, "Currently throttled"],
[3, "Soft temperature limit active"],
[16, "Under-voltage has occurred"],
[17, "Arm frequency capping has occurred"],
[18, "Throttling has occurred"],
[19, "Soft temperature limit has occurred"]]
statusStrings = []
for x in range(0, len(statusBits)):
statusBitString = statusBits[x][1]
if (code & (1 << statusBits[x][0])):
statusStrings.append(statusBitString)
status.update({"status_strings": statusStrings})
response = {"throttle_state": status}
return response
except Exception as e:
self.logger.error("Could not retrieve undervoltage status, reason: {}".format(e))
return -1
def setPower(self, vcc3=False, vcc5=False):
""" Switches 3.3V and 5V sensor power supply rails according to supplied arguments.
:param vcc3: 3.3V sensor power supply status, defaults to False
:type vcc3: bool, optional
:param vcc5: 5V sensor power supply status, defaults to False
:type vcc5: bool, optional
"""
try:
self.logger.debug("Setting sensor power rails, 3V3: {}, 5V: {}".format(vcc3, vcc5))
GPIO.output(SENSOR_3V3_EN, vcc3)
GPIO.output(SENSOR_5V_EN, vcc5)
except Exception as e:
raise e
def setBuzzer(self, freq=0):
""" Sets a PWM frequency on the buzzer output.
:param freq: Buzzer frequency, 0 stops the buzzer
:type freq: int, optional
"""
try:
if freq > 0:
self.logger.debug("Setting buzzer frequency to {}".format(freq))
self.buzzer_pwm.start(50)
self.buzzer_pwm.ChangeFrequency(freq)
if freq == 0:
self.logger.debug("Stopping buzzer")
self.buzzer_pwm.stop()
except Exception as e:
raise e
def loadPlugins(self):
""" Attempts to load and instantiate plugins from a specified folder. """
if self.pluginDir == None:
cwd = os.getcwd()
self.pluginFullPath = cwd + "/plugins"
self.logger.debug("No plugin folder provided, using default")
self.logger.debug("Current working directory: {}, plugins path: {}".format(cwd, self.pluginFullPath))
else:
self.pluginFullPath = self.pluginDir
# Create a list of available plugin modules
for filename in os.listdir(self.pluginFullPath):
modulename, extension = os.path.splitext(filename)
if extension == '.py':
file, path, descr = imp.find_module(modulename, [self.pluginFullPath])
if file:
try:
self.logger.debug("Found plugin module: {}".format(file.name))
module = imp.load_module(modulename, file, path, descr)
self.pluginsModuleList.append(module)
except Exception as e:
self.logger.error("Could not load plugin {}! Reason {}".format(file.name, e))
# Create a list of instantiated plugin classes
for pluginModule in self.pluginsModuleList:
for name, obj in inspect.getmembers(pluginModule):
if inspect.isclass(obj):
self.logger.debug("Created plugin class {}".format(obj))
self.plugins.append(obj())
self.logger.info("Loaded {} plugin(s)".format(len(self.plugins))) | PypiClean |
# File: ElectricalWireSizes-0.1.30rc1/electricalwiresizes/dbcircuitcd.py
from tabulate import tabulate
from .bd import dbConductorCuStd
import math, time
from .mbtcustd import mbtcustd
from .basicelecfunc import Rn, RnCd, Rcd, FCT
def dbcircuitcd(carga=None,view=None,conductor=None):
if(carga==None or view==None or conductor==None):
t = time.localtime()
print(":::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::")
print(" ElectricalWireSizes ")
print(" ",time.asctime(t))
print(":::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::")
print(" ")
print(" ─▄▀─▄▀")
print(" ──▀──▀")
print(" █▀▀▀▀▀█▄")
print(" █░░░░░█─█")
print(" ▀▄▄▄▄▄▀▀")
print(" ")
print("-------------------------------------------------------------")
print("| Los parámetros no son correctos |")
print("| para el módulo DBCIRCUITCD(carga,view,conductor) |")
print("-------------------------------------------------------------")
return
dbcircuit = [[str(i + 1)] for i in range(len(carga))]
datos=[]
for i in range(len(carga)):
if conductor == 1:
datos.append(mbtcustd(carga[i][1],carga[i][2],carga[i][3],carga[i][4],carga[i][5],carga[i][6],carga[i][7],carga[i][8],carga[i][9],carga[i][10],carga[i][11],carga[i][12]))
if view==1:
print("Id [",i+1,"]============================================================================================================")
print(tabulate(datos[i], headers=["AWG/KCM","Kcd [A,B,C]", "", "60", "75", "90","%Vd","Nc", "In", "60", "75", "90", "Op", "ITM"], tablefmt='psql'))
elif conductor == 2:
datos.append(mbtcustd(carga[i][1],carga[i][2],carga[i][3],carga[i][4],carga[i][5],carga[i][6],carga[i][7],carga[i][8],carga[i][9],carga[i][10],carga[i][11],carga[i][12]))
if view==1:
print("Id [",i+1,"]============================================================================================================")
print(tabulate(datos[i], headers=["AWG/KCM","Kcd [A,B,C]", "", "60", "75", "90","%Vd","Nc", "In", "60", "75", "90", "Op", "ITM"], tablefmt='psql'))
if conductor==1:
dbConductor=dbConductorCuStd
elif conductor==2:
dbConductor=dbConductorCuStd
for i in range(len(carga)):
for j in range(len(dbConductor)):
if datos[i][j][11]=="Yes":
dbcircuit[i].append(datos[i][j][0])
dbcircuit[i].append(datos[i][j][1])
dbcircuit[i].append(carga[i][2])
dbcircuit[i].append(carga[i][7])
dbcircuit[i].append("CD [+-]")
dbcircuit[i].append(FCT(carga[i][6]))
dbcircuit[i].append(datos[i][j][2])
dbcircuit[i].append(datos[i][j][3])
dbcircuit[i].append(datos[i][j][4])
dbcircuit[i].append(datos[i][j][5])
dbcircuit[i].append(datos[i][j][6])
dbcircuit[i].append(datos[i][j][7])
dbcircuit[i].append(datos[i][j][8])
dbcircuit[i].append(datos[i][j][9])
dbcircuit[i].append(datos[i][j][10])
#dbcircuit[i].append(datos[i][j][11])
dbcircuit[i].append(datos[i][j][12])
break
#return dbcircuit
print("::::::: [ RESUMEN DE CARGAS ]::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::::")
#print(tabulate(dbcircuit, headers=["Id","#CAL","L[m]", "Vd","FP","ALM", "Fct","Fa","60", "75", "90","Vd[%]","Nc", "In", "60", "75", "90", "ITM"], tablefmt='psql'))
#print(tabulate(dbcircuit, headers=["Idx","#CAL","L[m]", "Vd","ALM", "Fct","60", "75", "90","Vd[%]","Nc", "In", "60", "75", "90", "ITM"], tablefmt='psql'))
print(tabulate(dbcircuit, headers=["Id","#CAL","Kcd [A,B,C]","L[m]", "Vd", "ALM", "Fct", "60", "75", "90", "Vd[%]", "Nc", "In", "60", "75", "90", "ITM"], tablefmt='psql')) | PypiClean |
# File: ConcurrentPandas-0.1.2/concurrentpandas.py
__author__ = 'Brian M Wilcox'
__version__ = '0.1.2'
"""
Copyright 2014 Brian M Wilcox
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import Quandl
import collections
import time
import sys
import pandas.io.data
from pandas.io.data import Options
import multiprocessing
from multiprocessing import Process, Manager
from multiprocessing.pool import ThreadPool
from random import randrange
def data_worker(**kwargs):
"""
Function to be spawned concurrently,
consume data keys from input queue, and push the resulting dataframes to output map
"""
    if kwargs is not None:
        if "function" in kwargs:
            function = kwargs["function"]
        else:
            raise Exception("Invalid arguments, no function specified")
        if "input" in kwargs:
            input_queue = kwargs["input"]
        else:
            raise Exception("Invalid Arguments, no input queue")
        if "output" in kwargs:
            output_map = kwargs["output"]
        else:
            raise Exception("Invalid Arguments, no output map")
        if "token" in kwargs:
            argsdict = {"quandl_token": kwargs["token"]}
        elif "Quandl" in function.__module__:
            raise Exception("Invalid Arguments, no Quandl token")
        # check each required key individually; a chained "and" inside a
        # membership test would only check the last key
        if ("source" in kwargs) and ("begin" in kwargs) and ("end" in kwargs):
            argsdict = {"data_source": kwargs["source"], "begin": kwargs["begin"], "end": kwargs["end"]}
        elif "source" in kwargs:
            argsdict = {"data_source": kwargs["source"]}
        elif "pandas.io.data" in function.__module__:
            raise Exception("Invalid Arguments, no pandas data source specified")
    else:
        raise Exception("Invalid Arguments")
retries = 5
while not input_queue.empty():
data_key = input_queue.get()
get_data(function, data_key, output_map, retries, argsdict)
def get_data(data_get, data_key, output_map, retries_left, argdict):
"""
Function to use Python Pandas and / or Quandl to download a dataframe
Insert resulting dataframe into output map
"""
if retries_left <= 0:
print(data_key + " Failed to download.")
return
"""
Identify type of function to use, insert result into output map
"""
if "Quandl" in data_get.__module__:
output_map[data_key] = data_get(data_key, authtoken=argdict["quandl_token"])
return
if "pandas.io.data" in data_get.__module__:
# Verify we are not dealing with options
        if 'get_call_data' not in dir(data_get):
            # check the date bounds explicitly: a chained `("source" and
            # "begin" and "end") in argdict` would only test for "end"
            if "begin" in argdict and "end" in argdict:
                try:
                    output_map[data_key] = data_get(data_key, argdict["data_source"], argdict["begin"], argdict["end"])
                    return
                except Exception:
                    print(data_key + " failed to download. Retrying up to " + str(retries_left) + " more times...")
            else:
                try:
                    output_map[data_key] = data_get(data_key, argdict["data_source"])
                    return
                except Exception:
                    print(data_key + " failed to download. Retrying up to " + str(retries_left) + " more times...")
# Verify we are dealing with options
if 'get_call_data' in dir(data_get):
try:
# Note options data will always be pulled from yahoo
temp = data_get(data_key, 'yahoo')
                # For simplicity's sake assume the user wants all options data
output_map[data_key] = temp.get_all_data()
return
            except Exception:
                print(data_key + " options failed to download. Retrying up to " + str(retries_left) + " more times...")
                print("WARNING: If your version of Pandas is not up to date this may fail!")
"""
    Retry after random, progressively longer delays once the number of remaining retries gets low
"""
if (retries_left == 3):
time.sleep(randrange(0, 4))
if (retries_left == 2):
time.sleep(randrange(2, 6))
if (retries_left == 1):
time.sleep(randrange(5, 15))
get_data(data_get, data_key, output_map, (retries_left-1), argdict)
class ConcurrentPandas:
"""
Concurrent Pandas is a class for concurrent asynchronous data downloads
from a variety of sources using either threads, or processes.
"""
def __init__(self):
self.output_map = Manager().dict()
self.input_queue = Manager().Queue()
self.data_worker = None
self.worker_args = None
self.source_name = None
def consume_keys(self):
"""
Work through the keys to look up sequentially
"""
print("\nLooking up " + self.input_queue.qsize().__str__() + " keys from " + self.source_name + "\n")
self.data_worker(**self.worker_args)
def consume_keys_asynchronous_processes(self):
"""
Work through the keys to look up asynchronously using multiple processes
"""
print("\nLooking up " + self.input_queue.qsize().__str__() + " keys from " + self.source_name + "\n")
jobs = multiprocessing.cpu_count()*4 if (multiprocessing.cpu_count()*4 < self.input_queue.qsize()) \
else self.input_queue.qsize()
pool = multiprocessing.Pool(processes=jobs, maxtasksperchild=10)
        for x in range(jobs):
            # pool.apply() blocks until each call returns, which would make
            # the lookups sequential; apply_async runs the workers in parallel
            pool.apply_async(self.data_worker, [], self.worker_args)
pool.close()
pool.join()
def consume_keys_asynchronous_threads(self):
"""
Work through the keys to look up asynchronously using multiple threads
"""
print("\nLooking up " + self.input_queue.qsize().__str__() + " keys from " + self.source_name + "\n")
jobs = multiprocessing.cpu_count()*4 if (multiprocessing.cpu_count()*4 < self.input_queue.qsize()) \
else self.input_queue.qsize()
pool = ThreadPool(jobs)
        for x in range(jobs):
            # pool.apply() blocks until each call returns, which would make
            # the lookups sequential; apply_async runs the workers in parallel
            pool.apply_async(self.data_worker, [], self.worker_args)
pool.close()
pool.join()
def return_map(self):
"""
Return hashmap consisting of key string -> data frame
"""
return self.output_map
def return_input_queue(self):
"""
Return input Queue
"""
return self.input_queue
def insert_keys(self, *args):
"""
Unpack each key and add to queue
"""
for key in args:
self.unpack(key)
def unpack(self, to_unpack):
"""
Unpack is a recursive function that will unpack anything that inherits
from abstract base class Container provided it is not also inheriting from Python basestring.
        Raises an Exception if the resulting object is neither a container nor a string.
        Works in both Python 2 and Python 3.
"""
# Python 3 lacks basestring type, work around below
try:
isinstance(to_unpack, basestring)
except NameError:
basestring = str
# Base Case
if isinstance(to_unpack, basestring):
self.input_queue.put(to_unpack)
return
for possible_key in to_unpack:
if isinstance(possible_key, basestring):
self.input_queue.put(possible_key)
elif sys.version_info >= (3, 0):
if isinstance(possible_key, collections.abc.Container) and not isinstance(possible_key, basestring):
self.unpack(possible_key)
else:
raise Exception("A type that is neither a string or a container was passed to unpack. "
"Aborting!")
else:
if isinstance(possible_key, collections.Container) and not isinstance(possible_key, basestring):
self.unpack(possible_key)
else:
raise Exception("A type that is neither a string or a container was passed to unpack. "
"Aborting!")
def set_source_quandl(self, quandl_token):
"""
Set data source to Quandl
"""
self.data_worker = data_worker
self.worker_args = {"function": Quandl.get, "input": self.input_queue, "output": self.output_map,
"token": quandl_token}
self.source_name = "Quandl"
def set_source_yahoo_finance(self):
"""
Set data source to Yahoo Finance
"""
self.data_worker = data_worker
self.worker_args = {"function": pandas.io.data.DataReader, "input": self.input_queue, "output": self.output_map,
"source": 'yahoo'}
self.source_name = "Yahoo Finance"
def set_source_google_finance(self):
"""
Set data source to Google Finance
"""
self.data_worker = data_worker
self.worker_args = {"function": pandas.io.data.DataReader, "input": self.input_queue, "output": self.output_map,
"source": 'google'}
self.source_name = "Google Finance"
def set_source_federal_reserve_economic_data(self):
"""
Set data source to Federal Reserve Economic Data
"""
self.data_worker = data_worker
self.worker_args = {"function": pandas.io.data.DataReader, "input": self.input_queue, "output": self.output_map,
"source": 'fred'}
self.source_name = "Federal Reserve Economic Data"
def set_source_yahoo_options(self):
"""
Set data source to yahoo finance, specifically to download financial options data
"""
self.data_worker = data_worker
self.worker_args = {"function": Options, "input": self.input_queue, "output": self.output_map,
"source": 'yahoo'}
self.source_name = "Yahoo Finance Options" | PypiClean |
/MultiApp-1.0.tar.gz/MultiApp-1.0/multiapp.py | import sys
import os
from os.path import basename
from iotk import *
class MultiApp:
"""
This is a base class for MultiApp. You can populate it with commands by
creating, documenting, and providing certain attributes to methods of a
subclass of MultiApp.
"""
name = "<<MultiApp>>"
version = "2008.12.2"
shortdesc = "This is the base class for command-line apps"
topics = dict()
commands = dict()
help_cmd = "help"
def cmd_list(self):
"""
This will hunt down every command, even those in base classes. Code
lovingly ripped off from ``cmd.Cmd`` in the Standard Library.
"""
names = []
classes = [self.__class__]
while classes:
aclass = classes.pop(0)
if aclass.__bases__:
classes = classes + list(aclass.__bases__)
names = names + dir(aclass)
return [attrib for attrib in names if attrib.startswith("do_")]
def run(self, args=sys.argv):
self.ename = basename(args[0])
if len(args) < 2:
self.default()
return
cmdname = args[1]
if len(args) > 2:
arguments = args[2:]
else:
arguments = []
if self.cmd_to_fn(cmdname) in self.cmd_list():
command = self.getcmd(cmdname)
try:
command(*arguments)
except TypeError:
if hasattr(command, "usage"):
print self.gen_usage(cmdname, command.usage)
else:
print "Wrong parameters supplied."
print "Type '" + self.gen_help() + "' for help topics"
else:
self.notfound(cmdname)
def gen_usage(self, name, usage):
return "USAGE: " + self.ename + " " + name + " " + usage
def cmd_to_fn(self, name):
return "do_" + name.replace("-", "_")
def fn_to_cmd(self, name):
return name[3:].replace("_", "-")
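    # e.g. cmd_to_fn("charlie-delta") returns "do_charlie_delta", and
    # fn_to_cmd("do_charlie_delta") returns "charlie-delta"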
def gen_help(self):
return self.ename + " " + self.help_cmd
def notfound(self, cmdname):
print "Command", cmdname, "does not exist."
print "Type '" + self.gen_help() + "' for help topics'"
def default(self):
header = self.name + " " + self.version
print_header(header)
print self.shortdesc
print
print "USAGE: " + self.ename + " [subcommand] [arguments]"
print "Type '" + self.gen_help() + "' for help topics"
def do_help(self, *args):
"""
This is the online help system. It displays information about commands
and assorted other help topics defined by the application. Simply
typing "help" will list all help topics, while typing "help something"
will display the full command.
"""
if not args:
self.help_index()
else:
self.help_topic(args[0])
do_help.usage = "[COMMAND-OR-TOPIC]"
do_help.descr = "Get info on commands and other functions."
def gen_descrs(self):
descrs = dict()
for comname in self.cmd_list():
command = getattr(self, comname)
if hasattr(command, "descr"):
descrs[self.fn_to_cmd(comname)] = command.descr
else:
descrs[self.fn_to_cmd(comname)] = "---"
return descrs
def help_index(self):
print_header(self.name + " Help Topics")
if self.__doc__:
print trim_docstring(self.__doc__)
print
if self.topics:
print_header("Topics", underline="-", just="left")
print_list(self.topics.keys(), sort=True)
print
if self.cmd_list():
print_header("Commands", underline="-", just="left")
print_dict(self.gen_descrs(), space=2, sort=True)
print
def help_topic(self, lookup):
if self.cmd_to_fn(lookup) in self.cmd_list():
self.cmd_help(lookup)
elif lookup in self.topics:
self.topic_help(lookup)
else:
print "There's not a help topic named that."
print "Type '" + self.gen_help() + "' for help topics"
return
def cmd_help(self, cmdname):
command = self.getcmd(cmdname)
print
print_header(self.name + ": Help for command '" + cmdname + "'")
if hasattr(command, "usage"):
print self.gen_usage(cmdname, command.usage)
if command.__doc__:
print
print trim_docstring(command.__doc__)
else:
if hasattr(command, "descr"):
print
print command.descr
else:
print
print "Sorry, no help for this command."
print
def topic_help(self, lookup):
print
print_header(self.name + ": Help on topic '" + lookup + "'")
print self.topics[lookup]
print
def getcmd(self, something):
cmdname = self.cmd_to_fn(something)
if hasattr(self, cmdname):
return getattr(self, cmdname)
else:
print "Horrible Error: Command", cmdname, "not found."
if __name__ == "__main__":
class TestApp(MultiApp):
"""
This is just a simple test application. You can use this to get an idea
of how to use MultiApp.
"""
name = "MultiApp Test"
version = "0.0"
shortdesc = "This is an app designed to test out MultiApp."
topics = {'dummy-topic': "This is just a dummy help topic. Disregard it."}
def do_alpha(self, *args):
"""
Prints the arguments passed to it. Note that this does not include
the program's name and ``alpha`` itself, just everything after that.
"""
print "Arguments: "
for arg in args:
print arg
do_alpha.usage = "[ARGUMENTS...]"
do_alpha.descr = "Lists all of the arguments passed to it."
def do_bravo(self, name):
"""
Says "Hello, NAME!" Intended to test out argument-error-catching
facilities.
"""
print "Hello, " + name + "!"
do_bravo.usage = "NAME"
do_bravo.descr = "Says Hello, NAME!"
def do_charlie_delta(self, *args):
"""
Prints the arguments passed to it. It's just like ``alpha``, but it
has a hyphen to test out the concept of hyphenated commands.
"""
print "Arguments: "
for arg in args:
print arg
do_charlie_delta.usage = "[ARGUMENTS...]"
do_charlie_delta.descr = "Does the same thing as alpha, but " \
"with a hyphen in the name and a longer description."
def do_echo(self, *args):
# Completely undocumented.
print "Arguments: "
for arg in args:
print arg
app = TestApp()
app.run() | PypiClean |
/ConSSL-0.0.1-py3-none-any.whl/CSSL/models/self_supervised/resnets.py | import torch
from torch import nn
from CSSL.utils import _TORCHVISION_AVAILABLE
from CSSL.utils.warnings import warn_missing_pkg
if _TORCHVISION_AVAILABLE:
from torchvision.models.utils import load_state_dict_from_url
else: # pragma: no cover
warn_missing_pkg('torchvision')
__all__ = [
'ResNet',
'resnet18',
'resnet34',
'resnet50',
'resnet101',
'resnet152',
'resnext50_32x4d',
'resnext101_32x8d',
'wide_resnet50_2',
'wide_resnet101_2',
]
MODEL_URLS = {
'resnet18': 'https://download.pytorch.org/models/resnet18-5c106cde.pth',
'resnet34': 'https://download.pytorch.org/models/resnet34-333f7ec4.pth',
'resnet50': 'https://download.pytorch.org/models/resnet50-19c8e357.pth',
'resnet101': 'https://download.pytorch.org/models/resnet101-5d3b4d8f.pth',
'resnet152': 'https://download.pytorch.org/models/resnet152-b121ed2d.pth',
'resnext50_32x4d': 'https://download.pytorch.org/models/resnext50_32x4d-7cdf4587.pth',
'resnext101_32x8d': 'https://download.pytorch.org/models/resnext101_32x8d-8ba56ff5.pth',
'wide_resnet50_2': 'https://download.pytorch.org/models/wide_resnet50_2-95faca4d.pth',
'wide_resnet101_2': 'https://download.pytorch.org/models/wide_resnet101_2-32ee1156.pth',
}
def conv3x3(in_planes, out_planes, stride=1, groups=1, dilation=1):
"""3x3 convolution with padding"""
return nn.Conv2d(
in_planes,
out_planes,
kernel_size=3,
stride=stride,
padding=dilation,
groups=groups,
bias=False,
dilation=dilation
)
def conv1x1(in_planes, out_planes, stride=1):
"""1x1 convolution"""
return nn.Conv2d(in_planes, out_planes, kernel_size=1, stride=stride, bias=False)
class BasicBlock(nn.Module):
expansion = 1
def __init__(
self, inplanes, planes, stride=1, downsample=None, groups=1, base_width=64, dilation=1, norm_layer=None
):
super(BasicBlock, self).__init__()
if norm_layer is None:
norm_layer = nn.BatchNorm2d
if groups != 1 or base_width != 64:
raise ValueError('BasicBlock only supports groups=1 and base_width=64')
if dilation > 1:
raise NotImplementedError("Dilation > 1 not supported in BasicBlock")
# Both self.conv1 and self.downsample layers downsample the input when stride != 1
self.conv1 = conv3x3(inplanes, planes, stride)
self.bn1 = norm_layer(planes)
self.relu = nn.ReLU(inplace=True)
self.conv2 = conv3x3(planes, planes)
self.bn2 = norm_layer(planes)
self.downsample = downsample
self.stride = stride
def forward(self, x):
identity = x
out = self.conv1(x)
out = self.bn1(out)
out = self.relu(out)
out = self.conv2(out)
out = self.bn2(out)
if self.downsample is not None:
identity = self.downsample(x)
out += identity
out = self.relu(out)
return out
class Bottleneck(nn.Module):
expansion = 4
def __init__(
self, inplanes, planes, stride=1, downsample=None, groups=1, base_width=64, dilation=1, norm_layer=None
):
super(Bottleneck, self).__init__()
if norm_layer is None:
norm_layer = nn.BatchNorm2d
width = int(planes * (base_width / 64.)) * groups
# Both self.conv2 and self.downsample layers downsample the input when stride != 1
self.conv1 = conv1x1(inplanes, width)
self.bn1 = norm_layer(width)
self.conv2 = conv3x3(width, width, stride, groups, dilation)
self.bn2 = norm_layer(width)
self.conv3 = conv1x1(width, planes * self.expansion)
self.bn3 = norm_layer(planes * self.expansion)
self.relu = nn.ReLU(inplace=True)
self.downsample = downsample
self.stride = stride
def forward(self, x):
identity = x
out = self.conv1(x)
out = self.bn1(out)
out = self.relu(out)
out = self.conv2(out)
out = self.bn2(out)
out = self.relu(out)
out = self.conv3(out)
out = self.bn3(out)
if self.downsample is not None:
identity = self.downsample(x)
out += identity
out = self.relu(out)
return out
class ResNet(nn.Module):
def __init__(
self,
block,
layers,
num_classes=1000,
zero_init_residual=False,
groups=1,
width_per_group=64,
replace_stride_with_dilation=None,
norm_layer=None,
return_all_feature_maps=False,
first_conv=True,
maxpool1=True,
):
super(ResNet, self).__init__()
if norm_layer is None:
norm_layer = nn.BatchNorm2d
self._norm_layer = norm_layer
self.return_all_feature_maps = return_all_feature_maps
self.inplanes = 64
self.dilation = 1
if replace_stride_with_dilation is None:
# each element in the tuple indicates if we should replace
# the 2x2 stride with a dilated convolution instead
replace_stride_with_dilation = [False, False, False]
if len(replace_stride_with_dilation) != 3:
raise ValueError(
"replace_stride_with_dilation should be None "
f"or a 3-element tuple, got {replace_stride_with_dilation}"
)
self.groups = groups
self.base_width = width_per_group
if first_conv:
self.conv1 = nn.Conv2d(3, self.inplanes, kernel_size=7, stride=2, padding=3, bias=False)
else:
self.conv1 = nn.Conv2d(3, self.inplanes, kernel_size=3, stride=1, padding=1, bias=False)
self.bn1 = norm_layer(self.inplanes)
self.relu = nn.ReLU(inplace=True)
if maxpool1:
self.maxpool = nn.MaxPool2d(kernel_size=3, stride=2, padding=1)
else:
self.maxpool = nn.MaxPool2d(kernel_size=1, stride=1)
self.layer1 = self._make_layer(block, 64, layers[0])
self.layer2 = self._make_layer(block, 128, layers[1], stride=2, dilate=replace_stride_with_dilation[0])
self.layer3 = self._make_layer(block, 256, layers[2], stride=2, dilate=replace_stride_with_dilation[1])
self.layer4 = self._make_layer(block, 512, layers[3], stride=2, dilate=replace_stride_with_dilation[2])
self.avgpool = nn.AdaptiveAvgPool2d((1, 1))
self.fc = nn.Linear(512 * block.expansion, num_classes)
for m in self.modules():
if isinstance(m, nn.Conv2d):
nn.init.kaiming_normal_(m.weight, mode='fan_out', nonlinearity='relu')
elif isinstance(m, (nn.BatchNorm2d, nn.GroupNorm)):
nn.init.constant_(m.weight, 1)
nn.init.constant_(m.bias, 0)
# Zero-initialize the last BN in each residual branch,
# so that the residual branch starts with zeros, and each residual block behaves like an identity.
# This improves the model by 0.2~0.3% according to https://arxiv.org/abs/1706.02677
if zero_init_residual:
for m in self.modules():
if isinstance(m, Bottleneck):
nn.init.constant_(m.bn3.weight, 0)
elif isinstance(m, BasicBlock):
nn.init.constant_(m.bn2.weight, 0)
def _make_layer(self, block, planes, blocks, stride=1, dilate=False):
norm_layer = self._norm_layer
downsample = None
previous_dilation = self.dilation
if dilate:
self.dilation *= stride
stride = 1
if stride != 1 or self.inplanes != planes * block.expansion:
downsample = nn.Sequential(
conv1x1(self.inplanes, planes * block.expansion, stride),
norm_layer(planes * block.expansion),
)
layers = []
layers.append(
block(
self.inplanes,
planes,
stride,
downsample,
self.groups,
self.base_width,
previous_dilation,
norm_layer,
)
)
self.inplanes = planes * block.expansion
for _ in range(1, blocks):
layers.append(
block(
self.inplanes,
planes,
groups=self.groups,
base_width=self.base_width,
dilation=self.dilation,
norm_layer=norm_layer
)
)
return nn.Sequential(*layers)
def forward(self, x):
x0 = self.conv1(x)
x0 = self.bn1(x0)
x0 = self.relu(x0)
x0 = self.maxpool(x0)
if self.return_all_feature_maps:
x1 = self.layer1(x0)
x2 = self.layer2(x1)
x3 = self.layer3(x2)
x4 = self.layer4(x3)
return [x0, x1, x2, x3, x4]
else:
x0 = self.layer1(x0)
x0 = self.layer2(x0)
x0 = self.layer3(x0)
x0 = self.layer4(x0)
x0 = self.avgpool(x0)
x0 = torch.flatten(x0, 1)
return [x0]
def _resnet(arch, block, layers, pretrained, progress, **kwargs):
model = ResNet(block, layers, **kwargs)
if pretrained:
state_dict = load_state_dict_from_url(MODEL_URLS[arch], progress=progress)
model.load_state_dict(state_dict)
return model
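# A minimal usage sketch (assumes torch is installed; weights stay random
# unless pretrained=True and the torchvision checkpoint download succeeds):
#
#   model = resnet50(return_all_feature_maps=True)
#   x0, x1, x2, x3, x4 = model(torch.randn(2, 3, 224, 224))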
def resnet18(pretrained: bool = False, progress: bool = True, **kwargs):
r"""ResNet-18 model from
`"Deep Residual Learning for Image Recognition" <https://arxiv.org/pdf/1512.03385.pdf>`_
Args:
pretrained: If True, returns a model pre-trained on ImageNet
progress: If True, displays a progress bar of the download to stderr
"""
return _resnet('resnet18', BasicBlock, [2, 2, 2, 2], pretrained, progress, **kwargs)
def resnet34(pretrained=False, progress=True, **kwargs):
r"""ResNet-34 model from
`"Deep Residual Learning for Image Recognition" <https://arxiv.org/pdf/1512.03385.pdf>`_
Args:
pretrained: If True, returns a model pre-trained on ImageNet
progress: If True, displays a progress bar of the download to stderr
"""
return _resnet('resnet34', BasicBlock, [3, 4, 6, 3], pretrained, progress, **kwargs)
def resnet50(pretrained: bool = False, progress: bool = True, **kwargs):
r"""ResNet-50 model from
`"Deep Residual Learning for Image Recognition" <https://arxiv.org/pdf/1512.03385.pdf>`_
Args:
pretrained: If True, returns a model pre-trained on ImageNet
progress: If True, displays a progress bar of the download to stderr
"""
return _resnet('resnet50', Bottleneck, [3, 4, 6, 3], pretrained, progress, **kwargs)
def resnet101(pretrained: bool = False, progress: bool = True, **kwargs):
r"""ResNet-101 model from
`"Deep Residual Learning for Image Recognition" <https://arxiv.org/pdf/1512.03385.pdf>`_
Args:
pretrained: If True, returns a model pre-trained on ImageNet
progress: If True, displays a progress bar of the download to stderr
"""
return _resnet('resnet101', Bottleneck, [3, 4, 23, 3], pretrained, progress, **kwargs)
def resnet152(pretrained: bool = False, progress: bool = True, **kwargs):
r"""ResNet-152 model from
`"Deep Residual Learning for Image Recognition" <https://arxiv.org/pdf/1512.03385.pdf>`_
Args:
pretrained: If True, returns a model pre-trained on ImageNet
progress: If True, displays a progress bar of the download to stderr
"""
return _resnet('resnet152', Bottleneck, [3, 8, 36, 3], pretrained, progress, **kwargs)
def resnext50_32x4d(pretrained: bool = False, progress: bool = True, **kwargs):
r"""ResNeXt-50 32x4d model from
`"Aggregated Residual Transformation for Deep Neural Networks" <https://arxiv.org/pdf/1611.05431.pdf>`_
Args:
pretrained: If True, returns a model pre-trained on ImageNet
progress: If True, displays a progress bar of the download to stderr
"""
kwargs['groups'] = 32
kwargs['width_per_group'] = 4
return _resnet('resnext50_32x4d', Bottleneck, [3, 4, 6, 3], pretrained, progress, **kwargs)
def resnext101_32x8d(pretrained: bool = False, progress: bool = True, **kwargs):
r"""ResNeXt-101 32x8d model from
`"Aggregated Residual Transformation for Deep Neural Networks" <https://arxiv.org/pdf/1611.05431.pdf>`_
Args:
pretrained: If True, returns a model pre-trained on ImageNet
progress: If True, displays a progress bar of the download to stderr
"""
kwargs['groups'] = 32
kwargs['width_per_group'] = 8
return _resnet('resnext101_32x8d', Bottleneck, [3, 4, 23, 3], pretrained, progress, **kwargs)
def wide_resnet50_2(pretrained: bool = False, progress: bool = True, **kwargs):
r"""Wide ResNet-50-2 model from
`"Wide Residual Networks" <https://arxiv.org/pdf/1605.07146.pdf>`_
The model is the same as ResNet except for the bottleneck number of channels
    which is twice as large in every block. The number of channels in outer 1x1
convolutions is the same, e.g. last block in ResNet-50 has 2048-512-2048
channels, and in Wide ResNet-50-2 has 2048-1024-2048.
Args:
pretrained: If True, returns a model pre-trained on ImageNet
progress: If True, displays a progress bar of the download to stderr
"""
kwargs['width_per_group'] = 64 * 2
return _resnet('wide_resnet50_2', Bottleneck, [3, 4, 6, 3], pretrained, progress, **kwargs)
def wide_resnet101_2(pretrained: bool = False, progress: bool = True, **kwargs):
r"""Wide ResNet-101-2 model from
`"Wide Residual Networks" <https://arxiv.org/pdf/1605.07146.pdf>`_
The model is the same as ResNet except for the bottleneck number of channels
    which is twice as large in every block. The number of channels in outer 1x1
convolutions is the same, e.g. last block in ResNet-50 has 2048-512-2048
channels, and in Wide ResNet-50-2 has 2048-1024-2048.
Args:
pretrained: If True, returns a model pre-trained on ImageNet
progress: If True, displays a progress bar of the download to stderr
"""
kwargs['width_per_group'] = 64 * 2
return _resnet('wide_resnet101_2', Bottleneck, [3, 4, 23, 3], pretrained, progress, **kwargs) | PypiClean |
/BiblioPixel-3.4.46.tar.gz/BiblioPixel-3.4.46/bibliopixel/util/int_names.py | import calendar
def to_index(name):
if isinstance(name, float):
raise KeyError('Indexes cannot be floating point')
try:
return int(name)
except:
pass
try:
return NAME_TO_INDEX[name.lower()]
except:
raise KeyError('Can\'t understand index "%s"' % name)
def to_names(index):
return INDEX_TO_NAME[index]
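# e.g. to_index("Tuesday") == to_index("Feb") == 2, and to_names(2) collects
# every alias registered for 2:
# ['Tue', 'Tuesday', 'Feb', 'February', 'orange', 'Venus', 'He', 'helium']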
def _combine(*name_lists):
name_to_index = {}
index_to_name = {}
def add(i, name):
nl = name.lower()
if nl not in name_to_index:
name_to_index[nl] = i
index_to_name.setdefault(i, []).append(name)
elif nl not in DUPES:
raise ValueError(name + ' duplicated')
for z in ZEROES:
add(0, z)
for name_list in name_lists:
for i, names in enumerate(name_list):
if isinstance(names, str):
names = names,
for name in names:
add(i + 1, name)
return name_to_index, index_to_name
DUPES = 'may', 'mercury'
ZEROES = 'none', 'nothing', 'zero', 'zip'
DAYS = tuple(zip(calendar.day_abbr, calendar.day_name))
MONTHS = tuple(zip(calendar.month_abbr, calendar.month_name))[1:]
COLORS = 'red', 'orange', 'yellow', 'green', 'blue', 'indigo', 'violet'
PLANETS = ('Mercury', 'Venus', 'Earth', 'Mars', 'Jupiter', 'Saturn', 'Uranus',
'Neptune', 'Pluto')
ELEMENTS = (
('H', 'hydrogen'),
('He', 'helium'),
('Li', 'lithium'),
('Be', 'beryllium'),
('B', 'boron'),
('C', 'carbon'),
('N', 'nitrogen'),
('O', 'oxygen'),
('F', 'fluorine'),
('Ne', 'neon'),
('Na', 'sodium'),
('Mg', 'magnesium'),
('Al', 'aluminum'),
('Si', 'silicon'),
('P', 'phosphorus'),
('S', 'sulfur'),
('Cl', 'chlorine'),
('Ar', 'argon'),
('K', 'potassium'),
('Ca', 'calcium'),
('Sc', 'scandium'),
('Ti', 'titanium'),
('V', 'vanadium'),
('Cr', 'chromium'),
('Mn', 'manganese'),
('Fe', 'iron'),
('Co', 'cobalt'),
('Ni', 'nickel'),
('Cu', 'copper'),
('Zn', 'zinc'),
('Ga', 'gallium'),
('Ge', 'germanium'),
('As', 'arsenic'),
('Se', 'selenium'),
('Br', 'bromine'),
('Kr', 'krypton'),
('Rb', 'rubidium'),
('Sr', 'strontium'),
('Y', 'yttrium'),
('Zr', 'zirconium'),
('Nb', 'niobium'),
('Mo', 'molybdenum'),
('Tc', 'technetium'),
('Ru', 'ruthenium'),
('Rh', 'rhodium'),
('Pd', 'palladium'),
('Ag', 'silver'),
('Cd', 'cadmium'),
('In', 'indium'),
('Sn', 'tin'),
('Sb', 'antimony'),
('Te', 'tellurium'),
('I', 'iodine'),
('Xe', 'xenon'),
('Cs', 'cesium'),
('Ba', 'barium'),
('La', 'lanthanum'),
('Ce', 'cerium'),
('Pr', 'praseodymium'),
('Nd', 'neodymium'),
('Pm', 'promethium'),
('Sm', 'samarium'),
('Eu', 'europium'),
('Gd', 'gadolinium'),
('Tb', 'terbium'),
('Dy', 'dysprosium'),
('Ho', 'holmium'),
('Er', 'erbium'),
('Tm', 'thulium'),
('Yb', 'ytterbium'),
('Lu', 'lutetium'),
('Hf', 'hafnium'),
('Ta', 'tantalum'),
('W', 'tungsten'),
('Re', 'rhenium'),
('Os', 'osmium'),
('Ir', 'iridium'),
('Pt', 'platinum'),
('Au', 'gold'),
('Hg', 'mercury'),
('Tl', 'thallium'),
('Pb', 'lead'),
('Bi', 'bismuth'),
('Po', 'polonium'),
('At', 'astatine'),
('Rn', 'radon'),
('Fr', 'francium'),
('Ra', 'radium'),
('Ac', 'actinium'),
('Th', 'thorium'),
('Pa', 'protactinium'),
('U', 'uranium'),
('Np', 'neptunium'),
('Pu', 'plutonium'),
('Am', 'americium'),
('Cm', 'curium'),
('Bk', 'berkelium'),
('Cf', 'californium'),
('Es', 'einsteinium'),
('Fm', 'fermium'),
('Md', 'mendelevium'),
('No', 'nobelium'),
('Lr', 'lawrencium'),
('Rf', 'rutherfordium'),
('Db', 'dubnium'),
('Sg', 'seaborgium'),
('Bh', 'bohrium'),
('Hs', 'hassium'),
('Mt', 'meitnerium'),
('Ds', 'darmstadtium'),
('Rg', 'roentgenium'),
('Cn', 'copernicium'),
('Nh', 'nihonium'),
('Fl', 'flerovium'),
('Mc', 'moscovium'),
('Lv', 'livermorium'),
('Ts', 'tennessine'),
('Og', 'oganesson'),
)
NAME_TO_INDEX, INDEX_TO_NAME = _combine(DAYS, MONTHS, COLORS, PLANETS, ELEMENTS) | PypiClean |
/MnemoPwd-1.2.1-py3-none-any.whl/mnemopwd/client/corelayer/protocol/StateS37A.py |
# Copyright (c) 2016-2017, Thierry Lemeunier <thierry at lemeunier dot net>
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without modification,
# are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
# OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
# OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
# ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
State S37 : UpdateData
"""
from ...util.funcutils import singleton
from .StateSCC import StateSCC
@singleton
class StateS37A(StateSCC):
"""State S37 : UpdateData"""
def do(self, handler, data):
"""Action of the state S37A: treat response of UpdateData request"""
with handler.lock:
try:
# Test if request is rejected
is_KO = data[:5] == b"ERROR"
if is_KO:
raise Exception((data[6:]).decode())
# Test if request is accepted
is_OK = data[:2] == b"OK"
if is_OK:
# Notify the handler a property has changed
if handler.core.notify:
handler.loop.run_in_executor(
None, handler.notify, "application.state",
"Information updated by server")
# Indicate the actual task is done
handler.core.taskInProgress = False
else:
raise Exception("S37 protocol error")
except Exception as exc:
# Schedule a call to the exception handler
handler.loop.call_soon_threadsafe(handler.exception_handler, exc) | PypiClean |
/Gizela-1.0.18.tar.gz/Gizela-1.0.18/gizela/pyplot/FigureLayoutBase.py |
import matplotlib
import gizela
from gizela.util.Error import Error
from gizela.pyplot.PlotPoint import PlotPoint
from gizela.pyplot.BackendSingleton import BackendSingleton
#import math
class FigureLayoutBaseError(Error): pass
class FigureLayoutBase(object):
'''
    object with a matplotlib Figure instance
    base class for other Layouts
    just a figure with one axes rectangle
    handles: axes orientation
             axes scale
             saving the figure as an image
             plotting points xy, or the x, y, or z coordinate separately
             plotting standard deviation along the vertical axis
             plotting error ellipses
'''
# matplotlib backend
#backend = None # backend - just the first instance can set backend
#@classmethod
#def set_backend(cls, backend):
# if cls.backend == None:
# try:
# matplotlib.use(backend)
# cls.backend = backend
# except:
# raise FigureLayoutBaseError, "Backend set error"
# else:
# if backend != cls.backend:
# raise FigureLayoutBaseError, "Different backend can not set"
def __init__(self,
axesOri="en",
figScale=None,
configFileName=None):
"""
axesOri: orientation of axes ne, en, sw, ws, ...
figScale: scale of data in axes
configFileName ... name of configuration file
"""
# sets self.config dictionary
self.parse_config_file(name=configFileName)
# first of all set backend
# call the BackendSingleton class
if "backend" in self.config and "name" in self.config["backend"]:
self.backend = BackendSingleton(self.config["backend"]["name"])
else:
self.backend = BackendSingleton("GTK") # implicit backend
# set figure
import matplotlib.pyplot
self.figure = matplotlib.pyplot.figure() #figure instance
# set size of figure
if "figure" in self.config and "size" in self.config["figure"]:
self.figSize = [float(s)
for s in self.config["figure"]["size"].split(",")]
else:
# default value A4
self.figSize = [297, 210]
# figure size in milimeters
self.figWidth = self.figSize[0]
self.figHeight = self.figSize[1]
sizei = [i / 25.4 for i in self.figSize]
self.figure.set_size_inches(sizei, forward=True)
# forward figure size to window size
# works just for GTK* and WX* backends
# compute sizes
if "figure" in self.config and "border" in self.config["figure"]:
self.border = self.config["figure"]["border"]
else: self.border = 5 # implicit value
if "axes" in self.config and "tickLabelSpace" in self.config["axes"]:
tickSpace = [float(i)
for i in self.config["axes"]["tickLabelSpace"].split(",")]
else: tickSpace = [7.0, 10.0] # implicit value
self.tickSpaceX = tickSpace[0]
self.tickSpaceY = tickSpace[1]
# position of axes in 0-1
self.posAxes = [self.border/self.figWidth,
self.border/self.figHeight,
1 - (2*self.border + self.tickSpaceY)/self.figWidth,
1 - (2*self.border + self.tickSpaceX)/self.figHeight
]
# offset for posAxes
self.posTickSpace = [self.tickSpaceY/self.figWidth,
self.tickSpaceX/self.figHeight,
0, 0]
# set axes
self.figure.add_axes(self.posAxes)
self.set_axes(axesOri)
# set adjustable and autoscale
self.gca().set_adjustable("datalim")
#self.gca().set_autoscale_on(False)
# set tick formatter
import matplotlib.ticker as ticker
formatter = ticker.ScalarFormatter(useOffset=False)
formatter.set_powerlimits((-4,10))
self.gca().xaxis.set_major_formatter(formatter)
self.gca().yaxis.set_major_formatter(formatter)
# scale
self.figScale = None
if figScale is not None:
self.set_scale_ratio(figScale)
self.figScale = figScale
# sets logo
self.logo = self.figure.text(1 - self.border/2/self.figWidth,
self.border/2/self.figHeight,
">--Gizela-%s-->" % gizela.__version__,
fontsize=6,
verticalalignment="center",
horizontalalignment="right")
#transform=self.figure.tranFigure)
# set scale bar
#if "scaleBar" in self.config and figScale is not None:
# from matplotlib.patches import Rectangle
# if self.config["scaleBar"]["visible"] == "on":
# # plot scale bar
# width = self.config["scaleBar"]["width"]/100
# height = self.config["scaleBar"]["height"]/100
# offset = self.config["scaleBar"]["offset"]/100
# width_m = width * self.figWidth / figScale
# #: width of bar in meters in real
# exp = 10**math.round(math.log10(width_m))
# widthi_m = width_m/4.0
# xy = [1 - width - offset, offset]
# widthi = width/4
# facecolor="white"
# trnax = self.gca().transAxes
# for i in xrange(4):
# self.gca().add_patch(Rectangle(xy=xy,
# width=widthi,
# height=height,
# transform=trnax,
# facecolor=facecolor))
# xy[0] += widthi
# if facecolor is "white":
# facecolor="black"
# else:
# facecolor="white"
def update_(self, axesOri=None, figScale=None):
"updates properties of figure"
if axesOri is not None:
self.set_axes(axesOri=axesOri)
if figScale is not None:
self.set_scale_ratio(figScale)
def set_axes(self, axesOri="en", xLabel="X", yLabel="Y"):
"""
        axesOri: orientation of axes: ne, nw, se, sw,
                 en, wn, es, ws
        sets direction and position of ticks and their properties
sets _swapXY for drawing
sets position of axes object and posAxes attribute
"""
#import sys
#print >>sys.stderr, "set_axes", axesOri
ax = self.gca()
self._axesOri = axesOri
if axesOri == "ne" or axesOri == "en":
self.posTickSpace[0] = self.tickSpaceY/self.figWidth
self.posTickSpace[1] = self.tickSpaceX/self.figHeight
elif axesOri == "sw" or axesOri == "ws":
# direction of axes
if not ax.xaxis_inverted():
ax.invert_xaxis()
if not ax.yaxis_inverted():
ax.invert_yaxis()
# ticks position
for tick in ax.xaxis.get_major_ticks():
tick.label1On = False
tick.label2On = True
for tick in ax.yaxis.get_major_ticks():
tick.label1On = False
tick.label2On = True
# position of axes
self.posTickSpace[0] = 0
self.posTickSpace[1] = 0
elif axesOri == "es" or axesOri == "se":
# direction of axes
if not ax.yaxis_inverted():
ax.invert_yaxis()
# ticks position
for tick in ax.xaxis.get_major_ticks():
tick.label1On = False
tick.label2On = True
# position of axes
self.posTickSpace[0] = self.tickSpaceY/self.figWidth
self.posTickSpace[1] = 0
elif axesOri == "wn" or axesOri == "nw":
# direction of axes
if not ax.xaxis_inverted():
ax.invert_xaxis()
# ticks position
for tick in ax.yaxis.get_major_ticks():
tick.label1On = False
tick.label2On = True
# position of axes
self.posTickSpace[0] = 0
self.posTickSpace[1] = self.tickSpaceX/self.figHeight
else:
raise FigureLayoutBaseError, "Unknown axes orientation %s" % axesOri
# set axes position
self._set_axes_position()
# set ticks label properties
for l in ax.xaxis.get_ticklabels():
if "axes" in self.config and "tickFontSize" in self.config["axes"]:
l.set_fontsize(self.config["axes"]["tickFontSize"])
else:
l.set_fontsize(6)
for l in ax.yaxis.get_ticklabels():
if "axes" in self.config and "tickFontSize" in self.config["axes"]:
l.set_fontsize(self.config["axes"]["tickFontSize"])
else:
l.set_fontsize(6)
# set swapXY
if axesOri == "ne" or axesOri == "nw" \
or axesOri == "se" or axesOri == "sw":
self._swapXY = True
else:
self._swapXY = False
#sets label of x-axis
if axesOri=="en" or axesOri=="wn" or axesOri=="es" or axesOri=="ws":
ax.xaxis.set_label_text(xLabel)
if axesOri=="es" or axesOri=="ws":
ax.xaxis.set_label_position("top")
else:
ax.xaxis.set_label_position("bottom")
else:
ax.yaxis.set_label_text(xLabel)
if axesOri=="se" or axesOri=="ne":
ax.yaxis.set_label_position("left")
else:
ax.yaxis.set_label_position("right")
#sets label of y axis
if axesOri=="ne" or axesOri=="nw" or axesOri=="se" or axesOri=="sw":
ax.xaxis.set_label_text(yLabel)
if axesOri=="se" or axesOri=="sw":
ax.xaxis.set_label_position("top")
else:
ax.xaxis.set_label_position("bottom")
else:
ax.yaxis.set_label_text(yLabel)
if axesOri=="es" or axesOri=="en":
ax.yaxis.set_label_position("left")
else:
ax.yaxis.set_label_position("right")
def _set_axes_position(self):
self.gca().set_position([i+j for i, j in zip(self.posAxes,
self.posTickSpace)])
def get_axes_ori(self): return self._axesOri
def gca(self):
"returns current axes"
return self.figure.gca()
def plot_xy(self, x, y):
"plots data to axes with respect to axes orientation"
if type(x) != list and type(x) != tuple:
x = [x]
if type(y) != list and type(y) != tuple:
y = [y]
if self._swapXY:
return self.gca().plot(y, x)
else:
return self.gca().plot(x, y)
def set_aspect_equal(self):
"sets equal aspect ratio for axes"
self.gca().set_aspect("equal")
def is_swap_xy(self):
return self._swapXY
def get_scale_ratio(self):
"""
returns scale ratio for x and y axis
        assumes that data are in meters
"""
xmin, xmax = self.gca().get_xbound()
ymin, ymax = self.gca().get_ybound()
return (self.posAxes[2]*self.figWidth/1000)/(xmax - xmin),\
(self.posAxes[3]*self.figHeight/1000)/(ymax - ymin)
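    # e.g. a 1000 m data span drawn across 0.2 m of paper gives a ratio of
    # 0.2/1000, which get_scale_ratio_string() renders as "1 : 5000"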
def get_scale_ratio_x(self):
return self.get_scale_ratio()[0]
def get_scale_ratio_y(self):
return self.get_scale_ratio()[1]
def set_scale_ratio(self, ratio):
"set scale ration of both x and y axes"
self.set_scale_ratio_x(ratio)
self.set_scale_ratio_y(ratio)
def set_scale_ratio_x(self, ratio):
"""
sets scale ratio of x axis
manipulating xlim properties of axes object
"""
dx_ = self.posAxes[2]*self.figWidth/1000/ratio
xmin, xmax = self.gca().get_xbound()
dx = xmax - xmin
ddx = dx_ - dx
xmin, xmax = xmin - ddx/2, xmax + ddx/2
self.gca().set_xbound(xmin, xmax)
def set_scale_ratio_y(self, ratio):
"""
sets scale ratio of y axis
manipulating ylim properties of axes object
"""
dy_ = self.posAxes[3]*self.figHeight/1000/ratio
ymin, ymax = self.gca().get_ybound()
dy = ymax - ymin
ddy = dy_ - dy
ymin, ymax = ymin - ddy/2, ymax + ddy/2
self.gca().set_ybound(ymin, ymax)
@staticmethod
def get_scale_ratio_string(ratio):
if ratio > 1.0:
            if abs(round(ratio) - ratio) > 1e-5:
return "%.5f : 1" % ratio
else:
return "%.0f : 1" % ratio
else:
ratio = 1.0/ratio
            if abs(round(ratio) - ratio) > 1e-5:
return "1 : %.5f" % ratio
else:
return "1 : %.0f" % ratio
def get_scale_ratio_string_min(self):
"returns string with min scale ratio"
return self.get_scale_ratio_string(min(self.get_scale_ratio()))
def get_scale_ratio_string_y(self):
"returns scale ratio of y axis - vertical axis"
return self.get_scale_ratio_string(self.get_scale_ratio()[1])
def show_(self, mainloop=True):
"""
show figure
"""
if self.figScale is not None:
self.set_scale_ratio(self.figScale)
import matplotlib.pyplot
matplotlib.pyplot.show(mainloop)
def set_free_space(self, border=10, equal=False):
"""
        border: white space around the drawing, in percent
equal: equal border for x and y direction?
"""
xmin, xmax = self.gca().get_xlim()
ymin, ymax = self.gca().get_ylim()
dx = xmax - xmin
dy = ymax - ymin
dxp = dx * border/100
dyp = dy * border/100
if equal:
dxyp = (dxp + dyp)/2 # mean value
dxp = dxyp
dyp = dxyp
self.gca().set_xlim((xmin - dxp, xmax + dxp))
self.gca().set_ylim((ymin - dyp, ymax + dyp))
def save_as(self, fileName="figure"):
"saves figure as image"
if self.figScale is not None:
self.set_scale_ratio(self.figScale)
dpi = self.config["figure"]["dpi"]
# set image size
sizem = self.config["figure"]["size"]
sizei = [float(i) / 25.4 for i in sizem.split(",")]
self.figure.set_size_inches(sizei)
import sys
print >>sys.stderr, "Figure name:", fileName,\
"size (mm):", sizem, "DPI:", dpi
#self.figure.set_dpi(dpi)
self.figure.savefig(fileName, dpi=dpi)
def parse_config_file(self, name):
"parser for configuration file"
import ConfigParser, os, sys
configParser = ConfigParser.SafeConfigParser()
configParser.optionxform = str # to make options case sensitive
defaults = os.path.sep.join(["gizela", "pyplot", "default.cfg"])
path = [p + os.path.sep + defaults for p in sys.path]
if name is not None:
path.extend([p + os.path.sep + name for p in sys.path])
path.append(name)
readed = configParser.read(path)
#os.path.expanduser("~/" + name),
#"./" + name])
print >>sys.stderr, \
"Figure configuration file(s) readed: %s" % ", ".join(readed)
self.config = {}
for sec in configParser.sections():
self.config[sec] = {}
for p,v in configParser.items(sec):
try:
v=float(v)
except:
pass
self.config[sec][p] = v
def get_config_section(self, section):
"returns configuration section items in dictionary"
return self.config[section]
def set_style(self, style, artist):
"""
sets properties of artist according to
configuration file.
        style: the name of a section in the config file or
               a dictionary with properties
artist: instance of graphic object (line, text, ...)
"""
if type(style) is str:
style = self.get_config_section(style)
for p, v in style.items():
fun = getattr(artist, "set_" + p)
fun(v)
def get_style_dict(self, style):
"returns style dictionary of properties"
return self.get_config_section(style)
def get_label_tran(self):
"return transformation for text labels"
from matplotlib.transforms import offset_copy
offset = self.get_config_section("pointLabelOffset")
return offset_copy(self.gca().transData, self.figure,
offset["x"], offset["y"],
units="points")
def plot_point_dot(self, point):
PlotPoint.plot_point_dot(self, point, style="pointDotStyle")
def plot_point_fix_dot(self, point):
PlotPoint.plot_point_dot(self, point, style="pointFixDotStyle")
def plot_point_con_dot(self, point):
PlotPoint.plot_point_dot(self, point, style="pointConDotStyle")
def plot_point_adj_dot(self, point):
PlotPoint.plot_point_dot(self, point, style="pointAdjDotStyle")
def plot_point_label(self, point):
PlotPoint.plot_point_label(self, point, style="pointLabelStyle")
def plot_point_fix_label(self, point):
PlotPoint.plot_point_label(self, point, style="pointFixLabelStyle")
def plot_point_con_label(self, point):
PlotPoint.plot_point_label(self, point, style="pointConLabelStyle")
def plot_point_adj_label(self, point):
PlotPoint.plot_point_label(self, point, style="pointAdjLabelStyle")
def plot_point_x(self, point, x):
PlotPoint.plot_point_x(self, point, x, style="pointDotStyle")
def plot_point_y(self, point, x):
PlotPoint.plot_point_y(self, point, x, style="pointDotStyle")
def plot_point_z(self, point, x):
PlotPoint.plot_point_z(self, point, x, style="pointDotStyle")
#def plot_point_error_ellipse(self, point): pass
#def plot_point_x_stdev(self, point, x): pass
#def plot_point_y_stdev(self, point, x): pass
#def plot_point_z_stdev(self, point, x): pass
#def plot_point_error_z(self, point): pass
def plot_scale_bar(self):pass
if __name__ == "__main__":
fig = FigureLayoutBase(figScale=1e-5)
fig.set_axes("sw")
fig.plot_xy([1e3, 1.001e3, 1.001e3, 1e3, 1e3],
[0.5e3, 0.5e3, 0.501e3, 0.501e3, 0.5e3])
scalex, scaley = fig.get_scale_ratio()
print 1/scalex, 1/scaley
    fig.set_aspect_equal()
    scalex, scaley = fig.get_scale_ratio()  # re-read the ratios after the aspect change
    print 1/scalex, 1/scaley
fig.show_()
#fig.save_as() | PypiClean |
/EOxServer-1.2.12-py3-none-any.whl/eoxserver/services/ows/wps/v10/encoders/execute_response.py |
from lxml import etree
from django.utils.timezone import now
from django.utils.six import itervalues
from eoxserver.core.config import get_eoxserver_config
from eoxserver.services.ows.common.config import CapabilitiesConfigReader
from eoxserver.core.util.timetools import isoformat
from eoxserver.services.ows.wps.v10.util import WPS, OWS, ns_xlink, ns_xml
from eoxserver.services.ows.wps.exceptions import OWS10Exception
from eoxserver.services.ows.wps.parameters import (
Parameter, LiteralData, ComplexData, BoundingBoxData,
fix_parameter, InputReference, Reference, RequestParameter,
)
from .process_description import encode_process_brief
from .parameters import (
encode_input_exec, encode_output_exec, encode_output_def
)
from .base import WPS10BaseXMLEncoder
from eoxserver.services.ows.wps.exceptions import InvalidOutputValueError
class WPS10ExecuteResponseXMLEncoder(WPS10BaseXMLEncoder):
""" WPS 1.0 ExecuteResponse XML response encoder. """
def __init__(self, process, resp_form, raw_inputs, inputs=None,
status_location=None):
super(WPS10ExecuteResponseXMLEncoder, self).__init__()
self.process = process
self.resp_form = resp_form
self.raw_inputs = raw_inputs
self.inputs = inputs
self.status_location = status_location
def _encode_common(self, status):
""" Encode common response element. """
elem = _encode_common_response(
self.process, status, self.inputs, self.raw_inputs, self.resp_form
)
if self.status_location:
elem.set("statusLocation", self.status_location)
return elem
def encode_response(self, results):
"""Encode ProcessSucceeded execute response including the output data."""
elem = self._encode_common(WPS(
"ProcessSucceeded",
"The processes execution completed successfully."
))
outputs = []
for result, prm, req in itervalues(results):
outputs.append(_encode_output(result, prm, req))
elem.append(WPS("ProcessOutputs", *outputs))
return elem
def encode_failed(self, exception):
""" Encode ProcessFailed execute response."""
# NOTE: Some exceptions such as the urllib2.HTTPError have also
# the 'code' attribute and the duck typing does not work very well.
# Therefore we need match the exception base type.
if isinstance(exception, OWS10Exception):
code = exception.code
locator = exception.locator
else:
code = "NoApplicableCode"
locator = type(exception).__name__
message = str(exception)
exc_attr = {"exceptionCode": str(code)}
if locator:
exc_attr["locator"] = str(locator)
exc_elem = OWS("Exception", OWS("ExceptionText", message), **exc_attr)
status = WPS("ProcessFailed", WPS("ExceptionReport", exc_elem))
return self._encode_common(status)
def encode_started(self, progress=0, message=None):
""" Encode ProcessStarted execute response."""
if not message:
message = "The processes execution is in progress."
return self._encode_common(WPS(
"ProcessStarted", message,
percentCompleted=("%d" % min(99, max(0, int(float(progress)))))
))
def encode_paused(self, progress=0):
""" Encode ProcessPaused execute response."""
return self._encode_common(WPS(
"ProcessPaused", "The processes execution is paused.",
percentCompleted=("%d" % min(99, max(0, int(float(progress)))))
))
def encode_accepted(self):
""" Encode ProcessAccepted execute response."""
return self._encode_common(WPS(
"ProcessAccepted", "The processes was accepted for execution."
))
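# The encoders above yield a wps:ExecuteResponse document shaped roughly like
# this sketch (element content abbreviated):
#
#   <wps:ExecuteResponse service="WPS" version="1.0.0" serviceInstance="...">
#     <wps:Process>...</wps:Process>
#     <wps:Status creationTime="...">
#       <wps:ProcessAccepted>The process was accepted for execution.</wps:ProcessAccepted>
#     </wps:Status>
#   </wps:ExecuteResponse>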
#-------------------------------------------------------------------------------
def _encode_common_response(process, status_elem, inputs, raw_inputs, resp_doc):
"""Encode common execute response part shared by all specific responses."""
inputs = inputs or {}
conf = CapabilitiesConfigReader(get_eoxserver_config())
url = conf.http_service_url
if url[-1] == "?":
url = url[:-1]
elem = WPS("ExecuteResponse",
encode_process_brief(process),
WPS("Status", status_elem, creationTime=isoformat(now())),
{
"service": "WPS",
"version": "1.0.0",
ns_xml("lang"): "en-US",
"serviceInstance": (
"%s?service=WPS&version=1.0.0&request=GetCapabilities" % url
)
},
)
if resp_doc.lineage:
inputs_data = []
for id_, prm in process.inputs:
if isinstance(prm, RequestParameter):
continue
prm = fix_parameter(id_, prm)
data = inputs.get(id_)
rawinp = raw_inputs.get(prm.identifier)
if rawinp is not None:
inputs_data.append(_encode_input(data, prm, rawinp))
elem.append(WPS("DataInputs", *inputs_data))
outputs_def = []
for id_, prm in process.outputs:
prm = fix_parameter(id_, prm)
outdef = resp_doc.get(prm.identifier)
if outdef is not None:
outputs_def.append(encode_output_def(outdef))
elem.append(WPS("OutputDefinitions", *outputs_def))
return elem
def _encode_input(data, prm, raw):
""" Encode one DataInputs sub-element. """
elem = encode_input_exec(raw)
if isinstance(raw, InputReference):
elem.append(_encode_input_reference(raw))
elif isinstance(prm, LiteralData):
elem.append(WPS("Data", _encode_raw_input_literal(raw, prm)))
elif isinstance(prm, BoundingBoxData):
if data is None:
data = prm.parse(raw.data)
elem.append(WPS("Data", _encode_bbox(data, prm)))
elif isinstance(prm, ComplexData):
if data is None:
data = prm.parse(
data=raw.data, mime_type=raw.mime_type, encoding=raw.encoding,
schema=raw.schema
)
elem.append(WPS("Data", _encode_complex(data, prm)))
return elem
def _encode_output(data, prm, req):
""" Encode one ProcessOutputs sub-element. """
elem = encode_output_exec(Parameter(
prm.identifier, req.title or prm.title, req.abstract or prm.abstract
))
if isinstance(data, Reference):
elem.append(_encode_output_reference(data, prm))
elif isinstance(prm, LiteralData):
elem.append(WPS("Data", _encode_literal(data, prm, req)))
elif isinstance(prm, BoundingBoxData):
elem.append(WPS("Data", _encode_bbox(data, prm)))
elif isinstance(prm, ComplexData):
elem.append(WPS("Data", _encode_complex(data, prm)))
return elem
def _encode_input_reference(ref):
""" Encode DataInputs/Reference element. """
#TODO proper input reference encoding
return WPS("Reference", **{ns_xlink("href"): ref.href})
def _encode_output_reference(ref, prm):
""" Encode ProcessOutputs/Reference element. """
#TODO proper output reference encoding
mime_type = getattr(ref, 'mime_type', None)
encoding = getattr(ref, 'encoding', None)
schema = getattr(ref, 'schema', None)
if mime_type is None and hasattr(prm, 'default_format'):
default_format = prm.default_format
mime_type = default_format.mime_type
encoding = default_format.encoding
schema = default_format.schema
attr = {
#ns_xlink("href"): ref.href,
'href': ref.href,
}
if mime_type:
attr['mimeType'] = mime_type
if encoding is not None:
attr['encoding'] = encoding
if schema is not None:
attr['schema'] = schema
return WPS("Reference", **attr)
def _encode_raw_input_literal(input_raw, prm):
""" Encode Data/LiteralData element from a raw (unparsed) input ."""
attrib = {'dataType': prm.dtype.name}
uom = input_raw.uom or prm.default_uom
if prm.uoms:
attrib['uom'] = uom
return WPS("LiteralData", input_raw.data, **attrib)
def _encode_literal(data, prm, req):
""" Encode Data/LiteralData element. """
attrib = {'dataType': prm.dtype.name}
uom = req.uom or prm.default_uom
if prm.uoms:
attrib['uom'] = uom
try:
encoded_data = prm.encode(data, uom)
except (ValueError, TypeError) as exc:
raise InvalidOutputValueError(prm.identifier, exc)
return WPS("LiteralData", encoded_data, **attrib)
def _encode_bbox(data, prm):
""" Encode Data/BoundingBoxData element. """
try:
lower, upper, crs = prm.encode_xml(data)
except (ValueError, TypeError) as exc:
raise InvalidOutputValueError(prm.identifier, exc)
return WPS("BoundingBoxData",
OWS("LowerCorner", lower),
OWS("UpperCorner", upper),
crs=crs,
#dimension="%d"%prm.dimension,
)
#NOTE: Although derived from OWS BoundingBox the WPS (schema) does not
# allow the dimension attribute.
def _encode_format_attr(data, prm):
""" Get format attributes of the Data/ComplexData element. """
mime_type = getattr(data, 'mime_type', None)
if mime_type is not None:
encoding = getattr(data, 'encoding', None)
schema = getattr(data, 'schema', None)
else:
deffrm = prm.default_format
mime_type = deffrm.mime_type
encoding = deffrm.encoding
schema = deffrm.schema
attr = {"mimeType": mime_type}
if encoding is not None:
attr['encoding'] = encoding
if schema is not None:
attr['schema'] = schema
return attr
def _encode_complex(data, prm):
""" Encode Data/ComplexData element. """
try:
payload = prm.encode_xml(data)
except (ValueError, TypeError) as exc:
raise InvalidOutputValueError(prm.identifier, exc)
elem = WPS("ComplexData", **_encode_format_attr(data, prm))
if isinstance(payload, etree._Element):
elem.append(payload)
else:
elem.text = etree.CDATA(payload)
return elem | PypiClean |
/githubkit-0.10.7-py3-none-any.whl/githubkit/rest/migrations.py | from typing import TYPE_CHECKING, Dict, List, Union, Literal, Optional, overload
from pydantic import BaseModel, parse_obj_as
from githubkit.utils import UNSET, Missing, exclude_unset
from .types import (
UserMigrationsPostBodyType,
OrgsOrgMigrationsPostBodyType,
ReposOwnerRepoImportPutBodyType,
ReposOwnerRepoImportPatchBodyType,
ReposOwnerRepoImportLfsPatchBodyType,
ReposOwnerRepoImportAuthorsAuthorIdPatchBodyType,
)
from .models import (
Import,
Migration,
BasicError,
PorterAuthor,
PorterLargeFile,
ValidationError,
MinimalRepository,
UserMigrationsPostBody,
OrgsOrgMigrationsPostBody,
ReposOwnerRepoImportPutBody,
ReposOwnerRepoImportPatchBody,
ReposOwnerRepoImportLfsPatchBody,
ReposOwnerRepoImportAuthorsAuthorIdPatchBody,
)
if TYPE_CHECKING:
from githubkit import GitHubCore
from githubkit.response import Response
class MigrationsClient:
_REST_API_VERSION = "2022-11-28"
def __init__(self, github: "GitHubCore"):
self._github = github
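    # A minimal usage sketch (assumes an authenticated client; the accessor
    # path follows githubkit's generated REST namespaces):
    #
    #   from githubkit import GitHub
    #   gh = GitHub("<token>")
    #   resp = gh.rest.migrations.list_for_org("my-org", per_page=10)
    #   migrations = resp.parsed_data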
def list_for_org(
self,
org: str,
per_page: Missing[int] = 30,
page: Missing[int] = 1,
exclude: Missing[List[Literal["repositories"]]] = UNSET,
*,
headers: Optional[Dict[str, str]] = None,
) -> "Response[List[Migration]]":
url = f"/orgs/{org}/migrations"
params = {
"per_page": per_page,
"page": page,
"exclude": exclude,
}
headers = {"X-GitHub-Api-Version": self._REST_API_VERSION, **(headers or {})}
return self._github.request(
"GET",
url,
params=exclude_unset(params),
headers=exclude_unset(headers),
response_model=List[Migration],
)
async def async_list_for_org(
self,
org: str,
per_page: Missing[int] = 30,
page: Missing[int] = 1,
exclude: Missing[List[Literal["repositories"]]] = UNSET,
*,
headers: Optional[Dict[str, str]] = None,
) -> "Response[List[Migration]]":
url = f"/orgs/{org}/migrations"
params = {
"per_page": per_page,
"page": page,
"exclude": exclude,
}
headers = {"X-GitHub-Api-Version": self._REST_API_VERSION, **(headers or {})}
return await self._github.arequest(
"GET",
url,
params=exclude_unset(params),
headers=exclude_unset(headers),
response_model=List[Migration],
)
@overload
def start_for_org(
self,
org: str,
*,
headers: Optional[Dict[str, str]] = None,
data: OrgsOrgMigrationsPostBodyType,
) -> "Response[Migration]":
...
@overload
def start_for_org(
self,
org: str,
*,
data: Literal[UNSET] = UNSET,
headers: Optional[Dict[str, str]] = None,
repositories: List[str],
lock_repositories: Missing[bool] = False,
exclude_metadata: Missing[bool] = False,
exclude_git_data: Missing[bool] = False,
exclude_attachments: Missing[bool] = False,
exclude_releases: Missing[bool] = False,
exclude_owner_projects: Missing[bool] = False,
org_metadata_only: Missing[bool] = False,
exclude: Missing[List[Literal["repositories"]]] = UNSET,
) -> "Response[Migration]":
...
def start_for_org(
self,
org: str,
*,
headers: Optional[Dict[str, str]] = None,
data: Missing[OrgsOrgMigrationsPostBodyType] = UNSET,
**kwargs,
) -> "Response[Migration]":
url = f"/orgs/{org}/migrations"
headers = {"X-GitHub-Api-Version": self._REST_API_VERSION, **(headers or {})}
if not kwargs:
kwargs = UNSET
json = kwargs if data is UNSET else data
json = parse_obj_as(OrgsOrgMigrationsPostBody, json)
json = json.dict(by_alias=True) if isinstance(json, BaseModel) else json
return self._github.request(
"POST",
url,
json=exclude_unset(json),
headers=exclude_unset(headers),
response_model=Migration,
error_models={
"404": BasicError,
"422": ValidationError,
},
)
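    # e.g. (sketch, kwargs form): gh.rest.migrations.start_for_org(
    #     "my-org", repositories=["my-repo"], lock_repositories=True)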
@overload
async def async_start_for_org(
self,
org: str,
*,
headers: Optional[Dict[str, str]] = None,
data: OrgsOrgMigrationsPostBodyType,
) -> "Response[Migration]":
...
@overload
async def async_start_for_org(
self,
org: str,
*,
data: Literal[UNSET] = UNSET,
headers: Optional[Dict[str, str]] = None,
repositories: List[str],
lock_repositories: Missing[bool] = False,
exclude_metadata: Missing[bool] = False,
exclude_git_data: Missing[bool] = False,
exclude_attachments: Missing[bool] = False,
exclude_releases: Missing[bool] = False,
exclude_owner_projects: Missing[bool] = False,
org_metadata_only: Missing[bool] = False,
exclude: Missing[List[Literal["repositories"]]] = UNSET,
) -> "Response[Migration]":
...
async def async_start_for_org(
self,
org: str,
*,
headers: Optional[Dict[str, str]] = None,
data: Missing[OrgsOrgMigrationsPostBodyType] = UNSET,
**kwargs,
) -> "Response[Migration]":
url = f"/orgs/{org}/migrations"
headers = {"X-GitHub-Api-Version": self._REST_API_VERSION, **(headers or {})}
if not kwargs:
kwargs = UNSET
json = kwargs if data is UNSET else data
json = parse_obj_as(OrgsOrgMigrationsPostBody, json)
json = json.dict(by_alias=True) if isinstance(json, BaseModel) else json
return await self._github.arequest(
"POST",
url,
json=exclude_unset(json),
headers=exclude_unset(headers),
response_model=Migration,
error_models={
"404": BasicError,
"422": ValidationError,
},
)
def get_status_for_org(
self,
org: str,
migration_id: int,
exclude: Missing[List[Literal["repositories"]]] = UNSET,
*,
headers: Optional[Dict[str, str]] = None,
) -> "Response[Migration]":
url = f"/orgs/{org}/migrations/{migration_id}"
params = {
"exclude": exclude,
}
headers = {"X-GitHub-Api-Version": self._REST_API_VERSION, **(headers or {})}
return self._github.request(
"GET",
url,
params=exclude_unset(params),
headers=exclude_unset(headers),
response_model=Migration,
error_models={
"404": BasicError,
},
)
async def async_get_status_for_org(
self,
org: str,
migration_id: int,
exclude: Missing[List[Literal["repositories"]]] = UNSET,
*,
headers: Optional[Dict[str, str]] = None,
) -> "Response[Migration]":
url = f"/orgs/{org}/migrations/{migration_id}"
params = {
"exclude": exclude,
}
headers = {"X-GitHub-Api-Version": self._REST_API_VERSION, **(headers or {})}
return await self._github.arequest(
"GET",
url,
params=exclude_unset(params),
headers=exclude_unset(headers),
response_model=Migration,
error_models={
"404": BasicError,
},
)
def download_archive_for_org(
self,
org: str,
migration_id: int,
*,
headers: Optional[Dict[str, str]] = None,
) -> "Response":
url = f"/orgs/{org}/migrations/{migration_id}/archive"
headers = {"X-GitHub-Api-Version": self._REST_API_VERSION, **(headers or {})}
return self._github.request(
"GET",
url,
headers=exclude_unset(headers),
error_models={
"404": BasicError,
},
)
async def async_download_archive_for_org(
self,
org: str,
migration_id: int,
*,
headers: Optional[Dict[str, str]] = None,
) -> "Response":
url = f"/orgs/{org}/migrations/{migration_id}/archive"
headers = {"X-GitHub-Api-Version": self._REST_API_VERSION, **(headers or {})}
return await self._github.arequest(
"GET",
url,
headers=exclude_unset(headers),
error_models={
"404": BasicError,
},
)
def delete_archive_for_org(
self,
org: str,
migration_id: int,
*,
headers: Optional[Dict[str, str]] = None,
) -> "Response":
url = f"/orgs/{org}/migrations/{migration_id}/archive"
headers = {"X-GitHub-Api-Version": self._REST_API_VERSION, **(headers or {})}
return self._github.request(
"DELETE",
url,
headers=exclude_unset(headers),
error_models={
"404": BasicError,
},
)
async def async_delete_archive_for_org(
self,
org: str,
migration_id: int,
*,
headers: Optional[Dict[str, str]] = None,
) -> "Response":
url = f"/orgs/{org}/migrations/{migration_id}/archive"
headers = {"X-GitHub-Api-Version": self._REST_API_VERSION, **(headers or {})}
return await self._github.arequest(
"DELETE",
url,
headers=exclude_unset(headers),
error_models={
"404": BasicError,
},
)
def unlock_repo_for_org(
self,
org: str,
migration_id: int,
repo_name: str,
*,
headers: Optional[Dict[str, str]] = None,
) -> "Response":
url = f"/orgs/{org}/migrations/{migration_id}/repos/{repo_name}/lock"
headers = {"X-GitHub-Api-Version": self._REST_API_VERSION, **(headers or {})}
return self._github.request(
"DELETE",
url,
headers=exclude_unset(headers),
error_models={
"404": BasicError,
},
)
async def async_unlock_repo_for_org(
self,
org: str,
migration_id: int,
repo_name: str,
*,
headers: Optional[Dict[str, str]] = None,
) -> "Response":
url = f"/orgs/{org}/migrations/{migration_id}/repos/{repo_name}/lock"
headers = {"X-GitHub-Api-Version": self._REST_API_VERSION, **(headers or {})}
return await self._github.arequest(
"DELETE",
url,
headers=exclude_unset(headers),
error_models={
"404": BasicError,
},
)
def list_repos_for_org(
self,
org: str,
migration_id: int,
per_page: Missing[int] = 30,
page: Missing[int] = 1,
*,
headers: Optional[Dict[str, str]] = None,
) -> "Response[List[MinimalRepository]]":
url = f"/orgs/{org}/migrations/{migration_id}/repositories"
params = {
"per_page": per_page,
"page": page,
}
headers = {"X-GitHub-Api-Version": self._REST_API_VERSION, **(headers or {})}
return self._github.request(
"GET",
url,
params=exclude_unset(params),
headers=exclude_unset(headers),
response_model=List[MinimalRepository],
error_models={
"404": BasicError,
},
)
async def async_list_repos_for_org(
self,
org: str,
migration_id: int,
per_page: Missing[int] = 30,
page: Missing[int] = 1,
*,
headers: Optional[Dict[str, str]] = None,
) -> "Response[List[MinimalRepository]]":
url = f"/orgs/{org}/migrations/{migration_id}/repositories"
params = {
"per_page": per_page,
"page": page,
}
headers = {"X-GitHub-Api-Version": self._REST_API_VERSION, **(headers or {})}
return await self._github.arequest(
"GET",
url,
params=exclude_unset(params),
headers=exclude_unset(headers),
response_model=List[MinimalRepository],
error_models={
"404": BasicError,
},
)
def get_import_status(
self,
owner: str,
repo: str,
*,
headers: Optional[Dict[str, str]] = None,
) -> "Response[Import]":
url = f"/repos/{owner}/{repo}/import"
headers = {"X-GitHub-Api-Version": self._REST_API_VERSION, **(headers or {})}
return self._github.request(
"GET",
url,
headers=exclude_unset(headers),
response_model=Import,
error_models={
"404": BasicError,
"503": BasicError,
},
)
async def async_get_import_status(
self,
owner: str,
repo: str,
*,
headers: Optional[Dict[str, str]] = None,
) -> "Response[Import]":
url = f"/repos/{owner}/{repo}/import"
headers = {"X-GitHub-Api-Version": self._REST_API_VERSION, **(headers or {})}
return await self._github.arequest(
"GET",
url,
headers=exclude_unset(headers),
response_model=Import,
error_models={
"404": BasicError,
"503": BasicError,
},
)
@overload
def start_import(
self,
owner: str,
repo: str,
*,
headers: Optional[Dict[str, str]] = None,
data: ReposOwnerRepoImportPutBodyType,
) -> "Response[Import]":
...
@overload
def start_import(
self,
owner: str,
repo: str,
*,
data: Literal[UNSET] = UNSET,
headers: Optional[Dict[str, str]] = None,
vcs_url: str,
vcs: Missing[Literal["subversion", "git", "mercurial", "tfvc"]] = UNSET,
vcs_username: Missing[str] = UNSET,
vcs_password: Missing[str] = UNSET,
tfvc_project: Missing[str] = UNSET,
) -> "Response[Import]":
...
def start_import(
self,
owner: str,
repo: str,
*,
headers: Optional[Dict[str, str]] = None,
data: Missing[ReposOwnerRepoImportPutBodyType] = UNSET,
**kwargs,
) -> "Response[Import]":
url = f"/repos/{owner}/{repo}/import"
headers = {"X-GitHub-Api-Version": self._REST_API_VERSION, **(headers or {})}
if not kwargs:
kwargs = UNSET
json = kwargs if data is UNSET else data
json = parse_obj_as(ReposOwnerRepoImportPutBody, json)
json = json.dict(by_alias=True) if isinstance(json, BaseModel) else json
return self._github.request(
"PUT",
url,
json=exclude_unset(json),
headers=exclude_unset(headers),
response_model=Import,
error_models={
"422": ValidationError,
"404": BasicError,
"503": BasicError,
},
)
@overload
async def async_start_import(
self,
owner: str,
repo: str,
*,
headers: Optional[Dict[str, str]] = None,
data: ReposOwnerRepoImportPutBodyType,
) -> "Response[Import]":
...
@overload
async def async_start_import(
self,
owner: str,
repo: str,
*,
data: Literal[UNSET] = UNSET,
headers: Optional[Dict[str, str]] = None,
vcs_url: str,
vcs: Missing[Literal["subversion", "git", "mercurial", "tfvc"]] = UNSET,
vcs_username: Missing[str] = UNSET,
vcs_password: Missing[str] = UNSET,
tfvc_project: Missing[str] = UNSET,
) -> "Response[Import]":
...
async def async_start_import(
self,
owner: str,
repo: str,
*,
headers: Optional[Dict[str, str]] = None,
data: Missing[ReposOwnerRepoImportPutBodyType] = UNSET,
**kwargs,
) -> "Response[Import]":
url = f"/repos/{owner}/{repo}/import"
headers = {"X-GitHub-Api-Version": self._REST_API_VERSION, **(headers or {})}
if not kwargs:
kwargs = UNSET
json = kwargs if data is UNSET else data
json = parse_obj_as(ReposOwnerRepoImportPutBody, json)
json = json.dict(by_alias=True) if isinstance(json, BaseModel) else json
return await self._github.arequest(
"PUT",
url,
json=exclude_unset(json),
headers=exclude_unset(headers),
response_model=Import,
error_models={
"422": ValidationError,
"404": BasicError,
"503": BasicError,
},
)
def cancel_import(
self,
owner: str,
repo: str,
*,
headers: Optional[Dict[str, str]] = None,
) -> "Response":
url = f"/repos/{owner}/{repo}/import"
headers = {"X-GitHub-Api-Version": self._REST_API_VERSION, **(headers or {})}
return self._github.request(
"DELETE",
url,
headers=exclude_unset(headers),
error_models={
"503": BasicError,
},
)
async def async_cancel_import(
self,
owner: str,
repo: str,
*,
headers: Optional[Dict[str, str]] = None,
) -> "Response":
url = f"/repos/{owner}/{repo}/import"
headers = {"X-GitHub-Api-Version": self._REST_API_VERSION, **(headers or {})}
return await self._github.arequest(
"DELETE",
url,
headers=exclude_unset(headers),
error_models={
"503": BasicError,
},
)
@overload
def update_import(
self,
owner: str,
repo: str,
*,
headers: Optional[Dict[str, str]] = None,
data: Missing[Union[ReposOwnerRepoImportPatchBodyType, None]] = UNSET,
) -> "Response[Import]":
...
@overload
def update_import(
self,
owner: str,
repo: str,
*,
data: Literal[UNSET] = UNSET,
headers: Optional[Dict[str, str]] = None,
vcs_username: Missing[str] = UNSET,
vcs_password: Missing[str] = UNSET,
vcs: Missing[Literal["subversion", "tfvc", "git", "mercurial"]] = UNSET,
tfvc_project: Missing[str] = UNSET,
) -> "Response[Import]":
...
def update_import(
self,
owner: str,
repo: str,
*,
headers: Optional[Dict[str, str]] = None,
data: Missing[Union[ReposOwnerRepoImportPatchBodyType, None]] = UNSET,
**kwargs,
) -> "Response[Import]":
url = f"/repos/{owner}/{repo}/import"
headers = {"X-GitHub-Api-Version": self._REST_API_VERSION, **(headers or {})}
if not kwargs:
kwargs = UNSET
json = kwargs if data is UNSET else data
json = parse_obj_as(Union[ReposOwnerRepoImportPatchBody, None], json)
json = json.dict(by_alias=True) if isinstance(json, BaseModel) else json
return self._github.request(
"PATCH",
url,
json=exclude_unset(json),
headers=exclude_unset(headers),
response_model=Import,
error_models={
"503": BasicError,
},
)
@overload
async def async_update_import(
self,
owner: str,
repo: str,
*,
headers: Optional[Dict[str, str]] = None,
data: Missing[Union[ReposOwnerRepoImportPatchBodyType, None]] = UNSET,
) -> "Response[Import]":
...
@overload
async def async_update_import(
self,
owner: str,
repo: str,
*,
data: Literal[UNSET] = UNSET,
headers: Optional[Dict[str, str]] = None,
vcs_username: Missing[str] = UNSET,
vcs_password: Missing[str] = UNSET,
vcs: Missing[Literal["subversion", "tfvc", "git", "mercurial"]] = UNSET,
tfvc_project: Missing[str] = UNSET,
) -> "Response[Import]":
...
async def async_update_import(
self,
owner: str,
repo: str,
*,
headers: Optional[Dict[str, str]] = None,
data: Missing[Union[ReposOwnerRepoImportPatchBodyType, None]] = UNSET,
**kwargs,
) -> "Response[Import]":
url = f"/repos/{owner}/{repo}/import"
headers = {"X-GitHub-Api-Version": self._REST_API_VERSION, **(headers or {})}
if not kwargs:
kwargs = UNSET
json = kwargs if data is UNSET else data
json = parse_obj_as(Union[ReposOwnerRepoImportPatchBody, None], json)
json = json.dict(by_alias=True) if isinstance(json, BaseModel) else json
return await self._github.arequest(
"PATCH",
url,
json=exclude_unset(json),
headers=exclude_unset(headers),
response_model=Import,
error_models={
"503": BasicError,
},
)
def get_commit_authors(
self,
owner: str,
repo: str,
since: Missing[int] = UNSET,
*,
headers: Optional[Dict[str, str]] = None,
) -> "Response[List[PorterAuthor]]":
url = f"/repos/{owner}/{repo}/import/authors"
params = {
"since": since,
}
headers = {"X-GitHub-Api-Version": self._REST_API_VERSION, **(headers or {})}
return self._github.request(
"GET",
url,
params=exclude_unset(params),
headers=exclude_unset(headers),
response_model=List[PorterAuthor],
error_models={
"404": BasicError,
"503": BasicError,
},
)
async def async_get_commit_authors(
self,
owner: str,
repo: str,
since: Missing[int] = UNSET,
*,
headers: Optional[Dict[str, str]] = None,
) -> "Response[List[PorterAuthor]]":
url = f"/repos/{owner}/{repo}/import/authors"
params = {
"since": since,
}
headers = {"X-GitHub-Api-Version": self._REST_API_VERSION, **(headers or {})}
return await self._github.arequest(
"GET",
url,
params=exclude_unset(params),
headers=exclude_unset(headers),
response_model=List[PorterAuthor],
error_models={
"404": BasicError,
"503": BasicError,
},
)
@overload
def map_commit_author(
self,
owner: str,
repo: str,
author_id: int,
*,
headers: Optional[Dict[str, str]] = None,
data: Missing[ReposOwnerRepoImportAuthorsAuthorIdPatchBodyType] = UNSET,
) -> "Response[PorterAuthor]":
...
@overload
def map_commit_author(
self,
owner: str,
repo: str,
author_id: int,
*,
data: Literal[UNSET] = UNSET,
headers: Optional[Dict[str, str]] = None,
email: Missing[str] = UNSET,
name: Missing[str] = UNSET,
) -> "Response[PorterAuthor]":
...
def map_commit_author(
self,
owner: str,
repo: str,
author_id: int,
*,
headers: Optional[Dict[str, str]] = None,
data: Missing[ReposOwnerRepoImportAuthorsAuthorIdPatchBodyType] = UNSET,
**kwargs,
) -> "Response[PorterAuthor]":
url = f"/repos/{owner}/{repo}/import/authors/{author_id}"
headers = {"X-GitHub-Api-Version": self._REST_API_VERSION, **(headers or {})}
if not kwargs:
kwargs = UNSET
json = kwargs if data is UNSET else data
json = parse_obj_as(ReposOwnerRepoImportAuthorsAuthorIdPatchBody, json)
json = json.dict(by_alias=True) if isinstance(json, BaseModel) else json
return self._github.request(
"PATCH",
url,
json=exclude_unset(json),
headers=exclude_unset(headers),
response_model=PorterAuthor,
error_models={
"422": ValidationError,
"404": BasicError,
"503": BasicError,
},
)
@overload
async def async_map_commit_author(
self,
owner: str,
repo: str,
author_id: int,
*,
headers: Optional[Dict[str, str]] = None,
data: Missing[ReposOwnerRepoImportAuthorsAuthorIdPatchBodyType] = UNSET,
) -> "Response[PorterAuthor]":
...
@overload
async def async_map_commit_author(
self,
owner: str,
repo: str,
author_id: int,
*,
data: Literal[UNSET] = UNSET,
headers: Optional[Dict[str, str]] = None,
email: Missing[str] = UNSET,
name: Missing[str] = UNSET,
) -> "Response[PorterAuthor]":
...
async def async_map_commit_author(
self,
owner: str,
repo: str,
author_id: int,
*,
headers: Optional[Dict[str, str]] = None,
data: Missing[ReposOwnerRepoImportAuthorsAuthorIdPatchBodyType] = UNSET,
**kwargs,
) -> "Response[PorterAuthor]":
url = f"/repos/{owner}/{repo}/import/authors/{author_id}"
headers = {"X-GitHub-Api-Version": self._REST_API_VERSION, **(headers or {})}
if not kwargs:
kwargs = UNSET
json = kwargs if data is UNSET else data
json = parse_obj_as(ReposOwnerRepoImportAuthorsAuthorIdPatchBody, json)
json = json.dict(by_alias=True) if isinstance(json, BaseModel) else json
return await self._github.arequest(
"PATCH",
url,
json=exclude_unset(json),
headers=exclude_unset(headers),
response_model=PorterAuthor,
error_models={
"422": ValidationError,
"404": BasicError,
"503": BasicError,
},
)
def get_large_files(
self,
owner: str,
repo: str,
*,
headers: Optional[Dict[str, str]] = None,
) -> "Response[List[PorterLargeFile]]":
url = f"/repos/{owner}/{repo}/import/large_files"
headers = {"X-GitHub-Api-Version": self._REST_API_VERSION, **(headers or {})}
return self._github.request(
"GET",
url,
headers=exclude_unset(headers),
response_model=List[PorterLargeFile],
error_models={
"503": BasicError,
},
)
async def async_get_large_files(
self,
owner: str,
repo: str,
*,
headers: Optional[Dict[str, str]] = None,
) -> "Response[List[PorterLargeFile]]":
url = f"/repos/{owner}/{repo}/import/large_files"
headers = {"X-GitHub-Api-Version": self._REST_API_VERSION, **(headers or {})}
return await self._github.arequest(
"GET",
url,
headers=exclude_unset(headers),
response_model=List[PorterLargeFile],
error_models={
"503": BasicError,
},
)
@overload
def set_lfs_preference(
self,
owner: str,
repo: str,
*,
headers: Optional[Dict[str, str]] = None,
data: ReposOwnerRepoImportLfsPatchBodyType,
) -> "Response[Import]":
...
@overload
def set_lfs_preference(
self,
owner: str,
repo: str,
*,
data: Literal[UNSET] = UNSET,
headers: Optional[Dict[str, str]] = None,
use_lfs: Literal["opt_in", "opt_out"],
) -> "Response[Import]":
...
def set_lfs_preference(
self,
owner: str,
repo: str,
*,
headers: Optional[Dict[str, str]] = None,
data: Missing[ReposOwnerRepoImportLfsPatchBodyType] = UNSET,
**kwargs,
) -> "Response[Import]":
url = f"/repos/{owner}/{repo}/import/lfs"
headers = {"X-GitHub-Api-Version": self._REST_API_VERSION, **(headers or {})}
if not kwargs:
kwargs = UNSET
json = kwargs if data is UNSET else data
json = parse_obj_as(ReposOwnerRepoImportLfsPatchBody, json)
json = json.dict(by_alias=True) if isinstance(json, BaseModel) else json
return self._github.request(
"PATCH",
url,
json=exclude_unset(json),
headers=exclude_unset(headers),
response_model=Import,
error_models={
"422": ValidationError,
"503": BasicError,
},
)
@overload
async def async_set_lfs_preference(
self,
owner: str,
repo: str,
*,
headers: Optional[Dict[str, str]] = None,
data: ReposOwnerRepoImportLfsPatchBodyType,
) -> "Response[Import]":
...
@overload
async def async_set_lfs_preference(
self,
owner: str,
repo: str,
*,
data: Literal[UNSET] = UNSET,
headers: Optional[Dict[str, str]] = None,
use_lfs: Literal["opt_in", "opt_out"],
) -> "Response[Import]":
...
async def async_set_lfs_preference(
self,
owner: str,
repo: str,
*,
headers: Optional[Dict[str, str]] = None,
data: Missing[ReposOwnerRepoImportLfsPatchBodyType] = UNSET,
**kwargs,
) -> "Response[Import]":
url = f"/repos/{owner}/{repo}/import/lfs"
headers = {"X-GitHub-Api-Version": self._REST_API_VERSION, **(headers or {})}
if not kwargs:
kwargs = UNSET
json = kwargs if data is UNSET else data
json = parse_obj_as(ReposOwnerRepoImportLfsPatchBody, json)
json = json.dict(by_alias=True) if isinstance(json, BaseModel) else json
return await self._github.arequest(
"PATCH",
url,
json=exclude_unset(json),
headers=exclude_unset(headers),
response_model=Import,
error_models={
"422": ValidationError,
"503": BasicError,
},
)
def list_for_authenticated_user(
self,
per_page: Missing[int] = 30,
page: Missing[int] = 1,
*,
headers: Optional[Dict[str, str]] = None,
) -> "Response[List[Migration]]":
url = "/user/migrations"
params = {
"per_page": per_page,
"page": page,
}
headers = {"X-GitHub-Api-Version": self._REST_API_VERSION, **(headers or {})}
return self._github.request(
"GET",
url,
params=exclude_unset(params),
headers=exclude_unset(headers),
response_model=List[Migration],
error_models={
"403": BasicError,
"401": BasicError,
},
)
async def async_list_for_authenticated_user(
self,
per_page: Missing[int] = 30,
page: Missing[int] = 1,
*,
headers: Optional[Dict[str, str]] = None,
) -> "Response[List[Migration]]":
url = "/user/migrations"
params = {
"per_page": per_page,
"page": page,
}
headers = {"X-GitHub-Api-Version": self._REST_API_VERSION, **(headers or {})}
return await self._github.arequest(
"GET",
url,
params=exclude_unset(params),
headers=exclude_unset(headers),
response_model=List[Migration],
error_models={
"403": BasicError,
"401": BasicError,
},
)
@overload
def start_for_authenticated_user(
self,
*,
headers: Optional[Dict[str, str]] = None,
data: UserMigrationsPostBodyType,
) -> "Response[Migration]":
...
@overload
def start_for_authenticated_user(
self,
*,
data: Literal[UNSET] = UNSET,
headers: Optional[Dict[str, str]] = None,
lock_repositories: Missing[bool] = UNSET,
exclude_metadata: Missing[bool] = UNSET,
exclude_git_data: Missing[bool] = UNSET,
exclude_attachments: Missing[bool] = UNSET,
exclude_releases: Missing[bool] = UNSET,
exclude_owner_projects: Missing[bool] = UNSET,
org_metadata_only: Missing[bool] = False,
exclude: Missing[List[Literal["repositories"]]] = UNSET,
repositories: List[str],
) -> "Response[Migration]":
...
def start_for_authenticated_user(
self,
*,
headers: Optional[Dict[str, str]] = None,
data: Missing[UserMigrationsPostBodyType] = UNSET,
**kwargs,
) -> "Response[Migration]":
url = "/user/migrations"
headers = {"X-GitHub-Api-Version": self._REST_API_VERSION, **(headers or {})}
if not kwargs:
kwargs = UNSET
json = kwargs if data is UNSET else data
json = parse_obj_as(UserMigrationsPostBody, json)
json = json.dict(by_alias=True) if isinstance(json, BaseModel) else json
return self._github.request(
"POST",
url,
json=exclude_unset(json),
headers=exclude_unset(headers),
response_model=Migration,
error_models={
"422": ValidationError,
"403": BasicError,
"401": BasicError,
},
)
@overload
async def async_start_for_authenticated_user(
self,
*,
headers: Optional[Dict[str, str]] = None,
data: UserMigrationsPostBodyType,
) -> "Response[Migration]":
...
@overload
async def async_start_for_authenticated_user(
self,
*,
data: Literal[UNSET] = UNSET,
headers: Optional[Dict[str, str]] = None,
lock_repositories: Missing[bool] = UNSET,
exclude_metadata: Missing[bool] = UNSET,
exclude_git_data: Missing[bool] = UNSET,
exclude_attachments: Missing[bool] = UNSET,
exclude_releases: Missing[bool] = UNSET,
exclude_owner_projects: Missing[bool] = UNSET,
org_metadata_only: Missing[bool] = False,
exclude: Missing[List[Literal["repositories"]]] = UNSET,
repositories: List[str],
) -> "Response[Migration]":
...
async def async_start_for_authenticated_user(
self,
*,
headers: Optional[Dict[str, str]] = None,
data: Missing[UserMigrationsPostBodyType] = UNSET,
**kwargs,
) -> "Response[Migration]":
url = "/user/migrations"
headers = {"X-GitHub-Api-Version": self._REST_API_VERSION, **(headers or {})}
if not kwargs:
kwargs = UNSET
json = kwargs if data is UNSET else data
json = parse_obj_as(UserMigrationsPostBody, json)
json = json.dict(by_alias=True) if isinstance(json, BaseModel) else json
return await self._github.arequest(
"POST",
url,
json=exclude_unset(json),
headers=exclude_unset(headers),
response_model=Migration,
error_models={
"422": ValidationError,
"403": BasicError,
"401": BasicError,
},
)
def get_status_for_authenticated_user(
self,
migration_id: int,
exclude: Missing[List[str]] = UNSET,
*,
headers: Optional[Dict[str, str]] = None,
) -> "Response[Migration]":
url = f"/user/migrations/{migration_id}"
params = {
"exclude": exclude,
}
headers = {"X-GitHub-Api-Version": self._REST_API_VERSION, **(headers or {})}
return self._github.request(
"GET",
url,
params=exclude_unset(params),
headers=exclude_unset(headers),
response_model=Migration,
error_models={
"404": BasicError,
"403": BasicError,
"401": BasicError,
},
)
async def async_get_status_for_authenticated_user(
self,
migration_id: int,
exclude: Missing[List[str]] = UNSET,
*,
headers: Optional[Dict[str, str]] = None,
) -> "Response[Migration]":
url = f"/user/migrations/{migration_id}"
params = {
"exclude": exclude,
}
headers = {"X-GitHub-Api-Version": self._REST_API_VERSION, **(headers or {})}
return await self._github.arequest(
"GET",
url,
params=exclude_unset(params),
headers=exclude_unset(headers),
response_model=Migration,
error_models={
"404": BasicError,
"403": BasicError,
"401": BasicError,
},
)
def get_archive_for_authenticated_user(
self,
migration_id: int,
*,
headers: Optional[Dict[str, str]] = None,
) -> "Response":
url = f"/user/migrations/{migration_id}/archive"
headers = {"X-GitHub-Api-Version": self._REST_API_VERSION, **(headers or {})}
return self._github.request(
"GET",
url,
headers=exclude_unset(headers),
error_models={
"403": BasicError,
"401": BasicError,
},
)
async def async_get_archive_for_authenticated_user(
self,
migration_id: int,
*,
headers: Optional[Dict[str, str]] = None,
) -> "Response":
url = f"/user/migrations/{migration_id}/archive"
headers = {"X-GitHub-Api-Version": self._REST_API_VERSION, **(headers or {})}
return await self._github.arequest(
"GET",
url,
headers=exclude_unset(headers),
error_models={
"403": BasicError,
"401": BasicError,
},
)
def delete_archive_for_authenticated_user(
self,
migration_id: int,
*,
headers: Optional[Dict[str, str]] = None,
) -> "Response":
url = f"/user/migrations/{migration_id}/archive"
headers = {"X-GitHub-Api-Version": self._REST_API_VERSION, **(headers or {})}
return self._github.request(
"DELETE",
url,
headers=exclude_unset(headers),
error_models={
"404": BasicError,
"403": BasicError,
"401": BasicError,
},
)
async def async_delete_archive_for_authenticated_user(
self,
migration_id: int,
*,
headers: Optional[Dict[str, str]] = None,
) -> "Response":
url = f"/user/migrations/{migration_id}/archive"
headers = {"X-GitHub-Api-Version": self._REST_API_VERSION, **(headers or {})}
return await self._github.arequest(
"DELETE",
url,
headers=exclude_unset(headers),
error_models={
"404": BasicError,
"403": BasicError,
"401": BasicError,
},
)
def unlock_repo_for_authenticated_user(
self,
migration_id: int,
repo_name: str,
*,
headers: Optional[Dict[str, str]] = None,
) -> "Response":
url = f"/user/migrations/{migration_id}/repos/{repo_name}/lock"
headers = {"X-GitHub-Api-Version": self._REST_API_VERSION, **(headers or {})}
return self._github.request(
"DELETE",
url,
headers=exclude_unset(headers),
error_models={
"404": BasicError,
"403": BasicError,
"401": BasicError,
},
)
async def async_unlock_repo_for_authenticated_user(
self,
migration_id: int,
repo_name: str,
*,
headers: Optional[Dict[str, str]] = None,
) -> "Response":
url = f"/user/migrations/{migration_id}/repos/{repo_name}/lock"
headers = {"X-GitHub-Api-Version": self._REST_API_VERSION, **(headers or {})}
return await self._github.arequest(
"DELETE",
url,
headers=exclude_unset(headers),
error_models={
"404": BasicError,
"403": BasicError,
"401": BasicError,
},
)
def list_repos_for_authenticated_user(
self,
migration_id: int,
per_page: Missing[int] = 30,
page: Missing[int] = 1,
*,
headers: Optional[Dict[str, str]] = None,
) -> "Response[List[MinimalRepository]]":
url = f"/user/migrations/{migration_id}/repositories"
params = {
"per_page": per_page,
"page": page,
}
headers = {"X-GitHub-Api-Version": self._REST_API_VERSION, **(headers or {})}
return self._github.request(
"GET",
url,
params=exclude_unset(params),
headers=exclude_unset(headers),
response_model=List[MinimalRepository],
error_models={
"404": BasicError,
},
)
async def async_list_repos_for_authenticated_user(
self,
migration_id: int,
per_page: Missing[int] = 30,
page: Missing[int] = 1,
*,
headers: Optional[Dict[str, str]] = None,
) -> "Response[List[MinimalRepository]]":
url = f"/user/migrations/{migration_id}/repositories"
params = {
"per_page": per_page,
"page": page,
}
headers = {"X-GitHub-Api-Version": self._REST_API_VERSION, **(headers or {})}
return await self._github.arequest(
"GET",
url,
params=exclude_unset(params),
headers=exclude_unset(headers),
response_model=List[MinimalRepository],
error_models={
"404": BasicError,
},
)
/OASYS1-APS-Extensions-1.0.87.tar.gz/OASYS1-APS-Extensions-1.0.87/README.md | # OASYS1-APS-Extensions
Copyright (c) 2018, UChicago Argonne, LLC. All rights reserved.
Copyright 2018. UChicago Argonne, LLC. This software was produced
under U.S. Government contract DE-AC02-06CH11357 for Argonne National
Laboratory (ANL), which is operated by UChicago Argonne, LLC for the
U.S. Department of Energy. The U.S. Government has rights to use,
reproduce, and distribute this software. NEITHER THE GOVERNMENT NOR
UChicago Argonne, LLC MAKES ANY WARRANTY, EXPRESS OR IMPLIED, OR
ASSUMES ANY LIABILITY FOR THE USE OF THIS SOFTWARE. If software is
modified to produce derivative works, such modified software should
be clearly marked, so as not to confuse it with the version available
from ANL.
Additionally, redistribution and use in source and binary forms, with
or without modification, are permitted provided that the following
conditions are met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in
the documentation and/or other materials provided with the
distribution.
* Neither the name of UChicago Argonne, LLC, Argonne National
Laboratory, ANL, the U.S. Government, nor the names of its
contributors may be used to endorse or promote products derived
from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY UChicago Argonne, LLC AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL UChicago
Argonne, LLC OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
Customized widgets for Oasys developed at the APS-ANL
/BioTEMPy-2.1.0a0-py3-none-any.whl/TEMPy/optimisation/ensemble_generation.py |
import os
from numpy import linspace
import TEMPy.math.vector as Vector
from TEMPy.protein.structure_parser import PDBParser
class EnsembleGeneration:
"""A class to create ensemble of structure instance"""
def __init__(self):
pass
def loadEnsemble(
self,
path_dir,
file_name_flag,
hetatm=False,
water=False,
verbose=False,
pdb=True,
):
"""
Load an ensemble of Structure Instances from the directory path_dir.
Arguments:
*path_dir*
directory name
*file_name_flag*
name or suffix of the files.
"""
structure_list = []
list_rotate_models = [
filein for filein in os.listdir(path_dir)
if file_name_flag in filein and filein.endswith('.pdb')
]
for pdbin in list_rotate_models:
print(pdbin)
if pdb is True:
file_in = path_dir+'/'+pdbin
if verbose is True:
print('load file:', pdbin[:-4], file_in)
structure_instance = PDBParser.read_PDB_file(
str(pdbin[:-4]),
str(file_in),
hetatm=hetatm,
water=water,
)
structure_list.append([pdbin[:-4], structure_instance])
return structure_list
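# A minimal usage sketch (hedged; the directory and flag are illustrative):
# load every '.pdb' file in 'models/' whose name contains 'fit' as a list
# of [name, Structure Instance] pairs.
#
#     eg = EnsembleGeneration()
#     ensemble = eg.loadEnsemble('models', 'fit', verbose=True)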
def randomise_structs(
self,
structure_instance,
no_of_structs,
max_trans,
max_rot,
v_grain=30,
rad=False,
flag='mod',
write=False,
):
"""
Generate an ensemble of Structure Instances.
Arguments:
*structure_instance*
Input Structure Instance
*no_of_structs*
int, number of structures to output
*max_trans*
Maximum translation permitted
*max_rot*
Maximum rotation permitted (in degree if rad=False)
*v_grain*
Graining level for the generation of random vectors (default=30)
*write*
True will write out each Structure Instance in the
ensemble as a single PDB.
Return:
list of [structure_instance_name, structure_instance] pairs
"""
ensemble_list = []
file_name0 = flag + '_0'
ensemble_list.append([file_name0, structure_instance.copy()])
if write is True:
structure_instance.write_to_PDB(file_name0)
count = 0
for x in range(0, no_of_structs - 1):
count += 1
file_name = flag + '_' + str(count) + ".pdb"
structure_instance.randomise_position(
float(max_trans),
float(max_rot),
v_grain,
rad,
)
if write is True:
structure_instance.write_to_PDB(file_name)
ensemble_list.append([file_name[:-4], structure_instance.copy()])
structure_instance.reset_position()
return ensemble_list
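# A usage sketch (hedged; the file name is illustrative). The input is a
# TEMPy Structure Instance, e.g. parsed with the PDBParser imported above:
#
#     structure = PDBParser.read_PDB_file('model', 'model.pdb')
#     eg = EnsembleGeneration()
#     ensemble = eg.randomise_structs(structure, 10, 5.0, 30.0, write=True)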
def anglar_sweep(
self,
structure_instance,
axis,
translation_vector,
no_of_structs,
loc_rotation_angle,
flag='mod',
atom_com_ind=False,
write=False,
filename=None,
):
"""
Generate an ensemble of Structure Instance
NOTE - Choose the number of structures for the ensemble according to
the angular increment step (loc_rotation_angle/no_of_structs) and
translational increment step (translation_vector/no_of_structs)
required.
Default setting is around the center of mass.
Arguments:
*structure_instance*
Input Structure Instance
*axis*
3-tuple, axis for the rotation
*translation_vector*
3-tuple, vector for translation
*no_of_structs*
int, number of structures to output
*loc_rotation_angle*
float, rotation angle for local rotation (degrees)
*flag*
string, prefix name for outputted pdb files
*atom_com_ind*
int, index of atom to rotate around. If False, rotates around
centre of mass
*write*
True will write out each Structure Instance in the ensemble as
single PDB.
Return:
list of [structure_instance_name, structure_instance] pairs
"""
ensemble_list = []
file_name0 = flag + '_0'
ensemble_list.append([file_name0, structure_instance])
grain = loc_rotation_angle / no_of_structs
if int(grain) < 1:
print('Warning: less than 1 deg rotation per step')
else:
transl_x = linspace(0, translation_vector[0], num=no_of_structs)
transl_y = linspace(0, translation_vector[1], num=no_of_structs)
transl_z = linspace(0, translation_vector[2], num=no_of_structs)
angle_rot = linspace(0, loc_rotation_angle, num=no_of_structs)
count = -1
for x in angle_rot:
count += 1
file_name = flag + '_' + str(count + 1) + '.pdb'
if atom_com_ind:
loc_point = structure_instance[atom_com_ind].get_pos_vector() # noqa: E501
structure_instance.rotate_by_axis_angle(
axis[0],
axis[1],
axis[2],
x,
com=loc_point,
)
else:
structure_instance.rotate_by_axis_angle(
axis[0],
axis[1],
axis[2],
x,
)
structure_instance.translate(
transl_x[count],
transl_y[count],
transl_z[count],
)
if write is True:
structure_instance.write_to_PDB(file_name)
# append a copy, otherwise the reset_position() below would also
# reset every model already stored in the ensemble list
ensemble_list.append([file_name[:-4], structure_instance.copy()])
structure_instance.reset_position()
return ensemble_list
def spiral_sweep(
self,
structure_instance,
axis,
dist,
no_of_structs,
loc_ang_range,
loc_axis,
flag,
atom_com_ind=False,
write=False,
):
"""
Generate an ensemble of Structure Instances
Arguments:
*structure_instance*
Input Structure Instance
*axis*
3-tuple, axis for translation
*dist*
int, translation range (Angstroms)
*no_of_structs*
int, number of structures to output
*loc_ang_range*
tuple, rotation range for local rotation (degrees)
*loc_axis*
3-tuple, axis for local rotation around centre of mass
*flag*
string, prefix name for outputted pdb files
*atom_com_ind*
int, index of atom to rotate around. If False, rotates around
centre of mass.
*write*
True will write out each Structure Instance in the ensemble as
single PDB.
Return:
list of [structure_instance_name, structure_instance] pairs
"""
ensemble_list = []
file_name0 = 'mod_0'
ensemble_list.append([file_name0, structure_instance])
grain = dist / no_of_structs
axis = Vector.Vector(axis[0], axis[1], axis[2]).unit()
for r in range(no_of_structs):
file_name = flag + str(r + 1) + '.pdb'
structure_instance.translate(
axis.x * r * grain,
axis.y * r * grain,
axis.z * r * grain,
)
# Rotation around centre of mass
loc_grain = (loc_ang_range[1] - loc_ang_range[0]) / no_of_structs
if atom_com_ind:
loc_point = structure_instance[atom_com_ind].get_pos_vector()
structure_instance.rotate_by_axis_angle(
loc_axis[0],
loc_axis[1],
loc_axis[2],
r * loc_grain,
com=loc_point,
)
else:
structure_instance.rotate_by_axis_angle(
loc_axis[0],
loc_axis[1],
loc_axis[2],
r * loc_grain
)
if write is True:
structure_instance.write_to_PDB(file_name)
# append a copy so the reset_position() below does not also reset
# the models already stored in the ensemble list
ensemble_list.append([file_name[:-4], structure_instance.copy()])
structure_instance.reset_position()
return ensemble_list
/ActionCableZwei-0.1.7.2.tar.gz/ActionCableZwei-0.1.7.2/actioncable/connection.py | import threading
import uuid
import json
import logging
import time
import websocket
class Connection:
"""
The connection to a websocket server
"""
def __init__(self, url, origin=None, log_ping=False, cookie=None, header=None):
"""
:param url: The url of the cable server.
:param origin: (Optional) The origin.
:param log_ping: (Default: False) If true every
ping gets logged.
:param cookie: (Optional) A cookie to send (used for
authentication for instance).
:param header: (Optional) custom header for websocket handshake.
"""
self.url = url
self.origin = origin
self.log_ping = log_ping
self.cookie = cookie
self.header = header
self.logger = logging.getLogger('ActionCable Connection')
self.subscriptions = {}
self.websocket = None
self.ws_thread = None
self.auto_reconnect = False
def connect(self, origin=None):
"""
Connects to the server.
:param origin: (Optional) The origin.
"""
self.logger.debug('Establish connection...')
if self.connected:
self.logger.warning('Connection already established. Return...')
return
if origin is not None:
self.origin = origin
self.auto_reconnect = True
self.ws_thread = threading.Thread(
name="APIConnectionThread_{}".format(uuid.uuid1()),
target=self._run_forever)
self.ws_thread.daemon = True
self.ws_thread.start()
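# A minimal usage sketch (hedged; the URL and origin are illustrative
# and assume a reachable Action Cable server):
#
#     connection = Connection(url='wss://example.org/cable',
#                             origin='https://example.org')
#     connection.connect()
#     ...  # create subscriptions, exchange messages
#     connection.disconnect()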
def disconnect(self):
"""
Closes the connection.
"""
self.logger.debug('Close connection...')
self.auto_reconnect = False
if self.websocket is not None:
self.websocket.close()
def _run_forever(self):
while self.auto_reconnect:
try:
self.logger.debug('Run connection loop.')
self.websocket = websocket.WebSocketApp(
self.url,
cookie=self.cookie,
header=self.header,
on_message=self._on_message,
on_close=self._on_close)
self.websocket.on_open = self._on_open
self.websocket.run_forever(ping_interval=5, ping_timeout=3, origin=self.origin)
time.sleep(2)
except Exception as exc:
self.logger.error('Connection loop raised exception. Exception: %s', exc)
def send(self, data):
"""
Sends data to the server.
"""
self.logger.debug('Send data: {}'.format(data))
if not self.connected:
self.logger.warning('Connection not established. Return...')
return
self.websocket.send(json.dumps(data))
def _on_open(self, socket):
"""
Called when the connection is open.
"""
self.logger.debug('Connection established.')
def _on_message(self, socket, message):
"""
Called aways when a message arrives.
"""
data = json.loads(message)
message_type = None
identifier = None
subscription = None
if 'type' in data:
message_type = data['type']
if 'identifier' in data:
identifier = json.loads(data['identifier'])
if identifier is not None:
subscription = self.find_subscription(identifier)
if subscription is not None:
subscription.received(data)
elif message_type == 'welcome':
self.logger.debug('Welcome message received.')
for subscription in self.subscriptions.values():
if subscription.state == 'connection_pending':
subscription.create()
elif message_type == 'ping':
if self.log_ping:
self.logger.debug('Ping received.')
else:
self.logger.warning('Message not supported. (Message: {})'.format(message))
def _on_close(self, socket):
"""
Called when the connection was closed.
"""
self.logger.debug('Connection closed.')
for subscription in self.subscriptions.values():
if subscription.state == 'subscribed':
subscription.state = 'connection_pending'
@property
def socket_present(self):
"""
If socket is present.
"""
return self.websocket is not None and self.websocket.sock is not None
@property
def connected(self):
"""
If connected to server.
"""
return self.websocket is not None and \
self.websocket.sock is not None and \
self.websocket.sock.connected
def find_subscription(self, identifier):
"""
Finds a subscription
by it's identifier.
"""
for subscription in self.subscriptions.values():
if subscription.identifier == identifier:
return subscription
/G31_KID_design-1.0.3.tar.gz/G31_KID_design-1.0.3/src/G31_KID_design/HilbertIShape.py |
# import packages
import ezdxf
from ezdxf.addons.drawing.matplotlib import MatplotlibBackend
from ezdxf.addons.drawing import Frontend, RenderContext
import numpy as np
import os
from pathlib import Path
from matplotlib import pyplot as plt
from shapely.geometry import Polygon
from shapely.ops import unary_union
# units: micron
class HilbertIShape():
'''
Parameters (all the distances are in units of micron):
index: int, the id of the pixel
vertical_size: float, edge size of the absorber
line_width: float, width of the conductive path
coupling_capacitor_length: float, length of the coupling capacitor
coupling_capacitor_width: float, width of the coupling capacitor
coupling_connector_width: float, width of the conductive segment that goes
from the pixel to the coupling capacitor
coupling_capacitor_y_offset: float, vertical separation between the pixel
and the coupling capacitor
capacitor_finger_number: float, number of fingers of the interdigital capacitor
with the decimal part adding an extra finger of variable length
capacitor_finger_gap: float, gap between interdigitated fingers
capacitor_finger_width: float, width of the interdigitated fingers
hilbert_order: int, Hilbert order of the absorber (it is recommended not to
exceed the 7th order for computational reasons)
absorber_separation: float, horizontal separation of the absorber from the
capacitor
See other function help for more info
'''
def __init__(self, index, vertical_size, line_width, coupling_capacitor_length, coupling_capacitor_width,
coupling_connector_width, coupling_capacitor_y_offset, capacitor_finger_number,
capacitor_finger_gap, capacitor_finger_width, hilbert_order, absorber_separation):
self.index = index
self.vertical_size = vertical_size
self.line_width = line_width
self.coupling_capacitor_length = coupling_capacitor_length
self.coupling_capacitor_width = coupling_capacitor_width
self.coupling_connector_width = coupling_connector_width
self.coupling_capacitor_y_offset = coupling_capacitor_y_offset
self.capacitor_finger_number = capacitor_finger_number
self.capacitor_finger_gap = capacitor_finger_gap
self.capacitor_finger_length = self.vertical_size-2*self.line_width-self.capacitor_finger_gap
self.capacitor_finger_width = capacitor_finger_width
self.hilbert_order = hilbert_order
self.absorber_separation = absorber_separation
self.info_string = ("\n"
"units: microns\n"
"index: {:d}\n"
"vertical_size: {:.2f}\n"
"line_width: {:.2f}\n"
"coupling_capacitor_length: {:.2f}\n"
"coupling_capacitor_width: {:.2f}\n"
"coupling_connector_width: {:.2f}\n"
"coupling_capacitor_y_offset: {:.2f}\n"
"capacitor_finger_number: {:.2f}\n"
"capacitor_finger_gap: {:.2f}\n"
"capacitor_finger_length: {:.2f}\n"
"capacitor_finger_width: {:.2f}\n"
"hilbert_order: {:d}\n"
"absorber_separation: {:.2f}\n"
"\n".format(self.index,
self.vertical_size,
self.line_width,
self.coupling_capacitor_length,
self.coupling_capacitor_width,
self.coupling_connector_width,
self.coupling_capacitor_y_offset,
self.capacitor_finger_number,
self.capacitor_finger_gap,
self.capacitor_finger_length,
self.capacitor_finger_width,
self.hilbert_order,
self.absorber_separation))
# Create a new DXF R2018 drawing
self.dxf = ezdxf.new('R2018', setup=True)
# layer names
self.pixel_layer_name = "PIXEL"
self.center_layer_name = "CENTER"
self.pixel_area_layer_name = "PIXEL_AREA"
self.absorber_area_layer_name = "ABSORBER_AREA"
self.index_layer_name = "INDEX"
# layer colors - AutoCAD Color Index - table on http://gohtx.com/acadcolors.php
self.pixel_layer_color = 255
self.pixel_area_layer_color = 140
self.absorber_area_layer_color = 150
self.center_layer_color = 120
self.index_layer_color = 254
# adds layers
self.dxf.layers.add(name=self.pixel_layer_name, color=self.pixel_layer_color)
self.dxf.layers.add(name=self.center_layer_name, color=self.center_layer_color)
self.dxf.layers.add(name=self.pixel_area_layer_name, color=self.pixel_area_layer_color)
self.dxf.layers.add(name=self.absorber_area_layer_name, color=self.absorber_area_layer_color)
self.dxf.layers.add(name=self.index_layer_name, color=self.index_layer_color)
# adds a modelspace
self.msp = self.dxf.modelspace()
# list of all the polygons that draw the whole pixel
self.__pixel_polygons__ = []
# draws a lwpolyline from a list of points
def __draw_polyline(self, points, layer):
self.msp.add_lwpolyline(points, close=True, dxfattribs={"layer": layer})
# builds a shapely Polygon for a rectangle from a corner point and its x/y dimensions
def __draw_rectangle_corner_dimensions(self, corner0, x_size, y_size):
points = ( corner0,
(corner0[0]+x_size, corner0[1]),
(corner0[0]+x_size, corner0[1]+y_size),
(corner0[0], corner0[1]+y_size))
return Polygon(points)
# builds a shapely Polygon for a rectangle from its center and x/y dimensions
def __draw_rectangle_center_dimensions(self, center, x_size, y_size):
points = ((center[0]-0.5*x_size, center[1]-0.5*y_size),
(center[0]+0.5*x_size, center[1]-0.5*y_size),
(center[0]+0.5*x_size, center[1]+0.5*y_size),
(center[0]-0.5*x_size, center[1]+0.5*y_size))
return Polygon(points)
# draws the single digit coupling capacitor
def __draw_coupling_capacitor(self):
corner0 = (0, self.vertical_size+self.coupling_capacitor_y_offset)
x_size = self.coupling_capacitor_length
y_size = self.coupling_capacitor_width
self.__pixel_polygons__.append(self.__draw_rectangle_corner_dimensions(corner0, x_size, y_size))
# draws the interdigital capacitor
def __draw_capacitor(self):
finger_number_int = int(self.capacitor_finger_number)
# draw fingers
for i in range(finger_number_int):
corner0 = (i*(self.capacitor_finger_width+self.capacitor_finger_gap), ((i+1)%2)*self.capacitor_finger_gap + self.line_width)
x_size = self.capacitor_finger_width
y_size = self.capacitor_finger_length
self.__pixel_polygons__.append(self.__draw_rectangle_corner_dimensions(corner0, x_size, y_size))
# pinky finger
if self.capacitor_finger_number-finger_number_int != 0.0:
pinky_length = self.capacitor_finger_length*(self.capacitor_finger_number-finger_number_int)
corner0 = (-self.capacitor_finger_gap-self.capacitor_finger_width, self.line_width)
x_size = self.capacitor_finger_width
y_size = pinky_length
self.__pixel_polygons__.append(self.__draw_rectangle_corner_dimensions(corner0, x_size, y_size))
# draw the two horizontal lines
# upper line
corner0 = (0.0, self.vertical_size-self.line_width)
x_size = finger_number_int*self.capacitor_finger_width + (finger_number_int-1)*self.capacitor_finger_gap
y_size = self.line_width
self.__pixel_polygons__.append(self.__draw_rectangle_corner_dimensions(corner0, x_size, y_size))
# lower line
if self.capacitor_finger_number-finger_number_int != 0.0:
corner0 = (-self.capacitor_finger_gap-self.capacitor_finger_width, 0.0)
x_size = (finger_number_int+1)*self.capacitor_finger_width + finger_number_int*self.capacitor_finger_gap
y_size = self.line_width
self.__pixel_polygons__.append(self.__draw_rectangle_corner_dimensions(corner0, x_size, y_size))
else:
corner0 = (0.0, 0.0)
self.__pixel_polygons__.append(self.__draw_rectangle_corner_dimensions(corner0, x_size, y_size))
# draws the hilbert shaped absorber
def __draw_absorber(self):
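# Build the Hilbert curve as an L-system: starting from the axiom "X",
# each rewriting pass expands X and Y with the rules below; in the
# expanded string "F" means step forward and "+"/"-" mean turn left/right.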
axiom = "X"
X_rule = "-YF+XFX+FY-"
Y_rule = "+XF-YFY-FX+"
for i in range(self.hilbert_order):
new_axiom = ""
for word in axiom:
if word == "X":
new_axiom += X_rule
elif word == "Y":
new_axiom += Y_rule
else:
new_axiom += word
axiom = new_axiom
axiom = axiom.replace("X", "")
axiom = axiom.replace("Y", "")
axiom = axiom.replace("+-", "")
axiom = axiom.replace("-+", "")
points = []
# add an initial horizontal offset to the hilbert pattern
points.append([0.5*self.line_width, 0.0])
L_el = (self.vertical_size-self.line_width)/(2.0**self.hilbert_order-1)
step = [0, L_el]
for word in axiom:
if word == "-": # turn right
new_step = [step[1], -step[0]]
step = new_step
elif word == "+": # turn left
new_step = [-step[1], step[0]]
step = new_step
elif word == "F":
points.append(step)
# add a final horizontal offset to the hilbert pattern
points.append([-0.5*self.line_width, 0.0])
# draw the midline
x0 = self.absorber_separation+int(self.capacitor_finger_number)*self.capacitor_finger_width+int(self.capacitor_finger_number-1)*self.capacitor_finger_gap
y0 = 0.5*self.line_width
starting_point = [x0, y0]
for point in points:
center = (0.5*(2*starting_point[0]+point[0]), 0.5*(2*starting_point[1]+point[1]))
x_size = np.abs(point[0])+self.line_width
y_size = np.abs(point[1])+self.line_width
self.__pixel_polygons__.append(self.__draw_rectangle_center_dimensions(center, x_size, y_size))
starting_point = [starting_point[0]+point[0], starting_point[1]+point[1]]
# draws connection lines between components
def __connect_components(self):
# coupling capacitor connector
corner0 = (0.0, self.vertical_size)
x_size = self.coupling_connector_width
y_size = self.coupling_capacitor_y_offset
self.__pixel_polygons__.append(self.__draw_rectangle_corner_dimensions(corner0, x_size, y_size))
# absorber connectors
x0 = int(self.capacitor_finger_number)*self.capacitor_finger_width+int(self.capacitor_finger_number-1)*self.capacitor_finger_gap
corner0 = (x0, 0.0)
x_size = self.absorber_separation
y_size = self.line_width
self.__pixel_polygons__.append(self.__draw_rectangle_corner_dimensions(corner0, x_size, y_size))
corner0 = (x0, self.vertical_size-self.line_width)
self.__pixel_polygons__.append(self.__draw_rectangle_corner_dimensions(corner0, x_size, y_size))
# draws a cross over the absorber to find its center
def __draw_center(self):
# draw the diagonals to find the center
x0 = self.absorber_separation+int(self.capacitor_finger_number)*self.capacitor_finger_width+int(self.capacitor_finger_number-1)*self.capacitor_finger_gap
points = ((x0, 0.0), (x0+self.vertical_size, self.vertical_size))
self.__draw_polyline(points, self.center_layer_name)
points = ((x0, self.vertical_size), (x0+self.vertical_size, 0.0))
self.__draw_polyline(points, self.center_layer_name)
# draws a box over the whole pixel
def __draw_pixel_area(self):
cor0 = [0.0, 0.0]
cor1 = [0.0, 0.0]
finger_number_int = int(self.capacitor_finger_number)
cor1[0] = finger_number_int*self.capacitor_finger_width+(finger_number_int-1)*self.capacitor_finger_gap+self.absorber_separation+self.vertical_size
cor1[1] = self.vertical_size+self.coupling_capacitor_width+self.coupling_capacitor_y_offset
x_size = cor1[0]-cor0[0]
# check if the coupling length is longer than the pixel size
if self.coupling_capacitor_length > x_size:
cor1[0] = self.coupling_capacitor_length
x_size = cor1[0]-cor0[0]
# check if the extra finger is present
if int(self.capacitor_finger_number)-self.capacitor_finger_number != 0:
cor0[0] = -self.capacitor_finger_gap-self.capacitor_finger_width
x_size = cor1[0]-cor0[0]
y_size = cor1[1]-cor0[1]
self.__draw_polyline(self.__draw_rectangle_corner_dimensions(cor0, x_size, y_size).exterior.coords, self.pixel_area_layer_name)
# draws a box over the absorber
def __draw_absorber_area(self):
corner0 = (self.absorber_separation+int(self.capacitor_finger_number)*self.capacitor_finger_width+int(self.capacitor_finger_number-1)*self.capacitor_finger_gap, 0.0)
x_size = self.vertical_size
y_size = self.vertical_size
self.__draw_polyline(self.__draw_rectangle_corner_dimensions(corner0, x_size, y_size).exterior.coords, self.absorber_area_layer_name)
# draws the textual index on the absorber
def __draw_index(self):
position = (self.absorber_separation+int(self.capacitor_finger_number)*self.capacitor_finger_width+int(self.capacitor_finger_number-1)*self.capacitor_finger_gap, 0.0)
height = 0.35*self.vertical_size
text = str(self.index)
self.msp.add_text(text, dxfattribs={'height': height, 'layer': self.index_layer_name}).set_pos(position, align='LEFT')
# prints on screen all the parameters
def print_info(self):
'''
Prints on screen all the parameters
'''
print(self.info_string)
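# A usage sketch (hedged; every dimension below is an illustrative value
# in microns, not a validated design):
#
#     pixel = HilbertIShape(index=0, vertical_size=200.0, line_width=2.0,
#                           coupling_capacitor_length=100.0,
#                           coupling_capacitor_width=10.0,
#                           coupling_connector_width=4.0,
#                           coupling_capacitor_y_offset=50.0,
#                           capacitor_finger_number=10.5,
#                           capacitor_finger_gap=4.0,
#                           capacitor_finger_width=4.0,
#                           hilbert_order=3, absorber_separation=50.0)
#     pixel.print_info()
#     pixel.save_dxf('dxf/pixel0.dxf')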
# saves a dxf file of the pixel
def save_dxf(self, filename):
'''
Saves a .dxf file of a single pixel
Parameters:
filename: String, the path and name of the script file (ex. 'a/b/pixel0.scr')
Output:
This function creates a .dxf file in the directory specified in the filename parameter.
The drawing has many layers:
- PIXEL: the actual layer where the KID is shown
- PIXEL_AREA: a layer where a rectangle encloses the whole pixel
- ABSORBER_AREA: a layer where a square encloses the absorber section of the KID
- CENTER: a layer where the two diagonals of the ABSORBER_AREA square are shown
- INDEX: a layer where the self.index value of the pixel is shown
The output drawing has the absorber centered to the origin
'''
# make dxf directory
filename = Path(filename)
if not os.path.exists(filename.parent):
os.makedirs(filename.parent)
# draw pixel
self.__draw_coupling_capacitor()
self.__draw_capacitor()
self.__draw_absorber()
self.__connect_components()
# merge all the polygons of the pixel layer and draw a single polyline
pixel_pl = unary_union(self.__pixel_polygons__)
self.__draw_polyline(pixel_pl.exterior.coords, self.pixel_layer_name)
# draw other layers above the pixel
self.__draw_center()
self.__draw_pixel_area()
self.__draw_absorber_area()
self.__draw_index()
# center position of the absorber
center=(-0.5*self.vertical_size-
self.absorber_separation-
int(self.capacitor_finger_number)*self.capacitor_finger_width-
int(self.capacitor_finger_number-1)*self.capacitor_finger_gap,
-0.5*self.vertical_size)
# origin on the absorber center
for entity in self.msp:
entity.transform(ezdxf.math.Matrix44.translate(center[0], center[1], 0.0))
self.dxf.saveas(filename)
# saves the figure of a pixel
def saveFig(self, filename, dpi=150):
'''
Save a figure of the drawing
Parameters:
filename: string, output path and filename of the figure
dpi: int (optional), dpi of the figure, default value: 150
'''
# check if the output directory exists
filename = Path(filename)
if not os.path.exists(filename.parent):
os.makedirs(filename.parent)
fig = plt.figure()
ax = fig.add_axes([0, 0, 1, 1])
backend = MatplotlibBackend(ax)
Frontend(RenderContext(self.dxf), backend).draw_layout(self.msp)
fig.savefig(filename, dpi=dpi)
/Flask-Helper-2.0.8.tar.gz/Flask-Helper-2.0.8/README.md | # Flask-Helper
## 2.0.8
Support loading views and hooks with a fixed suffix
## 2.0.7
Fix an issue where one method could not register two routes
## 2.0.6
Fix a startup issue caused by eventlet
## 2.0.5
Support setting the cross_domain parameter; methods, origin and headers can be configured
## 2.0.4
Remove _packet_data
## 2.0.3
Fix:
'Flask2' object has no attribute 'session_cookie_name'
## 2.0.2
Fix:
TypeError: send_file() got an unexpected keyword argument 'cache_timeout'
## 2.0.1
`from flask_helper import globals` is no longer supported
## 1.2.7
utils.log supports getLogger
## 1.2.6
utils.registry exist_in supports add_not_exist
## 1.2.5
flask_helper.view.View supports view_context_func
## 1.2.4
The user agent hook can take the User-Agent from URL parameters
utils.registry adds notify and notify_callback
## 1.2.3
The user agent hook can skip validation for selected paths
## 1.2.2
Add HookRegistry
## 1.2.1
Remove unnecessary print statements
## 1.2
Add a log attribute to Flask
Hooks also carry a log object, defaulting to the app's log attribute
## 1.1
Flask.run uses eventlet
## 1.0
Python 3 only
## 0.19
Extension features (cross_domain, real_ip, handle_30x, filter_user_agent) are configured only once
## 0.3
Flask2 can be imported with `from flask_helper import Flask2`
/Impression-CMS-0.2.0.tar.gz/Impression-CMS-0.2.0/impression/__init__.py |
__author__ = 'Scott Blevins'
__email__ = '[email protected]'
__version__ = '1.0'
from flask import Flask, g
from webassets.loaders import PythonLoader as PythonAssetsLoader
from impression.controllers.main import main_controller
from impression.controllers.admin import admin_controller
from impression.controllers.file import file_controller
from impression import assets
from impression.models import db
from impression.controls import get_setting
from impression.decorators import import_user
from impression.extensions import (
cache,
assets_env,
debug_toolbar,
themes2,
login_manager
)
def before_app_request():
g.user = None
g.theme = get_setting('blog-theme', 'impression')
g.bootstrap_theme = get_setting('bootstrap-theme', 'yeti')
g.syntax_highlighting_theme = get_setting('syntax-highlighting-theme', 'monokai.css')
g.blog_title = get_setting('blog-title', 'Blog Title')
g.blog_copyright = get_setting('blog-copyright', 'Blog Copyright')
g.upload_directory = get_setting('upload-directory', 'uploads/')
g.allowed_extensions = get_setting('allowed-extensions', ['txt', 'pdf', 'png', 'jpg', 'jpeg', 'gif', 'tiff'])
g.max_file_size = get_setting('max-file-size', 16777216) # 16 MB
g.user = import_user()
def create_app(object_name):
"""
An flask application factory, as explained here:
http://flask.pocoo.org/docs/patterns/appfactories/
Arguments:
object_name: the python path of the config object,
e.g. impression.settings.ProdConfig
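
    Illustrative usage (using the example config path above):
        app = create_app('impression.settings.ProdConfig')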
"""
app = Flask(__name__)
app.config.from_object(object_name)
# initialize the cache
cache.init_app(app)
if debug_toolbar:
# initialize the debug tool bar
debug_toolbar.init_app(app)
# initialize SQLAlchemy
db.init_app(app)
login_manager.init_app(app)
# Import and register the different asset bundles
assets_env.init_app(app)
assets_loader = PythonAssetsLoader(assets)
for name, bundle in assets_loader.load_bundles().items():
assets_env.register(name, bundle)
# register our blueprints
main_controller.before_request(before_app_request)
app.register_blueprint(main_controller)
admin_controller.before_request(before_app_request)
app.register_blueprint(admin_controller)
file_controller.before_request(before_app_request)
app.register_blueprint(file_controller)
# Add theme support
# themes2.init_themes(app, app_identifier="...")
themes2.init_themes(app, app_identifier='impression')
return app | PypiClean |
/Adafruit_Blinka-8.20.1-py3-none-any.whl/adafruit_blinka/board/beagleboard/beaglebone_ai.py | """Pin definitions for the BeagleBone AI."""
from adafruit_blinka.microcontroller.dra74x import pin
# initial pins, to mimic bonescript demo
# BeagleBone Black
# P8_1 = DGND # DGND
# P8_2 = DGND # DGND
P8_3 = pin.P8_3 # GPIO1_6 - GPIO_38
P8_4 = pin.P8_4 # GPIO1_7 - GPIO_39
P8_5 = pin.P8_5 # GPIO1_2 - GPIO_34
P8_6 = pin.P8_6 # GPIO1_3 - GPIO_35
P8_7 = pin.P8_7 # TIMER4 - GPIO_66
P8_8 = pin.P8_8 # TIMER7 - GPIO_67
P8_9 = pin.P8_9 # TIMER5 - GPIO_69
P8_10 = pin.P8_10 # TIMER6 - GPIO_68
P8_11 = pin.P8_11 # GPIO1_13 - GPIO_45
P8_12 = pin.P8_12 # GPIO1_12 - GPIO_44
P8_13 = pin.P8_13 # EHRPWM2B - GPIO_23
P8_14 = pin.P8_14 # GPIO0_26 - GPIO_26
P8_15 = pin.P8_15 # GPIO1_15 - GPIO_47
P8_16 = pin.P8_16 # GPIO1_14 - GPIO_46
P8_17 = pin.P8_17 # GPIO0_27 - GPIO_27
P8_18 = pin.P8_18 # GPIO2_1 - GPIO_65
P8_19 = pin.P8_19 # EHRPWM2A - GPIO_22
P8_20 = pin.P8_20 # GPIO1_31 - GPIO_63
P8_21 = pin.P8_21 # GPIO1_30 - GPIO_62
P8_22 = pin.P8_22 # GPIO1_5 - GPIO_37
P8_23 = pin.P8_23 # GPIO1_4 - GPIO_36
P8_24 = pin.P8_24 # GPIO1_1 - GPIO_33
P8_25 = pin.P8_25 # GPIO1_0 - GPIO_32
P8_26 = pin.P8_26 # GPIO1_29 - GPIO_61
P8_27 = pin.P8_27 # GPIO2_22 - GPIO_86
P8_28 = pin.P8_28 # GPIO2_24 - GPIO_88
P8_29 = pin.P8_29 # GPIO2_23 - GPIO_87
P8_30 = pin.P8_30 # GPIO2_25 - GPIO_89
P8_31 = pin.P8_31 # UART5_CTSN - GPIO_10
P8_32 = pin.P8_32 # UART5_RTSN - GPIO_11
P8_33 = pin.P8_33 # UART4_RTSN - GPIO_9
P8_34 = pin.P8_34 # UART3_RTSN - GPIO_81
P8_35 = pin.P8_35 # UART4_CTSN - GPIO_8
P8_36 = pin.P8_36 # UART3_CTSN - GPIO_80
P8_37 = pin.P8_37 # UART5_TXD - GPIO_78
P8_38 = pin.P8_38 # UART5_RXD - GPIO_79
P8_39 = pin.P8_39 # GPIO2_12 - GPIO_76
P8_40 = pin.P8_40 # GPIO2_13 - GPIO_77
P8_41 = pin.P8_41 # GPIO2_10 - GPIO_74
P8_42 = pin.P8_42 # GPIO2_11 - GPIO_75
P8_43 = pin.P8_43 # GPIO2_8 - GPIO_72
P8_44 = pin.P8_44 # GPIO2_9 - GPIO_73
P8_45 = pin.P8_45 # GPIO2_6 - GPIO_70
P8_46 = pin.P8_46 # GPIO2_7 - GPIO_71
# P9_1 = DGND # DGND
# P9_2 = DGND # DGND
# P9_3 = VDD_3V3 # VDD_3V3
# P9_4 = VDD_3V3 # VDD_3V3
# P9_5 = VDD_5V # VDD_5V
# P9_6 = VDD_5V # VDD_5V
# P9_7 = SYS_5V # SYS_5V
# P9_8 = SYS_5V # SYS_5V
# P9_9 = PWR_BUT # PWR_BUT
# P9_10 = SYS_RESETN # SYS_RESETn
P9_11 = pin.P9_11 # UART4_RXD - GPIO_30
P9_12 = pin.P9_12 # GPIO1_28 - GPIO_60
P9_13 = pin.P9_13 # UART4_TXD - GPIO_31
P9_14 = pin.P9_14 # EHRPWM1A - GPIO_50
P9_15 = pin.P9_15 # GPIO1_16 - GPIO_48
P9_16 = pin.P9_16 # EHRPWM1B - GPIO_51
P9_17 = pin.P9_17 # I2C1_SCL - GPIO_5
P9_18 = pin.P9_18 # I2C1_SDA - GPIO_4
P9_19 = pin.P9_19 # I2C2_SCL - GPIO_13
P9_20 = pin.P9_20 # I2C2_SDA - GPIO_12
P9_21 = pin.P9_21 # UART2_TXD - GPIO_3
P9_22 = pin.P9_22 # UART2_RXD - GPIO_2
P9_23 = pin.P9_23 # GPIO1_17 - GPIO_49
P9_24 = pin.P9_24 # UART1_TXD - GPIO_15
P9_25 = pin.P9_25 # GPIO3_21 - GPIO_117
P9_26 = pin.P9_26 # UART1_RXD - GPIO_14
P9_27 = pin.P9_27 # GPIO3_19 - GPIO_115
P9_28 = pin.P9_28 # SPI1_CS0 - GPIO_113
P9_29 = pin.P9_29 # SPI1_D0 - GPIO_111
P9_30 = pin.P9_30 # SPI1_D1 - GPIO_112
P9_31 = pin.P9_31 # SPI1_SCLK - GPIO_110
# P9_32 = VDD_ADC # VDD_ADC
# P9_33 = AIN4 # AIN4
# P9_34 = GNDA_ADC # GNDA_ADC
# P9_35 = AIN6 # AIN6
# P9_36 = AIN5 # AIN5
# P9_37 = AIN2 # AIN2
# P9_38 = AIN3 # AIN3
# P9_39 = AIN0 # AIN0
# P9_40 = AIN1 # AIN1
P9_41 = pin.P9_41 # CLKOUT2 - GPIO_20
P9_42 = pin.P9_42 # GPIO0_7 - GPIO_7
# P9_43 = DGND # DGND
# P9_44 = DGND # DGND
# P9_45 = DGND # DGND
# P9_46 = DGND # DGND
# common to all beagles
LED_USR0 = pin.USR0
LED_USR1 = pin.USR1
LED_USR2 = pin.USR2
LED_USR3 = pin.USR3
LED_USR4 = pin.USR4
# I2C and SPI pins from:
# src/adafruit_blinka/board/raspi_40pin.py
# SDA = pin.SDA
# SCL = pin.SCL
# CE1 = pin.D7
# CE0 = pin.D8
# MISO = pin.D9
# MOSI = pin.D10
# SCLK = pin.D11
# SCK = pin.D11
# TXD = pin.D14
# RXD = pin.D15
# MISO_1 = pin.D19
# MOSI_1 = pin.D20
# SCLK_1 = pin.D21
# SCK_1 = pin.D21
SDA = pin.I2C4_SDA # P9_20
SCL = pin.I2C4_SCL # P9_19
# Pins for SPI
#
# To enable SPI and an additional I2C port, add the following line to /boot/uEnv.txt:
# dtb=am5729-beagleboneai-roboticscape.dtb
#
# You can verify the dtb file exists by checking the /boot/dtbs/{kernel_version}/ folder
#
CE0 = pin.SPI1_CS0 # P9_28
CE1 = pin.SPI1_CS1 # P9_42
MOSI = pin.SPI1_D1 # P9_30
MISO = pin.SPI1_D0 # P9_29
SCLK = pin.SPI1_SCLK # P9_31
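# A minimal usage sketch (illustrative, not part of the pin map): with Adafruit
# Blinka installed and the overlay above in place, these aliases are reachable
# through the ``board`` module, e.g.:
#   import board
#   import busio
#   i2c = busio.I2C(board.SCL, board.SDA)
#   spi = busio.SPI(board.SCLK, MOSI=board.MOSI, MISO=board.MISO)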
# CircuitPython naming convention for SPI Clock
SCK = SCLK | PypiClean |
/DLMS_SPODES-0.40.11.tar.gz/DLMS_SPODES-0.40.11/src/DLMS_SPODES/configure.py | from .types import cst
from .cosem_interface_classes import cosem_interface_class as ic, collection
from . import ITE_exceptions as exc
from .cosem_interface_classes.overview import ClassID
def get_attr_index(obj: ic.COSEMInterfaceClasses) -> list[int]:
""" return attribute indexes for reading keep configuration for SPODES3 only"""
match obj.CLASS_ID, obj.logical_name:
case ClassID.DATA, _: return [2]
case ClassID.REGISTER, _: return [2, 3]
case ClassID.EXT_REGISTER, _: return [3]
case ClassID.PROFILE_GENERIC, cst.LogicalName(1, _, 94, 7, 1 | 2 | 3 | 4): return [6, 3, 2, 4, 5, 8]
case ClassID.PROFILE_GENERIC, _: return [6, 3, 4, 5, 8]
case ClassID.CLOCK, _: return [8, 9]
case ClassID.SCRIPT_TABLE, _: return [2]
case ClassID.SCHEDULE, _: return [2]
case ClassID.SPECIAL_DAYS_TABLE, _: return []
case ClassID.ACTIVITY_CALENDAR, _: return []
case ClassID.SINGLE_ACTION_SCHEDULE, _: return [2, 3, 4]
case ClassID.ASSOCIATION_LN_CLASS, cst.LogicalName(0, _, 40, 0, 0): return []
case ClassID.ASSOCIATION_LN_CLASS, _: return [4, 5, 7]
case ClassID.IEC_HDLC_SETUP, _: return [2, 3, 4, 5, 6, 7]
case ClassID.DISCONNECT_CONTROL, _: return [3, 4]
case ClassID.LIMITER, _: return [2, 11]
case ClassID.MODEM_CONFIGURATION, _: return [2]
case ClassID.IMAGE_TRANSFER, _: return [2]
case ClassID.GPRS_MODEM_SETUP, _: return [2]
case ClassID.GSM_DIAGNOSTIC, _: return []
case ClassID.CLIENT_SETUP, _: return [] # not need only for client
case ClassID.TCP_UDP_SETUP, _: return [2, 3, 4, 5, 6]
case ClassID.IPV4_SETUP, _: return []
case ClassID.ARBITRATOR, _: return [2]
case ClassID.SECURITY_SETUP, _: return [2, 3, 5]
case ClassID.REGISTER_MONITOR, _: return [3, 2, 4]
case ClassID.DEMAND_REGISTER, _: return [4, 2, 3, 5, 6, 7, 8, 9]
case _: raise exc.NoObject(F"Configuring. Not found {obj} with {obj.CLASS_ID} for read attributes")
empty_dict = dict()
def get_saved_parameters(obj: ic.COSEMInterfaceClasses) -> dict[int, int]:
""" return attribute indexes for saved keep configuration dictionary(attr_index: 0-for value 1-for type, ...)"""
ln = obj.logical_name
match obj.CLASS_ID, obj.logical_name:
case collection.Data.CLASS_ID, cst.LogicalName(0, 0, 96, 1, 1 | 3 | 6 | 8) | cst.LogicalName(1, 0, 0, 8, 4): return {2: 0}
case collection.Data.CLASS_ID, cst.LogicalName(0, 0, 96, 11, 0 | 1 | 2 | 3 | 4 | 5 | 6 | 7 | 8): return empty_dict
case collection.Data.CLASS_ID, _: return {2: 1}
case collection.Register.CLASS_ID, cst.LogicalName(1, 0, 0, 6, 0 | 1 | 2 | 3 | 4): return {2: 1, 3: 1}
case collection.Register.CLASS_ID, _: return {2: 1, 3: 0}
case collection.ExtendedRegister.CLASS_ID, _: return {2: 1, 3: 0}
case collection.ProfileGenericVer1.CLASS_ID, cst.LogicalName(1, _, 94, 7, 1 | 2 | 3 | 4): return {6: 0, 3: 0, 2: 0, 4: 0, 5: 0, 8: 0}
case collection.ProfileGenericVer1.CLASS_ID, _: return {6: 0, 3: 0, 4: 0, 5: 0, 8: 0}
case collection.Clock.CLASS_ID, _: return {8: 0, 9: 0}
case collection.ScriptTable.CLASS_ID, _: return {2: 0}
case collection.Schedule.CLASS_ID, _: return {2: 0}
case collection.SpecialDaysTable.CLASS_ID, _: return empty_dict
case collection.ActivityCalendar.CLASS_ID, _: return empty_dict
case collection.SingleActionSchedule.CLASS_ID, _: return {2: 0, 3: 0, 4: 0}
case collection.AssociationLNVer0.CLASS_ID, cst.LogicalName(0, 0, 40, 0, 0): return {3: 0}
case collection.AssociationLNVer0.CLASS_ID, _: return {2: 0, 3: 0, 4: 0, 5: 0, 6: 0, 9: 0}
case collection.IECHDLCSetupVer1.CLASS_ID, _: return {2: 0, 3: 0, 4: 0, 5: 0, 6: 0, 7: 0, 8: 0}
case collection.DisconnectControl.CLASS_ID, _: return {3: 0, 4: 0}
case collection.Limiter.CLASS_ID, _: return {2: 0, 11: 0}
case collection.PSTNModemConfiguration.CLASS_ID, _: return {2: 0}
case collection.ImageTransfer.CLASS_ID, _: return {2: 0}
case collection.GPRSModemSetup.CLASS_ID, _: return {2: 0}
case collection.GSMDiagnosticVer0.CLASS_ID, _: return empty_dict
case collection.ClientSetup.CLASS_ID, _: return empty_dict # not need only for client
case collection.TCPUDPSetup.CLASS_ID, _: return {2: 0, 3: 0, 4: 0, 5: 0, 6: 0}
case collection.IPv4Setup.CLASS_ID, _: return empty_dict
case collection.Arbitrator.CLASS_ID, _: return {2: 0}
case collection.SecuritySetupVer0.CLASS_ID, _: return {2: 0, 3: 0, 5: 0}
case collection.RegisterMonitor.CLASS_ID, _: return {3: 0, 2: 0, 4: 0}
case ClassID.DEMAND_REGISTER, _: return {2: 1, 3: 1, 4: 0, 5: 1, 8: 0, 9: 0}
case _: raise exc.NoObject(F'Save configure. Not found {obj} with {obj.CLASS_ID} for read attributes')
if __name__ == '__main__':
a = collection.AssociationLNVer0('0.0.1.0.0.255')
print(a)
b = get_attr_index(a)
print(b) | PypiClean |
/Mastodon.py-1.8.1.tar.gz/Mastodon.py-1.8.1/docs/06_accounts.rst | Accounts, relationships and lists
=================================
.. py:module:: mastodon
.. py:class:: Mastodon
Accounts
--------
These functions allow you to get information about accounts and associated data, as well as update that data: profile data (including pinned statuses and endorsements) for the logged-in user's account, and notes for everyone else.
Reading
~~~~~~~~
.. automethod:: Mastodon.account_verify_credentials
.. automethod:: Mastodon.me
.. automethod:: Mastodon.account
.. automethod:: Mastodon.account_search
.. automethod:: Mastodon.account_lookup
.. automethod:: Mastodon.featured_tags
.. automethod:: Mastodon.featured_tag_suggestions
.. automethod:: Mastodon.account_featured_tags
.. automethod:: Mastodon.endorsements
.. automethod:: Mastodon.account_statuses
.. automethod:: Mastodon.account_familiar_followers
.. automethod:: Mastodon.account_lists
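
A minimal sketch of reading account data (the instance URL and access token are placeholders for your own credentials):

.. code-block:: python

    from mastodon import Mastodon

    mastodon = Mastodon(
        access_token='<your access token>',
        api_base_url='https://mastodon.example'
    )
    me = mastodon.account_verify_credentials()
    statuses = mastodon.account_statuses(me)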
Writing
~~~~~~~
.. automethod:: Mastodon.account_update_credentials
.. automethod:: Mastodon.account_pin
.. automethod:: Mastodon.account_unpin
.. automethod:: Mastodon.account_note_set
.. automethod:: Mastodon.featured_tag_create
.. automethod:: Mastodon.featured_tag_delete
.. _status_pin():
.. automethod:: Mastodon.status_pin
.. _status_unpin():
.. automethod:: Mastodon.status_unpin
Following and followers
-----------------------
These functions allow you to get information about the logged-in user's followers and the users the logged-in user follows, as well as follow requests and follow suggestions, and to
manage that data - most importantly, to follow and unfollow users.
Reading
~~~~~~~
.. automethod:: Mastodon.account_followers
.. automethod:: Mastodon.account_following
.. automethod:: Mastodon.account_relationships
.. automethod:: Mastodon.follows
.. automethod:: Mastodon.follow_requests
.. automethod:: Mastodon.suggestions
Writing
~~~~~~~
.. _account_follow():
.. automethod:: Mastodon.account_follow
.. automethod:: Mastodon.account_unfollow
.. automethod:: Mastodon.follow_request_authorize
.. automethod:: Mastodon.follow_request_reject
.. automethod:: Mastodon.suggestion_delete
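
A quick sketch of following a user found via search (reusing the authenticated ``mastodon`` client from the earlier example; the query is a placeholder):

.. code-block:: python

    accounts = mastodon.account_search('some_user')
    if accounts:
        relationship = mastodon.account_follow(accounts[0])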
Mutes and blocks
----------------
These functions allow you to get information about accounts and domains that are muted or blocked by the logged-in user, and to block and mute users and domains.
Reading
~~~~~~~
.. automethod:: Mastodon.mutes
.. automethod:: Mastodon.blocks
.. automethod:: Mastodon.domain_blocks
Writing
~~~~~~~
.. automethod:: Mastodon.account_mute
.. automethod:: Mastodon.account_unmute
.. automethod:: Mastodon.account_block
.. automethod:: Mastodon.account_unblock
.. automethod:: Mastodon.account_remove_from_followers
.. automethod:: Mastodon.domain_block
.. automethod:: Mastodon.domain_unblock
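
A short sketch of muting an account and blocking a domain (``account`` is assumed to come from one of the lookup calls above; the domain is a placeholder):

.. code-block:: python

    mastodon.account_mute(account)
    mastodon.domain_block(domain='spam.example')
    print(mastodon.mutes())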
Lists
-----
These functions allow you to view information about lists as well as to create and update them.
By default, the maximum number of lists for a user is 50.
Reading
~~~~~~~
.. automethod:: Mastodon.lists
.. automethod:: Mastodon.list
.. automethod:: Mastodon.list_accounts
Writing
~~~~~~~
.. automethod:: Mastodon.list_create
.. automethod:: Mastodon.list_update
.. automethod:: Mastodon.list_delete
.. automethod:: Mastodon.list_accounts_add
.. automethod:: Mastodon.list_accounts_delete
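
A minimal sketch of creating a list and adding an account to it (``account`` is assumed to be a previously fetched account dict):

.. code-block:: python

    new_list = mastodon.list_create('Close friends')
    mastodon.list_accounts_add(new_list, [account])
    print(mastodon.list_accounts(new_list))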
| PypiClean |
/NIRCAM_Gsim-1.60.tar.gz/NIRCAM_Gsim-1.60/NIRCAM_Gsim/disperse/disperse.py | from scipy.interpolate import interp1d
import numpy as np
from ..polyclip.polyclip import polyclip
def dispersed_pixel(x0s,y0s,f0,order,C,ID,oversample_factor=2,extrapolate_SED=False,xoffset=0,yoffset=0):
"""This function take a list of pixels and disperses them using the information contained
in a GRISMCONF file, and returns a list of pixel and fluxes.
Parameters
----------
x0s: list
A list of n x-coordinates.
y0s: list
A list of n y-coordinates.
f0: list
A list of n flux (flam) for each of the pixels contained in x0s,y0s.
order: str
The name of the spectral order to disperse.
ID: int
The ID of the object this is for.
oversample_factor: int
The amount of oversampling required above that of the input spectra or natural dispersion, whichever is smaller. Default=2.
    extrapolate_SED: bool
        Whether to allow the SED of the object to be extrapolated when it does not fully cover the
        needed wavelength range. Default is False.
    xoffset: int
        A pixel offset to apply when computing the dispersion (e.g. for padded images)
    yoffset: int
        A pixel offset to apply when computing the dispersion (e.g. for padded images)
Output
------
    xs: array
        A list of x-coordinates of the dispersed pixels
    ys: array
        A list of y-coordinates of the dispersed pixels
    areas: array
        A list of the areas of the incident pixel that, when dispersed, fall on the dispersed pixel
    lams: array
        A list of the wavelengths of each of the dispersed pixels
counts: array
A list of counts for each of the dispersed pixels
ID: array
A list containing the ID. Returned for bookkeeping convenience.
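    Example
    -------
    An illustrative call (``C`` is assumed to be a GRISMCONF configuration object
    loaded elsewhere; the corner coordinates, SED and order name are placeholders)::

        xs, ys, areas, lams, counts, oid = dispersed_pixel(
            [10., 11., 11., 10.], [20., 20., 21., 21.],  # pixel corner coordinates
            ([1.0, 5.0], [1.e-18, 1.e-18]),              # (wavelengths, flam) SED
            "+1", C, ID=1)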
"""
if len(f0[0])>1:
#print("f0:",f0,len(f0[0]),len(f0[1]))
if extrapolate_SED==False:
f = interp1d(f0[0],f0[1],fill_value=0.,bounds_error=False)
else:
f = interp1d(f0[0],f0[1],fill_value="extrapolate",bounds_error=False)
else:
f = lambda x: f0[1][0]
s = C.SENS[order]
x0 = np.mean(x0s)
y0 = np.mean(y0s)
dx0s = [t-x0 for t in x0s]
dy0s = [t-y0 for t in y0s]
# Figuring out a few things about size of order, dispersion and wavelengths to use
wmin = C.WRANGE[order][0]
wmax = C.WRANGE[order][1]
t0 = C.INVDISPL(order,x0+xoffset,y0+yoffset,wmin)
t1 = C.INVDISPL(order,x0+xoffset,y0+yoffset,wmax)
dx0 = C.DISPX(order,x0+xoffset,y0+yoffset,t0) - C.DISPX(order,x0+xoffset,y0+yoffset,t1)
dx1 = C.DISPY(order,x0+xoffset,y0+yoffset,t0) - C.DISPY(order,x0+xoffset,y0+yoffset,t1)
dw = np.abs((wmax-wmin)/(dx1-dx0))
# Use a natural wavelength scale or the wavelength scale of the input SED/spectrum, whichever is smaller, divided by oversampling requested
if len(f0[0])>1:
input_dlam = np.median(f0[0][1:]-f0[0][:-1])
if input_dlam<dw:
dlam = input_dlam/oversample_factor
else:
dlam = dw/oversample_factor
else:
dlam = dw/oversample_factor
lambdas = np.arange(wmin,wmax+dlam,dlam)
dS = C.INVDISPL(order,x0+xoffset,y0+yoffset,lambdas)
m = len(lambdas)
dXs = C.DISPX(order,x0+xoffset,y0+yoffset,dS)
dYs = C.DISPY(order,x0+xoffset,y0+yoffset,dS)
x0s = x0 + dXs
y0s = y0 + dYs
padding = 1
l = x0s.astype(np.int32) - padding
r = x0s.astype(np.int32) + padding
b = y0s.astype(np.int32) - padding
t = y0s.astype(np.int32) + padding
px = np.array([x0s+dx0s[0],x0s+dx0s[1],x0s+dx0s[2],x0s+dx0s[3]],dtype=np.float32).transpose().ravel()
py = np.array([y0s+dy0s[0],y0s+dy0s[1],y0s+dy0s[2],y0s+dy0s[3]],dtype=np.float32).transpose().ravel()
lams = np.array([[ll,0,0,0] for ll in lambdas],dtype=np.float32).transpose().ravel()
poly_inds = np.arange(0,(m+1)*4,4,dtype=np.int32)
n_poly = len(x0s)
n = len(lams) # number of pixels we are "dropping", e.g. number of wav bins
n = n*2
index = np.zeros(n, dtype=np.int32)
x = np.zeros(n, dtype=np.int32)
y = np.zeros(n, dtype=np.int32)
areas = np.zeros(n, dtype=np.float32)
nclip_poly = np.array([0],np.int32)
polyclip.polyclip_multi4(l,r,b,t,px,py,n_poly,poly_inds,x,y,nclip_poly,areas,index)
xs = x[0:nclip_poly[0]]
ys = y[0:nclip_poly[0]]
areas = areas[0:nclip_poly[0]]
lams = np.take(lambdas,index)[0:len(xs)]
    counts = f(lams)*areas*s(lams)*np.abs(dlam) * 10000. # factor of 10000 because dlam is in micron and we want Angstrom when applying f(lams)
vg = (xs>=0) & (ys>=0)
if len(xs[vg])==0:
return np.array([0]),np.array([0]),np.array([0]),np.array([0]),np.array([0]),0
return xs[vg], ys[vg], areas[vg], lams[vg], counts[vg], ID | PypiClean |
/Djblets-3.3.tar.gz/Djblets-3.3/djblets/static/lib/js/selectize-0.12.4.js | (function(root, factory) {
if (typeof define === 'function' && define.amd) {
define('sifter', factory);
} else if (typeof exports === 'object') {
module.exports = factory();
} else {
root.Sifter = factory();
}
}(this, function() {
/**
* Textually searches arrays and hashes of objects
* by property (or multiple properties). Designed
* specifically for autocomplete.
*
* @constructor
* @param {array|object} items
* @param {object} items
*/
var Sifter = function(items, settings) {
this.items = items;
this.settings = settings || {diacritics: true};
};
/**
* Splits a search string into an array of individual
* regexps to be used to match results.
*
* @param {string} query
* @returns {array}
*/
Sifter.prototype.tokenize = function(query) {
query = trim(String(query || '').toLowerCase());
if (!query || !query.length) return [];
var i, n, regex, letter;
var tokens = [];
var words = query.split(/ +/);
for (i = 0, n = words.length; i < n; i++) {
regex = escape_regex(words[i]);
if (this.settings.diacritics) {
for (letter in DIACRITICS) {
if (DIACRITICS.hasOwnProperty(letter)) {
regex = regex.replace(new RegExp(letter, 'g'), DIACRITICS[letter]);
}
}
}
tokens.push({
string : words[i],
regex : new RegExp(regex, 'i')
});
}
return tokens;
};
/**
* Iterates over arrays and hashes.
*
* ```
* this.iterator(this.items, function(item, id) {
* // invoked for each item
* });
* ```
*
* @param {array|object} object
*/
Sifter.prototype.iterator = function(object, callback) {
var iterator;
if (is_array(object)) {
iterator = Array.prototype.forEach || function(callback) {
for (var i = 0, n = this.length; i < n; i++) {
callback(this[i], i, this);
}
};
} else {
iterator = function(callback) {
for (var key in this) {
if (this.hasOwnProperty(key)) {
callback(this[key], key, this);
}
}
};
}
iterator.apply(object, [callback]);
};
/**
* Returns a function to be used to score individual results.
*
* Good matches will have a higher score than poor matches.
* If an item is not a match, 0 will be returned by the function.
*
* @param {object|string} search
* @param {object} options (optional)
* @returns {function}
*/
Sifter.prototype.getScoreFunction = function(search, options) {
var self, fields, tokens, token_count, nesting;
self = this;
search = self.prepareSearch(search, options);
tokens = search.tokens;
fields = search.options.fields;
token_count = tokens.length;
nesting = search.options.nesting;
/**
* Calculates how close of a match the
* given value is against a search token.
*
* @param {mixed} value
* @param {object} token
* @return {number}
*/
var scoreValue = function(value, token) {
var score, pos;
if (!value) return 0;
value = String(value || '');
pos = value.search(token.regex);
if (pos === -1) return 0;
score = token.string.length / value.length;
if (pos === 0) score += 0.5;
return score;
};
/**
* Calculates the score of an object
* against the search query.
*
* @param {object} token
* @param {object} data
* @return {number}
*/
var scoreObject = (function() {
var field_count = fields.length;
if (!field_count) {
return function() { return 0; };
}
if (field_count === 1) {
return function(token, data) {
return scoreValue(getattr(data, fields[0], nesting), token);
};
}
return function(token, data) {
for (var i = 0, sum = 0; i < field_count; i++) {
sum += scoreValue(getattr(data, fields[i], nesting), token);
}
return sum / field_count;
};
})();
if (!token_count) {
return function() { return 0; };
}
if (token_count === 1) {
return function(data) {
return scoreObject(tokens[0], data);
};
}
if (search.options.conjunction === 'and') {
return function(data) {
var score;
for (var i = 0, sum = 0; i < token_count; i++) {
score = scoreObject(tokens[i], data);
if (score <= 0) return 0;
sum += score;
}
return sum / token_count;
};
} else {
return function(data) {
for (var i = 0, sum = 0; i < token_count; i++) {
sum += scoreObject(tokens[i], data);
}
return sum / token_count;
};
}
};
/**
* Returns a function that can be used to compare two
* results, for sorting purposes. If no sorting should
* be performed, `null` will be returned.
*
* @param {string|object} search
* @param {object} options
* @return function(a,b)
*/
Sifter.prototype.getSortFunction = function(search, options) {
var i, n, self, field, fields, fields_count, multiplier, multipliers, get_field, implicit_score, sort;
self = this;
search = self.prepareSearch(search, options);
sort = (!search.query && options.sort_empty) || options.sort;
/**
* Fetches the specified sort field value
* from a search result item.
*
* @param {string} name
* @param {object} result
* @return {mixed}
*/
get_field = function(name, result) {
if (name === '$score') return result.score;
return getattr(self.items[result.id], name, options.nesting);
};
// parse options
fields = [];
if (sort) {
for (i = 0, n = sort.length; i < n; i++) {
if (search.query || sort[i].field !== '$score') {
fields.push(sort[i]);
}
}
}
// the "$score" field is implied to be the primary
// sort field, unless it's manually specified
if (search.query) {
implicit_score = true;
for (i = 0, n = fields.length; i < n; i++) {
if (fields[i].field === '$score') {
implicit_score = false;
break;
}
}
if (implicit_score) {
fields.unshift({field: '$score', direction: 'desc'});
}
} else {
for (i = 0, n = fields.length; i < n; i++) {
if (fields[i].field === '$score') {
fields.splice(i, 1);
break;
}
}
}
multipliers = [];
for (i = 0, n = fields.length; i < n; i++) {
multipliers.push(fields[i].direction === 'desc' ? -1 : 1);
}
// build function
fields_count = fields.length;
if (!fields_count) {
return null;
} else if (fields_count === 1) {
field = fields[0].field;
multiplier = multipliers[0];
return function(a, b) {
return multiplier * cmp(
get_field(field, a),
get_field(field, b)
);
};
} else {
return function(a, b) {
var i, result, a_value, b_value, field;
for (i = 0; i < fields_count; i++) {
field = fields[i].field;
result = multipliers[i] * cmp(
get_field(field, a),
get_field(field, b)
);
if (result) return result;
}
return 0;
};
}
};
/**
* Parses a search query and returns an object
* with tokens and fields ready to be populated
* with results.
*
* @param {string} query
* @param {object} options
* @returns {object}
*/
Sifter.prototype.prepareSearch = function(query, options) {
if (typeof query === 'object') return query;
options = extend({}, options);
var option_fields = options.fields;
var option_sort = options.sort;
var option_sort_empty = options.sort_empty;
if (option_fields && !is_array(option_fields)) options.fields = [option_fields];
if (option_sort && !is_array(option_sort)) options.sort = [option_sort];
if (option_sort_empty && !is_array(option_sort_empty)) options.sort_empty = [option_sort_empty];
return {
options : options,
query : String(query || '').toLowerCase(),
tokens : this.tokenize(query),
total : 0,
items : []
};
};
/**
* Searches through all items and returns a sorted array of matches.
*
* The `options` parameter can contain:
*
* - fields {string|array}
* - sort {array}
* - score {function}
* - filter {bool}
* - limit {integer}
*
* Returns an object containing:
*
* - options {object}
* - query {string}
* - tokens {array}
* - total {int}
* - items {array}
*
* @param {string} query
* @param {object} options
* @returns {object}
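	 * @example
	 * // an illustrative sketch: search an array of objects by their "title" field
	 * var sifter = new Sifter([{title: 'Mexico City'}, {title: 'Toronto'}]);
	 * var result = sifter.search('mex', {fields: ['title'], limit: 10});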
*/
Sifter.prototype.search = function(query, options) {
var self = this, value, score, search, calculateScore;
var fn_sort;
var fn_score;
search = this.prepareSearch(query, options);
options = search.options;
query = search.query;
// generate result scoring function
fn_score = options.score || self.getScoreFunction(search);
// perform search and sort
if (query.length) {
self.iterator(self.items, function(item, id) {
score = fn_score(item);
if (options.filter === false || score > 0) {
search.items.push({'score': score, 'id': id});
}
});
} else {
self.iterator(self.items, function(item, id) {
search.items.push({'score': 1, 'id': id});
});
}
fn_sort = self.getSortFunction(search, options);
if (fn_sort) search.items.sort(fn_sort);
// apply limits
search.total = search.items.length;
if (typeof options.limit === 'number') {
search.items = search.items.slice(0, options.limit);
}
return search;
};
// utilities
// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
var cmp = function(a, b) {
if (typeof a === 'number' && typeof b === 'number') {
return a > b ? 1 : (a < b ? -1 : 0);
}
a = asciifold(String(a || ''));
b = asciifold(String(b || ''));
if (a > b) return 1;
if (b > a) return -1;
return 0;
};
var extend = function(a, b) {
var i, n, k, object;
for (i = 1, n = arguments.length; i < n; i++) {
object = arguments[i];
if (!object) continue;
for (k in object) {
if (object.hasOwnProperty(k)) {
a[k] = object[k];
}
}
}
return a;
};
/**
* A property getter resolving dot-notation
* @param {Object} obj The root object to fetch property on
* @param {String} name The optionally dotted property name to fetch
* @param {Boolean} nesting Handle nesting or not
* @return {Object} The resolved property value
*/
var getattr = function(obj, name, nesting) {
if (!obj || !name) return;
if (!nesting) return obj[name];
var names = name.split(".");
while(names.length && (obj = obj[names.shift()]));
return obj;
};
var trim = function(str) {
		return (str + '').replace(/^\s+|\s+$/g, '');
};
var escape_regex = function(str) {
return (str + '').replace(/([.?*+^$[\]\\(){}|-])/g, '\\$1');
};
var is_array = Array.isArray || (typeof $ !== 'undefined' && $.isArray) || function(object) {
return Object.prototype.toString.call(object) === '[object Array]';
};
var DIACRITICS = {
'a': '[aḀḁĂăÂâǍǎȺⱥȦȧẠạÄäÀàÁáĀāÃãÅåąĄÃąĄ]',
'b': '[b␢βΒB฿𐌁ᛒ]',
'c': '[cĆćĈĉČčĊċC̄c̄ÇçḈḉȻȼƇƈɕᴄCc]',
'd': '[dĎďḊḋḐḑḌḍḒḓḎḏĐđD̦d̦ƉɖƊɗƋƌᵭᶁᶑȡᴅDdð]',
'e': '[eÉéÈèÊêḘḙĚěĔĕẼẽḚḛẺẻĖėËëĒēȨȩĘęᶒɆɇȄȅẾếỀềỄễỂểḜḝḖḗḔḕȆȇẸẹỆệⱸᴇEeɘǝƏƐε]',
'f': '[fƑƒḞḟ]',
'g': '[gɢ₲ǤǥĜĝĞğĢģƓɠĠġ]',
'h': '[hĤĥĦħḨḩẖẖḤḥḢḣɦʰǶƕ]',
'i': '[iÍíÌìĬĭÎîǏǐÏïḮḯĨĩĮįĪīỈỉȈȉȊȋỊịḬḭƗɨɨ̆ᵻᶖİiIıɪIi]',
'j': '[jȷĴĵɈɉʝɟʲ]',
'k': '[kƘƙꝀꝁḰḱǨǩḲḳḴḵκϰ₭]',
'l': '[lŁłĽľĻļĹĺḶḷḸḹḼḽḺḻĿŀȽƚⱠⱡⱢɫɬᶅɭȴʟLl]',
'n': '[nŃńǸǹŇňÑñṄṅŅņṆṇṊṋṈṉN̈n̈ƝɲȠƞᵰᶇɳȵɴNnŊŋ]',
'o': '[oØøÖöÓóÒòÔôǑǒŐőŎŏȮȯỌọƟɵƠơỎỏŌōÕõǪǫȌȍՕօ]',
'p': '[pṔṕṖṗⱣᵽƤƥᵱ]',
'q': '[qꝖꝗʠɊɋꝘꝙq̃]',
'r': '[rŔŕɌɍŘřŖŗṘṙȐȑȒȓṚṛⱤɽ]',
's': '[sŚśṠṡṢṣꞨꞩŜŝŠšŞşȘșS̈s̈]',
't': '[tŤťṪṫŢţṬṭƮʈȚțṰṱṮṯƬƭ]',
'u': '[uŬŭɄʉỤụÜüÚúÙùÛûǓǔŰűŬŭƯưỦủŪūŨũŲųȔȕ∪]',
'v': '[vṼṽṾṿƲʋꝞꝟⱱʋ]',
'w': '[wẂẃẀẁŴŵẄẅẆẇẈẉ]',
'x': '[xẌẍẊẋχ]',
'y': '[yÝýỲỳŶŷŸÿỸỹẎẏỴỵɎɏƳƴ]',
'z': '[zŹźẐẑŽžŻżẒẓẔẕƵƶ]'
};
var asciifold = (function() {
var i, n, k, chunk;
var foreignletters = '';
var lookup = {};
for (k in DIACRITICS) {
if (DIACRITICS.hasOwnProperty(k)) {
chunk = DIACRITICS[k].substring(2, DIACRITICS[k].length - 1);
foreignletters += chunk;
for (i = 0, n = chunk.length; i < n; i++) {
lookup[chunk.charAt(i)] = k;
}
}
}
var regexp = new RegExp('[' + foreignletters + ']', 'g');
return function(str) {
return str.replace(regexp, function(foreignletter) {
return lookup[foreignletter];
}).toLowerCase();
};
})();
// export
// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
return Sifter;
}));
/**
* microplugin.js
* Copyright (c) 2013 Brian Reavis & contributors
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this
* file except in compliance with the License. You may obtain a copy of the License at:
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under
* the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
* ANY KIND, either express or implied. See the License for the specific language
* governing permissions and limitations under the License.
*
* @author Brian Reavis <[email protected]>
*/
(function(root, factory) {
if (typeof define === 'function' && define.amd) {
define('microplugin', factory);
} else if (typeof exports === 'object') {
module.exports = factory();
} else {
root.MicroPlugin = factory();
}
}(this, function() {
var MicroPlugin = {};
MicroPlugin.mixin = function(Interface) {
Interface.plugins = {};
/**
* Initializes the listed plugins (with options).
* Acceptable formats:
*
* List (without options):
* ['a', 'b', 'c']
*
* List (with options):
* [{'name': 'a', options: {}}, {'name': 'b', options: {}}]
*
* Hash (with options):
* {'a': { ... }, 'b': { ... }, 'c': { ... }}
*
* @param {mixed} plugins
*/
Interface.prototype.initializePlugins = function(plugins) {
var i, n, key;
var self = this;
var queue = [];
self.plugins = {
names : [],
settings : {},
requested : {},
loaded : {}
};
if (utils.isArray(plugins)) {
for (i = 0, n = plugins.length; i < n; i++) {
if (typeof plugins[i] === 'string') {
queue.push(plugins[i]);
} else {
self.plugins.settings[plugins[i].name] = plugins[i].options;
queue.push(plugins[i].name);
}
}
} else if (plugins) {
for (key in plugins) {
if (plugins.hasOwnProperty(key)) {
self.plugins.settings[key] = plugins[key];
queue.push(key);
}
}
}
while (queue.length) {
self.require(queue.shift());
}
};
Interface.prototype.loadPlugin = function(name) {
var self = this;
var plugins = self.plugins;
var plugin = Interface.plugins[name];
if (!Interface.plugins.hasOwnProperty(name)) {
throw new Error('Unable to find "' + name + '" plugin');
}
plugins.requested[name] = true;
plugins.loaded[name] = plugin.fn.apply(self, [self.plugins.settings[name] || {}]);
plugins.names.push(name);
};
/**
* Initializes a plugin.
*
* @param {string} name
*/
Interface.prototype.require = function(name) {
var self = this;
var plugins = self.plugins;
if (!self.plugins.loaded.hasOwnProperty(name)) {
if (plugins.requested[name]) {
throw new Error('Plugin has circular dependency ("' + name + '")');
}
self.loadPlugin(name);
}
return plugins.loaded[name];
};
/**
* Registers a plugin.
*
* @param {string} name
* @param {function} fn
*/
Interface.define = function(name, fn) {
Interface.plugins[name] = {
'name' : name,
'fn' : fn
};
};
};
var utils = {
isArray: Array.isArray || function(vArg) {
return Object.prototype.toString.call(vArg) === '[object Array]';
}
};
return MicroPlugin;
}));
/**
* selectize.js (v0.12.4)
* Copyright (c) 2013–2015 Brian Reavis & contributors
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this
* file except in compliance with the License. You may obtain a copy of the License at:
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under
* the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF
* ANY KIND, either express or implied. See the License for the specific language
* governing permissions and limitations under the License.
*
* @author Brian Reavis <[email protected]>
*/
/*jshint curly:false */
/*jshint browser:true */
(function(root, factory) {
if (typeof define === 'function' && define.amd) {
define('selectize', ['jquery','sifter','microplugin'], factory);
} else if (typeof exports === 'object') {
module.exports = factory(require('jquery'), require('sifter'), require('microplugin'));
} else {
root.Selectize = factory(root.jQuery, root.Sifter, root.MicroPlugin);
}
}(this, function($, Sifter, MicroPlugin) {
'use strict';
var highlight = function($element, pattern) {
if (typeof pattern === 'string' && !pattern.length) return;
var regex = (typeof pattern === 'string') ? new RegExp(pattern, 'i') : pattern;
var highlight = function(node) {
var skip = 0;
if (node.nodeType === 3) {
var pos = node.data.search(regex);
if (pos >= 0 && node.data.length > 0) {
var match = node.data.match(regex);
var spannode = document.createElement('span');
spannode.className = 'highlight';
var middlebit = node.splitText(pos);
var endbit = middlebit.splitText(match[0].length);
var middleclone = middlebit.cloneNode(true);
spannode.appendChild(middleclone);
middlebit.parentNode.replaceChild(spannode, middlebit);
skip = 1;
}
} else if (node.nodeType === 1 && node.childNodes && !/(script|style)/i.test(node.tagName)) {
for (var i = 0; i < node.childNodes.length; ++i) {
i += highlight(node.childNodes[i]);
}
}
return skip;
};
return $element.each(function() {
highlight(this);
});
};
/**
* removeHighlight fn copied from highlight v5 and
* edited to remove with() and pass js strict mode
*/
$.fn.removeHighlight = function() {
return this.find("span.highlight").each(function() {
this.parentNode.firstChild.nodeName;
var parent = this.parentNode;
parent.replaceChild(this.firstChild, this);
parent.normalize();
}).end();
};
var MicroEvent = function() {};
MicroEvent.prototype = {
on: function(event, fct){
this._events = this._events || {};
this._events[event] = this._events[event] || [];
this._events[event].push(fct);
},
off: function(event, fct){
var n = arguments.length;
if (n === 0) return delete this._events;
if (n === 1) return delete this._events[event];
this._events = this._events || {};
if (event in this._events === false) return;
this._events[event].splice(this._events[event].indexOf(fct), 1);
},
trigger: function(event /* , args... */){
this._events = this._events || {};
if (event in this._events === false) return;
for (var i = 0; i < this._events[event].length; i++){
this._events[event][i].apply(this, Array.prototype.slice.call(arguments, 1));
}
}
};
/**
* Mixin will delegate all MicroEvent.js function in the destination object.
*
* - MicroEvent.mixin(Foobar) will make Foobar able to use MicroEvent
*
* @param {object} the object which will support MicroEvent
*/
MicroEvent.mixin = function(destObject){
var props = ['on', 'off', 'trigger'];
for (var i = 0; i < props.length; i++){
destObject.prototype[props[i]] = MicroEvent.prototype[props[i]];
}
};
var IS_MAC = /Mac/.test(navigator.userAgent);
var KEY_A = 65;
var KEY_COMMA = 188;
var KEY_RETURN = 13;
var KEY_ESC = 27;
var KEY_LEFT = 37;
var KEY_UP = 38;
var KEY_P = 80;
var KEY_RIGHT = 39;
var KEY_DOWN = 40;
var KEY_N = 78;
var KEY_BACKSPACE = 8;
var KEY_DELETE = 46;
var KEY_SHIFT = 16;
var KEY_CMD = IS_MAC ? 91 : 17;
var KEY_CTRL = IS_MAC ? 18 : 17;
var KEY_TAB = 9;
var TAG_SELECT = 1;
var TAG_INPUT = 2;
// for now, android support in general is too spotty to support validity
var SUPPORTS_VALIDITY_API = !/android/i.test(window.navigator.userAgent) && !!document.createElement('input').validity;
var isset = function(object) {
return typeof object !== 'undefined';
};
/**
* Converts a scalar to its best string representation
* for hash keys and HTML attribute values.
*
* Transformations:
* 'str' -> 'str'
* null -> ''
* undefined -> ''
* true -> '1'
* false -> '0'
* 0 -> '0'
* 1 -> '1'
*
* @param {string} value
* @returns {string|null}
*/
var hash_key = function(value) {
if (typeof value === 'undefined' || value === null) return null;
if (typeof value === 'boolean') return value ? '1' : '0';
return value + '';
};
/**
* Escapes a string for use within HTML.
*
* @param {string} str
* @returns {string}
*/
var escape_html = function(str) {
return (str + '')
.replace(/&/g, '&')
.replace(/</g, '<')
.replace(/>/g, '>')
.replace(/"/g, '"');
};
/**
* Escapes "$" characters in replacement strings.
*
* @param {string} str
* @returns {string}
*/
var escape_replace = function(str) {
return (str + '').replace(/\$/g, '$$$$');
};
var hook = {};
/**
* Wraps `method` on `self` so that `fn`
* is invoked before the original method.
*
* @param {object} self
* @param {string} method
* @param {function} fn
*/
hook.before = function(self, method, fn) {
var original = self[method];
self[method] = function() {
fn.apply(self, arguments);
return original.apply(self, arguments);
};
};
/**
* Wraps `method` on `self` so that `fn`
* is invoked after the original method.
*
* @param {object} self
* @param {string} method
* @param {function} fn
*/
hook.after = function(self, method, fn) {
var original = self[method];
self[method] = function() {
var result = original.apply(self, arguments);
fn.apply(self, arguments);
return result;
};
};
/**
* Wraps `fn` so that it can only be invoked once.
*
* @param {function} fn
* @returns {function}
*/
var once = function(fn) {
var called = false;
return function() {
if (called) return;
called = true;
fn.apply(this, arguments);
};
};
/**
* Wraps `fn` so that it can only be called once
* every `delay` milliseconds (invoked on the falling edge).
*
* @param {function} fn
* @param {int} delay
* @returns {function}
*/
var debounce = function(fn, delay) {
var timeout;
return function() {
var self = this;
var args = arguments;
window.clearTimeout(timeout);
timeout = window.setTimeout(function() {
fn.apply(self, args);
}, delay);
};
};
/**
* Debounce all fired events types listed in `types`
* while executing the provided `fn`.
*
* @param {object} self
* @param {array} types
* @param {function} fn
*/
var debounce_events = function(self, types, fn) {
var type;
var trigger = self.trigger;
var event_args = {};
// override trigger method
self.trigger = function() {
var type = arguments[0];
if (types.indexOf(type) !== -1) {
event_args[type] = arguments;
} else {
return trigger.apply(self, arguments);
}
};
// invoke provided function
fn.apply(self, []);
self.trigger = trigger;
// trigger queued events
for (type in event_args) {
if (event_args.hasOwnProperty(type)) {
trigger.apply(self, event_args[type]);
}
}
};
/**
* A workaround for http://bugs.jquery.com/ticket/6696
*
* @param {object} $parent - Parent element to listen on.
* @param {string} event - Event name.
* @param {string} selector - Descendant selector to filter by.
* @param {function} fn - Event handler.
*/
var watchChildEvent = function($parent, event, selector, fn) {
$parent.on(event, selector, function(e) {
var child = e.target;
while (child && child.parentNode !== $parent[0]) {
child = child.parentNode;
}
e.currentTarget = child;
return fn.apply(this, [e]);
});
};
/**
* Determines the current selection within a text input control.
* Returns an object containing:
* - start
* - length
*
* @param {object} input
* @returns {object}
*/
var getSelection = function(input) {
var result = {};
if ('selectionStart' in input) {
result.start = input.selectionStart;
result.length = input.selectionEnd - result.start;
} else if (document.selection) {
input.focus();
var sel = document.selection.createRange();
var selLen = document.selection.createRange().text.length;
sel.moveStart('character', -input.value.length);
result.start = sel.text.length - selLen;
result.length = selLen;
}
return result;
};
/**
* Copies CSS properties from one element to another.
*
* @param {object} $from
* @param {object} $to
* @param {array} properties
*/
var transferStyles = function($from, $to, properties) {
var i, n, styles = {};
if (properties) {
for (i = 0, n = properties.length; i < n; i++) {
styles[properties[i]] = $from.css(properties[i]);
}
} else {
styles = $from.css();
}
$to.css(styles);
};
/**
* Measures the width of a string within a
* parent element (in pixels).
*
* @param {string} str
* @param {object} $parent
* @returns {int}
*/
var measureString = function(str, $parent) {
if (!str) {
return 0;
}
var $test = $('<test>').css({
position: 'absolute',
top: -99999,
left: -99999,
width: 'auto',
padding: 0,
whiteSpace: 'pre'
}).text(str).appendTo('body');
transferStyles($parent, $test, [
'letterSpacing',
'fontSize',
'fontFamily',
'fontWeight',
'textTransform'
]);
var width = $test.width();
$test.remove();
return width;
};
/**
* Sets up an input to grow horizontally as the user
* types. If the value is changed manually, you can
* trigger the "update" handler to resize:
*
* $input.trigger('update');
*
* @param {object} $input
*/
var autoGrow = function($input) {
var currentWidth = null;
var update = function(e, options) {
var value, keyCode, printable, placeholder, width;
var shift, character, selection;
e = e || window.event || {};
options = options || {};
if (e.metaKey || e.altKey) return;
if (!options.force && $input.data('grow') === false) return;
value = $input.val();
if (e.type && e.type.toLowerCase() === 'keydown') {
keyCode = e.keyCode;
printable = (
(keyCode >= 97 && keyCode <= 122) || // a-z
(keyCode >= 65 && keyCode <= 90) || // A-Z
(keyCode >= 48 && keyCode <= 57) || // 0-9
keyCode === 32 // space
);
if (keyCode === KEY_DELETE || keyCode === KEY_BACKSPACE) {
selection = getSelection($input[0]);
if (selection.length) {
value = value.substring(0, selection.start) + value.substring(selection.start + selection.length);
} else if (keyCode === KEY_BACKSPACE && selection.start) {
value = value.substring(0, selection.start - 1) + value.substring(selection.start + 1);
} else if (keyCode === KEY_DELETE && typeof selection.start !== 'undefined') {
value = value.substring(0, selection.start) + value.substring(selection.start + 1);
}
} else if (printable) {
shift = e.shiftKey;
character = String.fromCharCode(e.keyCode);
if (shift) character = character.toUpperCase();
else character = character.toLowerCase();
value += character;
}
}
placeholder = $input.attr('placeholder');
if (!value && placeholder) {
value = placeholder;
}
width = measureString(value, $input) + 4;
if (width !== currentWidth) {
currentWidth = width;
$input.width(width);
$input.triggerHandler('resize');
}
};
$input.on('keydown keyup update blur', update);
update();
};
var domToString = function(d) {
var tmp = document.createElement('div');
tmp.appendChild(d.cloneNode(true));
return tmp.innerHTML;
};
var logError = function(message, options){
if(!options) options = {};
var component = "Selectize";
console.error(component + ": " + message)
if(options.explanation){
// console.group is undefined in <IE11
if(console.group) console.group();
console.error(options.explanation);
if(console.group) console.groupEnd();
}
}
var Selectize = function($input, settings) {
var key, i, n, dir, input, self = this;
input = $input[0];
input.selectize = self;
// detect rtl environment
var computedStyle = window.getComputedStyle && window.getComputedStyle(input, null);
dir = computedStyle ? computedStyle.getPropertyValue('direction') : input.currentStyle && input.currentStyle.direction;
dir = dir || $input.parents('[dir]:first').attr('dir') || '';
// setup default state
$.extend(self, {
order : 0,
settings : settings,
$input : $input,
tabIndex : $input.attr('tabindex') || '',
tagType : input.tagName.toLowerCase() === 'select' ? TAG_SELECT : TAG_INPUT,
rtl : /rtl/i.test(dir),
eventNS : '.selectize' + (++Selectize.count),
highlightedValue : null,
isOpen : false,
isDisabled : false,
isRequired : $input.is('[required]'),
isInvalid : false,
isLocked : false,
isFocused : false,
isInputHidden : false,
isSetup : false,
isShiftDown : false,
isCmdDown : false,
isCtrlDown : false,
ignoreFocus : false,
ignoreBlur : false,
ignoreHover : false,
hasOptions : false,
currentResults : null,
lastValue : '',
caretPos : 0,
loading : 0,
loadedSearches : {},
$activeOption : null,
$activeItems : [],
optgroups : {},
options : {},
userOptions : {},
items : [],
renderCache : {},
onSearchChange : settings.loadThrottle === null ? self.onSearchChange : debounce(self.onSearchChange, settings.loadThrottle)
});
// search system
self.sifter = new Sifter(this.options, {diacritics: settings.diacritics});
// build options table
if (self.settings.options) {
for (i = 0, n = self.settings.options.length; i < n; i++) {
self.registerOption(self.settings.options[i]);
}
delete self.settings.options;
}
// build optgroup table
if (self.settings.optgroups) {
for (i = 0, n = self.settings.optgroups.length; i < n; i++) {
self.registerOptionGroup(self.settings.optgroups[i]);
}
delete self.settings.optgroups;
}
// option-dependent defaults
self.settings.mode = self.settings.mode || (self.settings.maxItems === 1 ? 'single' : 'multi');
if (typeof self.settings.hideSelected !== 'boolean') {
self.settings.hideSelected = self.settings.mode === 'multi';
}
self.initializePlugins(self.settings.plugins);
self.setupCallbacks();
self.setupTemplates();
self.setup();
};
// mixins
// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
MicroEvent.mixin(Selectize);
if(typeof MicroPlugin !== "undefined"){
MicroPlugin.mixin(Selectize);
}else{
logError("Dependency MicroPlugin is missing",
{explanation:
"Make sure you either: (1) are using the \"standalone\" "+
"version of Selectize, or (2) require MicroPlugin before you "+
"load Selectize."}
);
}
// methods
// - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - -
$.extend(Selectize.prototype, {
/**
* Creates all elements and sets up event bindings.
*/
setup: function() {
var self = this;
var settings = self.settings;
var eventNS = self.eventNS;
var $window = $(window);
var $document = $(document);
var $input = self.$input;
var $wrapper;
var $control;
var $control_input;
var $dropdown;
var $dropdown_content;
var $dropdown_parent;
var inputMode;
var timeout_blur;
var timeout_focus;
var classes;
var classes_plugins;
var inputId;
inputMode = self.settings.mode;
classes = $input.attr('class') || '';
$wrapper = $('<div>').addClass(settings.wrapperClass).addClass(classes).addClass(inputMode);
$control = $('<div>').addClass(settings.inputClass).addClass('items').appendTo($wrapper);
$control_input = $('<input type="text" autocomplete="off" />').appendTo($control).attr('tabindex', $input.is(':disabled') ? '-1' : self.tabIndex);
$dropdown_parent = $(settings.dropdownParent || $wrapper);
$dropdown = $('<div>').addClass(settings.dropdownClass).addClass(inputMode).hide().appendTo($dropdown_parent);
$dropdown_content = $('<div>').addClass(settings.dropdownContentClass).appendTo($dropdown);
if(inputId = $input.attr('id')) {
$control_input.attr('id', inputId + '-selectized');
$("label[for='"+inputId+"']").attr('for', inputId + '-selectized');
}
if(self.settings.copyClassesToDropdown) {
$dropdown.addClass(classes);
}
$wrapper.css({
width: $input[0].style.width
});
if (self.plugins.names.length) {
classes_plugins = 'plugin-' + self.plugins.names.join(' plugin-');
$wrapper.addClass(classes_plugins);
$dropdown.addClass(classes_plugins);
}
if ((settings.maxItems === null || settings.maxItems > 1) && self.tagType === TAG_SELECT) {
$input.attr('multiple', 'multiple');
}
if (self.settings.placeholder) {
$control_input.attr('placeholder', settings.placeholder);
}
// if splitOn was not passed in, construct it from the delimiter to allow pasting universally
if (!self.settings.splitOn && self.settings.delimiter) {
var delimiterEscaped = self.settings.delimiter.replace(/[-\/\\^$*+?.()|[\]{}]/g, '\\$&');
self.settings.splitOn = new RegExp('\\s*' + delimiterEscaped + '+\\s*');
}
if ($input.attr('autocorrect')) {
$control_input.attr('autocorrect', $input.attr('autocorrect'));
}
if ($input.attr('autocapitalize')) {
$control_input.attr('autocapitalize', $input.attr('autocapitalize'));
}
self.$wrapper = $wrapper;
self.$control = $control;
self.$control_input = $control_input;
self.$dropdown = $dropdown;
self.$dropdown_content = $dropdown_content;
$dropdown.on('mouseenter', '[data-selectable]', function() { return self.onOptionHover.apply(self, arguments); });
$dropdown.on('mousedown click', '[data-selectable]', function() { return self.onOptionSelect.apply(self, arguments); });
watchChildEvent($control, 'mousedown', '*:not(input)', function() { return self.onItemSelect.apply(self, arguments); });
autoGrow($control_input);
$control.on({
mousedown : function() { return self.onMouseDown.apply(self, arguments); },
click : function() { return self.onClick.apply(self, arguments); }
});
$control_input.on({
mousedown : function(e) { e.stopPropagation(); },
keydown : function() { return self.onKeyDown.apply(self, arguments); },
keyup : function() { return self.onKeyUp.apply(self, arguments); },
keypress : function() { return self.onKeyPress.apply(self, arguments); },
resize : function() { self.positionDropdown.apply(self, []); },
blur : function() { return self.onBlur.apply(self, arguments); },
focus : function() { self.ignoreBlur = false; return self.onFocus.apply(self, arguments); },
paste : function() { return self.onPaste.apply(self, arguments); }
});
$document.on('keydown' + eventNS, function(e) {
self.isCmdDown = e[IS_MAC ? 'metaKey' : 'ctrlKey'];
self.isCtrlDown = e[IS_MAC ? 'altKey' : 'ctrlKey'];
self.isShiftDown = e.shiftKey;
});
$document.on('keyup' + eventNS, function(e) {
if (e.keyCode === KEY_CTRL) self.isCtrlDown = false;
if (e.keyCode === KEY_SHIFT) self.isShiftDown = false;
if (e.keyCode === KEY_CMD) self.isCmdDown = false;
});
$document.on('mousedown' + eventNS, function(e) {
if (self.isFocused) {
// prevent events on the dropdown scrollbar from causing the control to blur
if (e.target === self.$dropdown[0] || e.target.parentNode === self.$dropdown[0]) {
return false;
}
// blur on click outside
if (!self.$control.has(e.target).length && e.target !== self.$control[0]) {
self.blur(e.target);
}
}
});
$window.on(['scroll' + eventNS, 'resize' + eventNS].join(' '), function() {
if (self.isOpen) {
self.positionDropdown.apply(self, arguments);
}
});
$window.on('mousemove' + eventNS, function() {
self.ignoreHover = false;
});
// store original children and tab index so that they can be
// restored when the destroy() method is called.
this.revertSettings = {
$children : $input.children().detach(),
tabindex : $input.attr('tabindex')
};
$input.attr('tabindex', -1).hide().after(self.$wrapper);
if ($.isArray(settings.items)) {
self.setValue(settings.items);
delete settings.items;
}
// feature detect for the validation API
if (SUPPORTS_VALIDITY_API) {
$input.on('invalid' + eventNS, function(e) {
e.preventDefault();
self.isInvalid = true;
self.refreshState();
});
}
self.updateOriginalInput();
self.refreshItems();
self.refreshState();
self.updatePlaceholder();
self.isSetup = true;
if ($input.is(':disabled')) {
self.disable();
}
self.on('change', this.onChange);
$input.data('selectize', self);
$input.addClass('selectized');
self.trigger('initialize');
// preload options
if (settings.preload === true) {
self.onSearchChange('');
}
},
/**
* Sets up default rendering functions.
*/
setupTemplates: function() {
var self = this;
var field_label = self.settings.labelField;
var field_optgroup = self.settings.optgroupLabelField;
var templates = {
'optgroup': function(data) {
return '<div class="optgroup">' + data.html + '</div>';
},
'optgroup_header': function(data, escape) {
return '<div class="optgroup-header">' + escape(data[field_optgroup]) + '</div>';
},
'option': function(data, escape) {
return '<div class="option">' + escape(data[field_label]) + '</div>';
},
'item': function(data, escape) {
return '<div class="item">' + escape(data[field_label]) + '</div>';
},
'option_create': function(data, escape) {
return '<div class="create">Add <strong>' + escape(data.input) + '</strong>…</div>';
}
};
self.settings.render = $.extend({}, templates, self.settings.render);
},
/**
* Maps fired events to callbacks provided
* in the settings used when creating the control.
*/
setupCallbacks: function() {
var key, fn, callbacks = {
'initialize' : 'onInitialize',
'change' : 'onChange',
'item_add' : 'onItemAdd',
'item_remove' : 'onItemRemove',
'clear' : 'onClear',
'option_add' : 'onOptionAdd',
'option_remove' : 'onOptionRemove',
'option_clear' : 'onOptionClear',
'optgroup_add' : 'onOptionGroupAdd',
'optgroup_remove' : 'onOptionGroupRemove',
'optgroup_clear' : 'onOptionGroupClear',
'dropdown_open' : 'onDropdownOpen',
'dropdown_close' : 'onDropdownClose',
'type' : 'onType',
'load' : 'onLoad',
'focus' : 'onFocus',
'blur' : 'onBlur'
};
for (key in callbacks) {
if (callbacks.hasOwnProperty(key)) {
fn = this.settings[callbacks[key]];
if (fn) this.on(key, fn);
}
}
},
/**
* Triggered when the main control element
* has a click event.
*
* @param {object} e
* @return {boolean}
*/
onClick: function(e) {
var self = this;
// necessary for mobile webkit devices (manual focus triggering
// is ignored unless invoked within a click event)
if (!self.isFocused) {
self.focus();
e.preventDefault();
}
},
/**
* Triggered when the main control element
* has a mouse down event.
*
* @param {object} e
* @return {boolean}
*/
onMouseDown: function(e) {
var self = this;
var defaultPrevented = e.isDefaultPrevented();
var $target = $(e.target);
if (self.isFocused) {
// retain focus by preventing native handling. if the
// event target is the input it should not be modified.
// otherwise, text selection within the input won't work.
if (e.target !== self.$control_input[0]) {
if (self.settings.mode === 'single') {
// toggle dropdown
self.isOpen ? self.close() : self.open();
} else if (!defaultPrevented) {
self.setActiveItem(null);
}
return false;
}
} else {
// give control focus
if (!defaultPrevented) {
window.setTimeout(function() {
self.focus();
}, 0);
}
}
},
/**
* Triggered when the value of the control has been changed.
* This should propagate the event to the original DOM
* input / select element.
*/
onChange: function() {
this.$input.trigger('change');
},
/**
* Triggered on <input> paste.
*
* @param {object} e
* @returns {boolean}
*/
onPaste: function(e) {
var self = this;
if (self.isFull() || self.isInputHidden || self.isLocked) {
e.preventDefault();
return;
}
// If a regex or string is included, this will split the pasted
// input and create Items for each separate value
if (self.settings.splitOn) {
// Wait for pasted text to be recognized in value
setTimeout(function() {
var pastedText = self.$control_input.val();
if(!pastedText.match(self.settings.splitOn)){ return }
var splitInput = $.trim(pastedText).split(self.settings.splitOn);
for (var i = 0, n = splitInput.length; i < n; i++) {
self.createItem(splitInput[i]);
}
}, 0);
}
},
/**
* Triggered on <input> keypress.
*
* @param {object} e
* @returns {boolean}
*/
onKeyPress: function(e) {
if (this.isLocked) return e && e.preventDefault();
var character = String.fromCharCode(e.keyCode || e.which);
if (this.settings.create && this.settings.mode === 'multi' && character === this.settings.delimiter) {
this.createItem();
e.preventDefault();
return false;
}
},
/**
* Triggered on <input> keydown.
*
* @param {object} e
* @returns {boolean}
*/
onKeyDown: function(e) {
var isInput = e.target === this.$control_input[0];
var self = this;
if (self.isLocked) {
if (e.keyCode !== KEY_TAB) {
e.preventDefault();
}
return;
}
switch (e.keyCode) {
case KEY_A:
if (self.isCmdDown) {
self.selectAll();
return;
}
break;
case KEY_ESC:
if (self.isOpen) {
e.preventDefault();
e.stopPropagation();
self.close();
}
return;
case KEY_N:
if (!e.ctrlKey || e.altKey) break;
case KEY_DOWN:
if (!self.isOpen && self.hasOptions) {
self.open();
} else if (self.$activeOption) {
self.ignoreHover = true;
var $next = self.getAdjacentOption(self.$activeOption, 1);
if ($next.length) self.setActiveOption($next, true, true);
}
e.preventDefault();
return;
case KEY_P:
if (!e.ctrlKey || e.altKey) break;
case KEY_UP:
if (self.$activeOption) {
self.ignoreHover = true;
var $prev = self.getAdjacentOption(self.$activeOption, -1);
if ($prev.length) self.setActiveOption($prev, true, true);
}
e.preventDefault();
return;
case KEY_RETURN:
if (self.isOpen && self.$activeOption) {
self.onOptionSelect({currentTarget: self.$activeOption});
e.preventDefault();
}
return;
case KEY_LEFT:
self.advanceSelection(-1, e);
return;
case KEY_RIGHT:
self.advanceSelection(1, e);
return;
case KEY_TAB:
if (self.settings.selectOnTab && self.isOpen && self.$activeOption) {
self.onOptionSelect({currentTarget: self.$activeOption});
// Default behaviour is to jump to the next field, we only want this
// if the current field doesn't accept any more entries
if (!self.isFull()) {
e.preventDefault();
}
}
if (self.settings.create && self.createItem()) {
e.preventDefault();
}
return;
case KEY_BACKSPACE:
case KEY_DELETE:
self.deleteSelection(e);
return;
}
if ((self.isFull() || self.isInputHidden) && !(IS_MAC ? e.metaKey : e.ctrlKey)) {
e.preventDefault();
return;
}
},
/**
* Triggered on <input> keyup.
*
* @param {object} e
* @returns {boolean}
*/
onKeyUp: function(e) {
var self = this;
if (self.isLocked) return e && e.preventDefault();
var value = self.$control_input.val() || '';
if (self.lastValue !== value) {
self.lastValue = value;
self.onSearchChange(value);
self.refreshOptions();
self.trigger('type', value);
}
},
/**
* Invokes the user-provided option provider / loader.
*
* Note: this function is debounced in the Selectize
* constructor (by `settings.loadThrottle` milliseconds)
*
* @param {string} value
*/
onSearchChange: function(value) {
var self = this;
var fn = self.settings.load;
if (!fn) return;
if (self.loadedSearches.hasOwnProperty(value)) return;
self.loadedSearches[value] = true;
self.load(function(callback) {
fn.apply(self, [value, callback]);
});
},
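// Sketch of a remote loader driven by the debounced search above. The
// endpoint and field names are assumptions, not part of the library:
//
//   $('#repos').selectize({
//     valueField: 'id',
//     labelField: 'name',
//     searchField: 'name',
//     load: function(query, callback) {
//       if (!query.length) return callback();
//       $.getJSON('/api/repos', {q: query})   // hypothetical endpoint
//         .done(function(res) { callback(res); })
//         .fail(function() { callback(); });
//     }
//   });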
/**
* Triggered on <input> focus.
*
* @param {object} e (optional)
* @returns {boolean}
*/
onFocus: function(e) {
var self = this;
var wasFocused = self.isFocused;
if (self.isDisabled) {
self.blur();
e && e.preventDefault();
return false;
}
if (self.ignoreFocus) return;
self.isFocused = true;
if (self.settings.preload === 'focus') self.onSearchChange('');
if (!wasFocused) self.trigger('focus');
if (!self.$activeItems.length) {
self.showInput();
self.setActiveItem(null);
self.refreshOptions(!!self.settings.openOnFocus);
}
self.refreshState();
},
/**
* Triggered on <input> blur.
*
* @param {object} e
* @param {Element} dest
*/
onBlur: function(e, dest) {
var self = this;
if (!self.isFocused) return;
self.isFocused = false;
if (self.ignoreFocus) {
return;
} else if (!self.ignoreBlur && document.activeElement === self.$dropdown_content[0]) {
// necessary to prevent IE closing the dropdown when the scrollbar is clicked
self.ignoreBlur = true;
self.onFocus(e);
return;
}
var deactivate = function() {
self.close();
self.setTextboxValue('');
self.setActiveItem(null);
self.setActiveOption(null);
self.setCaret(self.items.length);
self.refreshState();
// IE11 bug: element still marked as active
dest && dest.focus && dest.focus();
self.ignoreFocus = false;
self.trigger('blur');
};
self.ignoreFocus = true;
if (self.settings.create && self.settings.createOnBlur) {
self.createItem(null, false, deactivate);
} else {
deactivate();
}
},
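// Minimal sketch: with `create` and `createOnBlur` enabled, pending input
// is turned into an item before the deactivate routine above runs:
//
//   $('#tags').selectize({create: true, createOnBlur: true});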
/**
* Triggered when the user rolls over
* an option in the autocomplete dropdown menu.
*
* @param {object} e
* @returns {boolean}
*/
onOptionHover: function(e) {
if (this.ignoreHover) return;
this.setActiveOption(e.currentTarget, false);
},
/**
* Triggered when the user clicks on an option
* in the autocomplete dropdown menu.
*
* @param {object} e
* @returns {boolean}
*/
onOptionSelect: function(e) {
var value, $target, $option, self = this;
if (e.preventDefault) {
e.preventDefault();
e.stopPropagation();
}
$target = $(e.currentTarget);
if ($target.hasClass('create')) {
self.createItem(null, function() {
if (self.settings.closeAfterSelect) {
self.close();
}
});
} else {
value = $target.attr('data-value');
if (typeof value !== 'undefined') {
self.lastQuery = null;
self.setTextboxValue('');
self.addItem(value);
if (self.settings.closeAfterSelect) {
self.close();
} else if (!self.settings.hideSelected && e.type && /mouse/.test(e.type)) {
self.setActiveOption(self.getOption(value));
}
}
}
},
/**
* Triggered when the user clicks on an item
* that has been selected.
*
* @param {object} e
* @returns {boolean}
*/
onItemSelect: function(e) {
var self = this;
if (self.isLocked) return;
if (self.settings.mode === 'multi') {
e.preventDefault();
self.setActiveItem(e.currentTarget, e);
}
},
/**
* Invokes the provided function, passing it a callback
* that accepts results; the results are then added
* as options to the control.
*
* @param {function} fn
*/
load: function(fn) {
var self = this;
var $wrapper = self.$wrapper.addClass(self.settings.loadingClass);
self.loading++;
fn.apply(self, [function(results) {
self.loading = Math.max(self.loading - 1, 0);
if (results && results.length) {
self.addOption(results);
self.refreshOptions(self.isFocused && !self.isInputHidden);
}
if (!self.loading) {
$wrapper.removeClass(self.settings.loadingClass);
}
self.trigger('load', results);
}]);
},
/**
* Sets the input field of the control to the specified value.
*
* @param {string} value
*/
setTextboxValue: function(value) {
var $input = this.$control_input;
var changed = $input.val() !== value;
if (changed) {
$input.val(value).triggerHandler('update');
this.lastValue = value;
}
},
/**
* Returns the value of the control. If multiple items
* can be selected (e.g. <select multiple>), this returns
* an array. If only one item can be selected, this
* returns a string.
*
* @returns {mixed}
*/
getValue: function() {
if (this.tagType === TAG_SELECT && this.$input.attr('multiple')) {
return this.items;
} else {
return this.items.join(this.settings.delimiter);
}
},
/**
* Resets the selected items to the given value.
*
* @param {mixed} value
*/
setValue: function(value, silent) {
var events = silent ? [] : ['change'];
debounce_events(this, events, function() {
this.clear(silent);
this.addItems(value, silent);
});
},
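// API sketch using the instance the plugin attaches to the element
// (`#cities` is hypothetical markup):
//
//   var api = $('#cities')[0].selectize;
//   api.setValue(['ny', 'sf']);  // pass an array when multiple items are allowed
//   api.getValue();              // array for <select multiple>, otherwise a
//                                // string joined by settings.delimiter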
/**
* Sets the selected item.
*
* @param {object} $item
* @param {object} e (optional)
*/
setActiveItem: function($item, e) {
var self = this;
var eventName;
var i, idx, begin, end, item, swap;
var $last;
if (self.settings.mode === 'single') return;
$item = $($item);
// clear the active selection
if (!$item.length) {
$(self.$activeItems).removeClass('active');
self.$activeItems = [];
if (self.isFocused) {
self.showInput();
}
return;
}
// modify selection
eventName = e && e.type.toLowerCase();
if (eventName === 'mousedown' && self.isShiftDown && self.$activeItems.length) {
$last = self.$control.children('.active:last');
begin = Array.prototype.indexOf.apply(self.$control[0].childNodes, [$last[0]]);
end = Array.prototype.indexOf.apply(self.$control[0].childNodes, [$item[0]]);
if (begin > end) {
swap = begin;
begin = end;
end = swap;
}
for (i = begin; i <= end; i++) {
item = self.$control[0].childNodes[i];
if (self.$activeItems.indexOf(item) === -1) {
$(item).addClass('active');
self.$activeItems.push(item);
}
}
e.preventDefault();
} else if ((eventName === 'mousedown' && self.isCtrlDown) || (eventName === 'keydown' && this.isShiftDown)) {
if ($item.hasClass('active')) {
idx = self.$activeItems.indexOf($item[0]);
self.$activeItems.splice(idx, 1);
$item.removeClass('active');
} else {
self.$activeItems.push($item.addClass('active')[0]);
}
} else {
$(self.$activeItems).removeClass('active');
self.$activeItems = [$item.addClass('active')[0]];
}
// ensure control has focus
self.hideInput();
if (!this.isFocused) {
self.focus();
}
},
/**
* Sets the selected item in the dropdown menu
* of available options.
*
* @param {object} $option
* @param {boolean} scroll
* @param {boolean} animate
*/
setActiveOption: function($option, scroll, animate) {
var height_menu, height_item, y;
var scroll_top, scroll_bottom;
var self = this;
if (self.$activeOption) self.$activeOption.removeClass('active');
self.$activeOption = null;
$option = $($option);
if (!$option.length) return;
self.$activeOption = $option.addClass('active');
if (scroll || !isset(scroll)) {
height_menu = self.$dropdown_content.height();
height_item = self.$activeOption.outerHeight(true);
scroll = self.$dropdown_content.scrollTop() || 0;
y = self.$activeOption.offset().top - self.$dropdown_content.offset().top + scroll;
scroll_top = y;
scroll_bottom = y - height_menu + height_item;
if (y + height_item > height_menu + scroll) {
self.$dropdown_content.stop().animate({scrollTop: scroll_bottom}, animate ? self.settings.scrollDuration : 0);
} else if (y < scroll) {
self.$dropdown_content.stop().animate({scrollTop: scroll_top}, animate ? self.settings.scrollDuration : 0);
}
}
},
/**
* Selects all items (CTRL + A).
*/
selectAll: function() {
var self = this;
if (self.settings.mode === 'single') return;
self.$activeItems = Array.prototype.slice.apply(self.$control.children(':not(input)').addClass('active'));
if (self.$activeItems.length) {
self.hideInput();
self.close();
}
self.focus();
},
/**
* Hides the input element out of view, while
* retaining its focus.
*/
hideInput: function() {
var self = this;
self.setTextboxValue('');
self.$control_input.css({opacity: 0, position: 'absolute', left: self.rtl ? 10000 : -10000});
self.isInputHidden = true;
},
/**
* Restores input visibility.
*/
showInput: function() {
this.$control_input.css({opacity: 1, position: 'relative', left: 0});
this.isInputHidden = false;
},
/**
* Gives the control focus.
*/
focus: function() {
var self = this;
if (self.isDisabled) return;
self.ignoreFocus = true;
self.$control_input[0].focus();
window.setTimeout(function() {
self.ignoreFocus = false;
self.onFocus();
}, 0);
},
/**
* Forces the control out of focus.
*
* @param {Element} dest
*/
blur: function(dest) {
this.$control_input[0].blur();
this.onBlur(null, dest);
},
/**
* Returns a function that scores an object
* by how well it matches the provided query.
*
* @param {string} query
* @return {function}
*/
getScoreFunction: function(query) {
return this.sifter.getScoreFunction(query, this.getSearchOptions());
},
/**
* Returns search options for sifter (the system
* for scoring and sorting results).
*
* @see https://github.com/brianreavis/sifter.js
* @return {object}
*/
getSearchOptions: function() {
var settings = this.settings;
var sort = settings.sortField;
if (typeof sort === 'string') {
sort = [{field: sort}];
}
return {
fields : settings.searchField,
conjunction : settings.searchConjunction,
sort : sort
};
},
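// `sortField` accepts a plain field name (normalized above) or an array of
// sifter sort descriptors; a sketch of the explicit form, where `$score` is
// sifter's computed relevance field:
//
//   sortField: [
//     {field: '$score', direction: 'desc'},
//     {field: 'text', direction: 'asc'}
//   ]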
/**
* Searches through available options and returns
* a sorted array of matches.
*
* Returns an object containing:
*
* - query {string}
* - tokens {array}
* - total {int}
* - items {array}
*
* @param {string} query
* @returns {object}
*/
search: function(query) {
var i, value, score, result, calculateScore;
var self = this;
var settings = self.settings;
var options = this.getSearchOptions();
// validate user-provided result scoring function
if (settings.score) {
calculateScore = self.settings.score.apply(this, [query]);
if (typeof calculateScore !== 'function') {
throw new Error('Selectize "score" setting must be a function that returns a function');
}
}
// perform search
if (query !== self.lastQuery) {
self.lastQuery = query;
result = self.sifter.search(query, $.extend(options, {score: calculateScore}));
self.currentResults = result;
} else {
result = $.extend(true, {}, self.currentResults);
}
// filter out selected items
if (settings.hideSelected) {
for (i = result.items.length - 1; i >= 0; i--) {
if (self.items.indexOf(hash_key(result.items[i].id)) !== -1) {
result.items.splice(i, 1);
}
}
}
return result;
},
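// Sketch of a user-supplied `score` setting, which must return a scoring
// function (see the validation above); `boost` is a hypothetical option field:
//
//   score: function(search) {
//     var score = this.getScoreFunction(search);
//     return function(item) {
//       return score(item) * (1 + (item.boost || 0));
//     };
//   }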
/**
* Refreshes the list of available options shown
* in the autocomplete dropdown menu.
*
* @param {boolean} triggerDropdown
*/
refreshOptions: function(triggerDropdown) {
var i, j, k, n, groups, groups_order, option, option_html, optgroup, optgroups, html, html_children, has_create_option;
var $active, $active_before, $create;
if (typeof triggerDropdown === 'undefined') {
triggerDropdown = true;
}
var self = this;
var query = $.trim(self.$control_input.val());
var results = self.search(query);
var $dropdown_content = self.$dropdown_content;
var active_before = self.$activeOption && hash_key(self.$activeOption.attr('data-value'));
// build markup
n = results.items.length;
if (typeof self.settings.maxOptions === 'number') {
n = Math.min(n, self.settings.maxOptions);
}
// render and group available options individually
groups = {};
groups_order = [];
for (i = 0; i < n; i++) {
option = self.options[results.items[i].id];
option_html = self.render('option', option);
optgroup = option[self.settings.optgroupField] || '';
optgroups = $.isArray(optgroup) ? optgroup : [optgroup];
for (j = 0, k = optgroups && optgroups.length; j < k; j++) {
optgroup = optgroups[j];
if (!self.optgroups.hasOwnProperty(optgroup)) {
optgroup = '';
}
if (!groups.hasOwnProperty(optgroup)) {
groups[optgroup] = document.createDocumentFragment();
groups_order.push(optgroup);
}
groups[optgroup].appendChild(option_html);
}
}
// sort optgroups
if (this.settings.lockOptgroupOrder) {
groups_order.sort(function(a, b) {
var a_order = self.optgroups[a].$order || 0;
var b_order = self.optgroups[b].$order || 0;
return a_order - b_order;
});
}
// render optgroup headers & join groups
html = document.createDocumentFragment();
for (i = 0, n = groups_order.length; i < n; i++) {
optgroup = groups_order[i];
if (self.optgroups.hasOwnProperty(optgroup) && groups[optgroup].childNodes.length) {
// render the optgroup header and options within it,
// then pass it to the wrapper template
html_children = document.createDocumentFragment();
html_children.appendChild(self.render('optgroup_header', self.optgroups[optgroup]));
html_children.appendChild(groups[optgroup]);
html.appendChild(self.render('optgroup', $.extend({}, self.optgroups[optgroup], {
html: domToString(html_children),
dom: html_children
})));
} else {
html.appendChild(groups[optgroup]);
}
}
$dropdown_content.html(html);
// highlight matching terms inline
if (self.settings.highlight && results.query.length && results.tokens.length) {
$dropdown_content.removeHighlight();
for (i = 0, n = results.tokens.length; i < n; i++) {
highlight($dropdown_content, results.tokens[i].regex);
}
}
// add "selected" class to selected options
if (!self.settings.hideSelected) {
for (i = 0, n = self.items.length; i < n; i++) {
self.getOption(self.items[i]).addClass('selected');
}
}
// add create option
has_create_option = self.canCreate(query);
if (has_create_option) {
$dropdown_content.prepend(self.render('option_create', {input: query}));
$create = $($dropdown_content[0].childNodes[0]);
}
// activate
self.hasOptions = results.items.length > 0 || has_create_option;
if (self.hasOptions) {
if (results.items.length > 0) {
$active_before = active_before && self.getOption(active_before);
if ($active_before && $active_before.length) {
$active = $active_before;
} else if (self.settings.mode === 'single' && self.items.length) {
$active = self.getOption(self.items[0]);
}
if (!$active || !$active.length) {
if ($create && !self.settings.addPrecedence) {
$active = self.getAdjacentOption($create, 1);
} else {
$active = $dropdown_content.find('[data-selectable]:first');
}
}
} else {
$active = $create;
}
self.setActiveOption($active);
if (triggerDropdown && !self.isOpen) { self.open(); }
} else {
self.setActiveOption(null);
if (triggerDropdown && self.isOpen) { self.close(); }
}
},
/**
* Adds an available option. If it already exists,
* nothing will happen. Note: this does not refresh
* the options list dropdown (use `refreshOptions`
* for that).
*
* Usage:
*
* this.addOption(data)
*
* @param {object|array} data
*/
addOption: function(data) {
var i, n, value, self = this;
if ($.isArray(data)) {
for (i = 0, n = data.length; i < n; i++) {
self.addOption(data[i]);
}
return;
}
if (value = self.registerOption(data)) {
self.userOptions[value] = true;
self.lastQuery = null;
self.trigger('option_add', value, data);
}
},
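// Sketch: options added through the API only appear once the dropdown is
// explicitly refreshed, per the note above:
//
//   var api = $('#cities')[0].selectize;
//   api.addOption([{value: 'a', text: 'A'}, {value: 'b', text: 'B'}]);
//   api.refreshOptions();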
/**
* Registers an option to the pool of options.
*
* @param {object} data
* @return {boolean|string}
*/
registerOption: function(data) {
var key = hash_key(data[this.settings.valueField]);
if (typeof key === 'undefined' || key === null || this.options.hasOwnProperty(key)) return false;
data.$order = data.$order || ++this.order;
this.options[key] = data;
return key;
},
/**
* Registers an option group to the pool of option groups.
*
* @param {object} data
* @return {boolean|string}
*/
registerOptionGroup: function(data) {
var key = hash_key(data[this.settings.optgroupValueField]);
if (!key) return false;
data.$order = data.$order || ++this.order;
this.optgroups[key] = data;
return key;
},
/**
* Registers a new optgroup for options
* to be bucketed into.
*
* @param {string} id
* @param {object} data
*/
addOptionGroup: function(id, data) {
data[this.settings.optgroupValueField] = id;
if (id = this.registerOptionGroup(data)) {
this.trigger('optgroup_add', id, data);
}
},
/**
* Removes an existing option group.
*
* @param {string} id
*/
removeOptionGroup: function(id) {
if (this.optgroups.hasOwnProperty(id)) {
delete this.optgroups[id];
this.renderCache = {};
this.trigger('optgroup_remove', id);
}
},
/**
* Clears all existing option groups.
*/
clearOptionGroups: function() {
this.optgroups = {};
this.renderCache = {};
this.trigger('optgroup_clear');
},
/**
* Updates an option available for selection. If
* it is visible in the selected items or options
* dropdown, it will be re-rendered automatically.
*
* @param {string} value
* @param {object} data
*/
updateOption: function(value, data) {
var self = this;
var $item, $item_new;
var value_new, index_item, cache_items, cache_options, order_old;
value = hash_key(value);
value_new = hash_key(data[self.settings.valueField]);
// sanity checks
if (value === null) return;
if (!self.options.hasOwnProperty(value)) return;
if (typeof value_new !== 'string') throw new Error('Value must be set in option data');
order_old = self.options[value].$order;
// update references
if (value_new !== value) {
delete self.options[value];
index_item = self.items.indexOf(value);
if (index_item !== -1) {
self.items.splice(index_item, 1, value_new);
}
}
data.$order = data.$order || order_old;
self.options[value_new] = data;
// invalidate render cache
cache_items = self.renderCache['item'];
cache_options = self.renderCache['option'];
if (cache_items) {
delete cache_items[value];
delete cache_items[value_new];
}
if (cache_options) {
delete cache_options[value];
delete cache_options[value_new];
}
// update the item if it's selected
if (self.items.indexOf(value_new) !== -1) {
$item = self.getItem(value);
$item_new = $(self.render('item', data));
if ($item.hasClass('active')) $item_new.addClass('active');
$item.replaceWith($item_new);
}
// invalidate last query because we might have updated the sortField
self.lastQuery = null;
// update dropdown contents
if (self.isOpen) {
self.refreshOptions(false);
}
},
/**
* Removes a single option.
*
* @param {string} value
* @param {boolean} silent
*/
removeOption: function(value, silent) {
var self = this;
value = hash_key(value);
var cache_items = self.renderCache['item'];
var cache_options = self.renderCache['option'];
if (cache_items) delete cache_items[value];
if (cache_options) delete cache_options[value];
delete self.userOptions[value];
delete self.options[value];
self.lastQuery = null;
self.trigger('option_remove', value);
self.removeItem(value, silent);
},
/**
* Clears all options.
*/
clearOptions: function() {
var self = this;
self.loadedSearches = {};
self.userOptions = {};
self.renderCache = {};
self.options = self.sifter.items = {};
self.lastQuery = null;
self.trigger('option_clear');
self.clear();
},
/**
* Returns the jQuery element of the option
* matching the given value.
*
* @param {string} value
* @returns {object}
*/
getOption: function(value) {
return this.getElementWithValue(value, this.$dropdown_content.find('[data-selectable]'));
},
/**
* Returns the jQuery element of the next or
* previous selectable option.
*
* @param {object} $option
* @param {int} direction can be 1 for next or -1 for previous
* @return {object}
*/
getAdjacentOption: function($option, direction) {
var $options = this.$dropdown.find('[data-selectable]');
var index = $options.index($option) + direction;
return index >= 0 && index < $options.length ? $options.eq(index) : $();
},
/**
* Finds the first element with a "data-value" attribute
* that matches the given value.
*
* @param {mixed} value
* @param {object} $els
* @return {object}
*/
getElementWithValue: function(value, $els) {
value = hash_key(value);
if (typeof value !== 'undefined' && value !== null) {
for (var i = 0, n = $els.length; i < n; i++) {
if ($els[i].getAttribute('data-value') === value) {
return $($els[i]);
}
}
}
return $();
},
/**
* Returns the jQuery element of the item
* matching the given value.
*
* @param {string} value
* @returns {object}
*/
getItem: function(value) {
return this.getElementWithValue(value, this.$control.children());
},
/**
* "Selects" multiple items at once. Adds them to the list
* at the current caret position.
*
* @param {string|array} values
* @param {boolean} silent
*/
addItems: function(values, silent) {
var items = $.isArray(values) ? values : [values];
for (var i = 0, n = items.length; i < n; i++) {
this.isPending = (i < n - 1);
this.addItem(items[i], silent);
}
},
/**
* "Selects" an item. Adds it to the list
* at the current caret position.
*
* @param {string} value
* @param {boolean} silent
*/
addItem: function(value, silent) {
var events = silent ? [] : ['change'];
debounce_events(this, events, function() {
var $item, $option, $options;
var self = this;
var inputMode = self.settings.mode;
var i, active, value_next, wasFull;
value = hash_key(value);
if (self.items.indexOf(value) !== -1) {
if (inputMode === 'single') self.close();
return;
}
if (!self.options.hasOwnProperty(value)) return;
if (inputMode === 'single') self.clear(silent);
if (inputMode === 'multi' && self.isFull()) return;
$item = $(self.render('item', self.options[value]));
wasFull = self.isFull();
self.items.splice(self.caretPos, 0, value);
self.insertAtCaret($item);
if (!self.isPending || (!wasFull && self.isFull())) {
self.refreshState();
}
if (self.isSetup) {
$options = self.$dropdown_content.find('[data-selectable]');
// update menu / remove the option (if this is not one item being added as part of series)
if (!self.isPending) {
$option = self.getOption(value);
value_next = self.getAdjacentOption($option, 1).attr('data-value');
self.refreshOptions(self.isFocused && inputMode !== 'single');
if (value_next) {
self.setActiveOption(self.getOption(value_next));
}
}
// hide the menu if the maximum number of items have been selected or no options are left
if (!$options.length || self.isFull()) {
self.close();
} else {
self.positionDropdown();
}
self.updatePlaceholder();
self.trigger('item_add', value, $item);
self.updateOriginalInput({silent: silent});
}
});
},
/**
* Removes the selected item matching
* the provided value.
*
* @param {string} value
* @param {boolean} silent
*/
removeItem: function(value, silent) {
var self = this;
var $item, i, idx;
$item = (value instanceof $) ? value : self.getItem(value);
value = hash_key($item.attr('data-value'));
i = self.items.indexOf(value);
if (i !== -1) {
$item.remove();
if ($item.hasClass('active')) {
idx = self.$activeItems.indexOf($item[0]);
self.$activeItems.splice(idx, 1);
}
self.items.splice(i, 1);
self.lastQuery = null;
if (!self.settings.persist && self.userOptions.hasOwnProperty(value)) {
self.removeOption(value, silent);
}
if (i < self.caretPos) {
self.setCaret(self.caretPos - 1);
}
self.refreshState();
self.updatePlaceholder();
self.updateOriginalInput({silent: silent});
self.positionDropdown();
self.trigger('item_remove', value, $item);
}
},
/**
* Invokes the `create` method provided in the
* selectize options that should provide the data
* for the new item, given the user input.
*
* Once this completes, it will be added
* to the item list.
*
* @param {string} input
* @param {boolean} [triggerDropdown]
* @param {function} [callback]
* @return {boolean}
*/
createItem: function(input, triggerDropdown) {
var self = this;
var caret = self.caretPos;
input = input || $.trim(self.$control_input.val() || '');
var callback = arguments[arguments.length - 1];
if (typeof callback !== 'function') callback = function() {};
if (typeof triggerDropdown !== 'boolean') {
triggerDropdown = true;
}
if (!self.canCreate(input)) {
callback();
return false;
}
self.lock();
var setup = (typeof self.settings.create === 'function') ? this.settings.create : function(input) {
var data = {};
data[self.settings.labelField] = input;
data[self.settings.valueField] = input;
return data;
};
var create = once(function(data) {
self.unlock();
if (!data || typeof data !== 'object') return callback();
var value = hash_key(data[self.settings.valueField]);
if (typeof value !== 'string') return callback();
self.setTextboxValue('');
self.addOption(data);
self.setCaret(caret);
self.addItem(value);
self.refreshOptions(triggerDropdown && self.settings.mode !== 'single');
callback(data);
});
var output = setup.apply(this, [input, create]);
if (typeof output !== 'undefined') {
create(output);
}
return true;
},
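// A `create` function may resolve synchronously by returning the data, or
// asynchronously by invoking the callback it receives; the endpoint below
// is an assumption:
//
//   create: function(input, callback) {
//     $.post('/api/items', {name: input})
//       .done(function(data) { callback(data); })
//       .fail(function() { callback(); });
//   }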
/**
* Re-renders the selected item lists.
*/
refreshItems: function() {
this.lastQuery = null;
if (this.isSetup) {
this.addItem(this.items);
}
this.refreshState();
this.updateOriginalInput();
},
/**
* Updates all state-dependent attributes
* and CSS classes.
*/
refreshState: function() {
this.refreshValidityState();
this.refreshClasses();
},
/**
* Update the `required` attribute of both input and control input.
*
* The `required` property needs to be activated on the control input
* for the error to be displayed at the right place. `required` also
* needs to be temporarily deactivated on the input since the input is
* hidden and can't show errors.
*/
refreshValidityState: function() {
if (!this.isRequired) return false;
var invalid = !this.items.length;
this.isInvalid = invalid;
this.$control_input.prop('required', invalid);
this.$input.prop('required', !invalid);
},
/**
* Updates all state-dependent CSS classes.
*/
refreshClasses: function() {
var self = this;
var isFull = self.isFull();
var isLocked = self.isLocked;
self.$wrapper
.toggleClass('rtl', self.rtl);
self.$control
.toggleClass('focus', self.isFocused)
.toggleClass('disabled', self.isDisabled)
.toggleClass('required', self.isRequired)
.toggleClass('invalid', self.isInvalid)
.toggleClass('locked', isLocked)
.toggleClass('full', isFull).toggleClass('not-full', !isFull)
.toggleClass('input-active', self.isFocused && !self.isInputHidden)
.toggleClass('dropdown-active', self.isOpen)
.toggleClass('has-options', !$.isEmptyObject(self.options))
.toggleClass('has-items', self.items.length > 0);
self.$control_input.data('grow', !isFull && !isLocked);
},
/**
* Determines whether or not more items can be added
* to the control without exceeding the user-defined maximum.
*
* @returns {boolean}
*/
isFull: function() {
return this.settings.maxItems !== null && this.items.length >= this.settings.maxItems;
},
/**
* Refreshes the original <select> or <input>
* element to reflect the current state.
*/
updateOriginalInput: function(opts) {
var i, n, options, label, self = this;
opts = opts || {};
if (self.tagType === TAG_SELECT) {
options = [];
for (i = 0, n = self.items.length; i < n; i++) {
label = self.options[self.items[i]][self.settings.labelField] || '';
options.push('<option value="' + escape_html(self.items[i]) + '" selected="selected">' + escape_html(label) + '</option>');
}
if (!options.length && !this.$input.attr('multiple')) {
options.push('<option value="" selected="selected"></option>');
}
self.$input.html(options.join(''));
} else {
self.$input.val(self.getValue());
self.$input.attr('value',self.$input.val());
}
if (self.isSetup) {
if (!opts.silent) {
self.trigger('change', self.$input.val());
}
}
},
/**
* Shows/hides the input placeholder depending
* on whether there are already items in the list.
*/
updatePlaceholder: function() {
if (!this.settings.placeholder) return;
var $input = this.$control_input;
if (this.items.length) {
$input.removeAttr('placeholder');
} else {
$input.attr('placeholder', this.settings.placeholder);
}
$input.triggerHandler('update', {force: true});
},
/**
* Shows the autocomplete dropdown containing
* the available options.
*/
open: function() {
var self = this;
if (self.isLocked || self.isOpen || (self.settings.mode === 'multi' && self.isFull())) return;
self.focus();
self.isOpen = true;
self.refreshState();
self.$dropdown.css({visibility: 'hidden', display: 'block'});
self.positionDropdown();
self.$dropdown.css({visibility: 'visible'});
self.trigger('dropdown_open', self.$dropdown);
},
/**
* Closes the autocomplete dropdown menu.
*/
close: function() {
var self = this;
var trigger = self.isOpen;
if (self.settings.mode === 'single' && self.items.length) {
self.hideInput();
self.$control_input.blur(); // close keyboard on iOS
}
self.isOpen = false;
self.$dropdown.hide();
self.setActiveOption(null);
self.refreshState();
if (trigger) self.trigger('dropdown_close', self.$dropdown);
},
/**
* Calculates and applies the appropriate
* position of the dropdown.
*/
positionDropdown: function() {
var $control = this.$control;
var offset = this.settings.dropdownParent === 'body' ? $control.offset() : $control.position();
offset.top += $control.outerHeight(true);
this.$dropdown.css({
width : $control.outerWidth(),
top : offset.top,
left : offset.left
});
},
/**
* Resets / clears all selected items
* from the control.
*
* @param {boolean} silent
*/
clear: function(silent) {
var self = this;
if (!self.items.length) return;
self.$control.children(':not(input)').remove();
self.items = [];
self.lastQuery = null;
self.setCaret(0);
self.setActiveItem(null);
self.updatePlaceholder();
self.updateOriginalInput({silent: silent});
self.refreshState();
self.showInput();
self.trigger('clear');
},
/**
* A helper method for inserting an element
* at the current caret position.
*
* @param {object} $el
*/
insertAtCaret: function($el) {
var caret = Math.min(this.caretPos, this.items.length);
if (caret === 0) {
this.$control.prepend($el);
} else {
$(this.$control[0].childNodes[caret]).before($el);
}
this.setCaret(caret + 1);
},
/**
* Removes the current selected item(s).
*
* @param {object} e (optional)
* @returns {boolean}
*/
deleteSelection: function(e) {
var i, n, direction, selection, values, caret, option_select, $option_select, $tail;
var self = this;
direction = (e && e.keyCode === KEY_BACKSPACE) ? -1 : 1;
selection = getSelection(self.$control_input[0]);
if (self.$activeOption && !self.settings.hideSelected) {
option_select = self.getAdjacentOption(self.$activeOption, -1).attr('data-value');
}
// determine items that will be removed
values = [];
if (self.$activeItems.length) {
$tail = self.$control.children('.active:' + (direction > 0 ? 'last' : 'first'));
caret = self.$control.children(':not(input)').index($tail);
if (direction > 0) { caret++; }
for (i = 0, n = self.$activeItems.length; i < n; i++) {
values.push($(self.$activeItems[i]).attr('data-value'));
}
if (e) {
e.preventDefault();
e.stopPropagation();
}
} else if ((self.isFocused || self.settings.mode === 'single') && self.items.length) {
if (direction < 0 && selection.start === 0 && selection.length === 0) {
values.push(self.items[self.caretPos - 1]);
} else if (direction > 0 && selection.start === self.$control_input.val().length) {
values.push(self.items[self.caretPos]);
}
}
// allow the callback to abort
if (!values.length || (typeof self.settings.onDelete === 'function' && self.settings.onDelete.apply(self, [values]) === false)) {
return false;
}
// perform removal
if (typeof caret !== 'undefined') {
self.setCaret(caret);
}
while (values.length) {
self.removeItem(values.pop());
}
self.showInput();
self.positionDropdown();
self.refreshOptions(true);
// select previous option
if (option_select) {
$option_select = self.getOption(option_select);
if ($option_select.length) {
self.setActiveOption($option_select);
}
}
return true;
},
/**
* Selects the previous / next item (depending
* on the `direction` argument).
*
* > 0 - right
* < 0 - left
*
* @param {int} direction
* @param {object} e (optional)
*/
advanceSelection: function(direction, e) {
var tail, selection, idx, valueLength, cursorAtEdge, $tail;
var self = this;
if (direction === 0) return;
if (self.rtl) direction *= -1;
tail = direction > 0 ? 'last' : 'first';
selection = getSelection(self.$control_input[0]);
if (self.isFocused && !self.isInputHidden) {
valueLength = self.$control_input.val().length;
cursorAtEdge = direction < 0
? selection.start === 0 && selection.length === 0
: selection.start === valueLength;
if (cursorAtEdge && !valueLength) {
self.advanceCaret(direction, e);
}
} else {
$tail = self.$control.children('.active:' + tail);
if ($tail.length) {
idx = self.$control.children(':not(input)').index($tail);
self.setActiveItem(null);
self.setCaret(direction > 0 ? idx + 1 : idx);
}
}
},
/**
* Moves the caret left / right.
*
* @param {int} direction
* @param {object} e (optional)
*/
advanceCaret: function(direction, e) {
var self = this, fn, $adj;
if (direction === 0) return;
fn = direction > 0 ? 'next' : 'prev';
if (self.isShiftDown) {
$adj = self.$control_input[fn]();
if ($adj.length) {
self.hideInput();
self.setActiveItem($adj);
e && e.preventDefault();
}
} else {
self.setCaret(self.caretPos + direction);
}
},
/**
* Moves the caret to the specified index.
*
* @param {int} i
*/
setCaret: function(i) {
var self = this;
if (self.settings.mode === 'single') {
i = self.items.length;
} else {
i = Math.max(0, Math.min(self.items.length, i));
}
if (!self.isPending) {
// move the caret by repositioning the siblings rather than the input
// itself: on mobile WebKit, focus cannot be restored once it is lost
var j, n, fn, $children, $child;
$children = self.$control.children(':not(input)');
for (j = 0, n = $children.length; j < n; j++) {
$child = $($children[j]).detach();
if (j < i) {
self.$control_input.before($child);
} else {
self.$control.append($child);
}
}
}
self.caretPos = i;
},
/**
* Disables user input on the control. Used while
* items are being asynchronously created.
*/
lock: function() {
this.close();
this.isLocked = true;
this.refreshState();
},
/**
* Re-enables user input on the control.
*/
unlock: function() {
this.isLocked = false;
this.refreshState();
},
/**
* Disables user input on the control completely.
* While disabled, it cannot receive focus.
*/
disable: function() {
var self = this;
self.$input.prop('disabled', true);
self.$control_input.prop('disabled', true).prop('tabindex', -1);
self.isDisabled = true;
self.lock();
},
/**
* Enables the control so that it can respond
* to focus and user input.
*/
enable: function() {
var self = this;
self.$input.prop('disabled', false);
self.$control_input.prop('disabled', false).prop('tabindex', self.tabIndex);
self.isDisabled = false;
self.unlock();
},
/**
* Completely destroys the control and
* unbinds all event listeners so that it can
* be garbage collected.
*/
destroy: function() {
var self = this;
var eventNS = self.eventNS;
var revertSettings = self.revertSettings;
self.trigger('destroy');
self.off();
self.$wrapper.remove();
self.$dropdown.remove();
self.$input
.html('')
.append(revertSettings.$children)
.removeAttr('tabindex')
.removeClass('selectized')
.attr({tabindex: revertSettings.tabindex})
.show();
self.$control_input.removeData('grow');
self.$input.removeData('selectize');
$(window).off(eventNS);
$(document).off(eventNS);
$(document.body).off(eventNS);
delete self.$input[0].selectize;
},
/**
* A helper method for rendering "item" and
* "option" templates, given the data.
*
* @param {string} templateName
* @param {object} data
* @returns {string}
*/
render: function(templateName, data) {
var value, id;
var html = '';
var cache = false;
var self = this;
if (templateName === 'option' || templateName === 'item') {
value = hash_key(data[self.settings.valueField]);
cache = !!value;
}
// pull markup from cache if it exists
if (cache) {
if (!isset(self.renderCache[templateName])) {
self.renderCache[templateName] = {};
}
if (self.renderCache[templateName].hasOwnProperty(value)) {
return self.renderCache[templateName][value];
}
}
// render markup
html = $(self.settings.render[templateName].apply(this, [data, escape_html]));
// add mandatory attributes
if (templateName === 'option' || templateName === 'option_create') {
html.attr('data-selectable', '');
}
else if (templateName === 'optgroup') {
id = data[self.settings.optgroupValueField] || '';
html.attr('data-group', id);
}
if (templateName === 'option' || templateName === 'item') {
html.attr('data-value', value || '');
}
// update cache
if (cache) {
self.renderCache[templateName][value] = html[0];
}
return html[0];
},
/**
* Clears the render cache for a template. If
* no template is given, clears all render
* caches.
*
* @param {string} templateName
*/
clearCache: function(templateName) {
var self = this;
if (typeof templateName === 'undefined') {
self.renderCache = {};
} else {
delete self.renderCache[templateName];
}
},
/**
* Determines whether or not to display the
* create item prompt, given a user input.
*
* @param {string} input
* @return {boolean}
*/
canCreate: function(input) {
var self = this;
if (!self.settings.create) return false;
var filter = self.settings.createFilter;
return input.length
&& (typeof filter !== 'function' || filter.apply(self, [input]))
&& (typeof filter !== 'string' || new RegExp(filter).test(input))
&& (!(filter instanceof RegExp) || filter.test(input));
}
});
Selectize.count = 0;
Selectize.defaults = {
options: [],
optgroups: [],
plugins: [],
delimiter: ',',
splitOn: null, // regexp or string for splitting up values from a paste command
persist: true,
diacritics: true,
create: false,
createOnBlur: false,
createFilter: null,
highlight: true,
openOnFocus: true,
maxOptions: 1000,
maxItems: null,
hideSelected: null,
addPrecedence: false,
selectOnTab: false,
preload: false,
allowEmptyOption: false,
closeAfterSelect: false,
scrollDuration: 60,
loadThrottle: 300,
loadingClass: 'loading',
dataAttr: 'data-data',
optgroupField: 'optgroup',
valueField: 'value',
labelField: 'text',
optgroupLabelField: 'label',
optgroupValueField: 'value',
lockOptgroupOrder: false,
sortField: '$order',
searchField: ['text'],
searchConjunction: 'and',
mode: null,
wrapperClass: 'selectize-control',
inputClass: 'selectize-input',
dropdownClass: 'selectize-dropdown',
dropdownContentClass: 'selectize-dropdown-content',
dropdownParent: null,
copyClassesToDropdown: true,
/*
load : null, // function(query, callback) { ... }
score : null, // function(search) { ... }
onInitialize : null, // function() { ... }
onChange : null, // function(value) { ... }
onItemAdd : null, // function(value, $item) { ... }
onItemRemove : null, // function(value) { ... }
onClear : null, // function() { ... }
onOptionAdd : null, // function(value, data) { ... }
onOptionRemove : null, // function(value) { ... }
onOptionClear : null, // function() { ... }
onOptionGroupAdd : null, // function(id, data) { ... }
onOptionGroupRemove : null, // function(id) { ... }
onOptionGroupClear : null, // function() { ... }
onDropdownOpen : null, // function($dropdown) { ... }
onDropdownClose : null, // function($dropdown) { ... }
onType : null, // function(str) { ... }
onDelete : null, // function(values) { ... }
*/
render: {
/*
item: null,
optgroup: null,
optgroup_header: null,
option: null,
option_create: null
*/
}
};
$.fn.selectize = function(settings_user) {
var defaults = $.fn.selectize.defaults;
var settings = $.extend({}, defaults, settings_user);
var attr_data = settings.dataAttr;
var field_label = settings.labelField;
var field_value = settings.valueField;
var field_optgroup = settings.optgroupField;
var field_optgroup_label = settings.optgroupLabelField;
var field_optgroup_value = settings.optgroupValueField;
/**
* Initializes selectize from an <input type="text"> element.
*
* @param {object} $input
* @param {object} settings_element
*/
var init_textbox = function($input, settings_element) {
var i, n, values, option;
var data_raw = $input.attr(attr_data);
if (!data_raw) {
var value = $.trim($input.val() || '');
if (!settings.allowEmptyOption && !value.length) return;
values = value.split(settings.delimiter);
for (i = 0, n = values.length; i < n; i++) {
option = {};
option[field_label] = values[i];
option[field_value] = values[i];
settings_element.options.push(option);
}
settings_element.items = values;
} else {
settings_element.options = JSON.parse(data_raw);
for (i = 0, n = settings_element.options.length; i < n; i++) {
settings_element.items.push(settings_element.options[i][field_value]);
}
}
};
/**
* Initializes selectize from a <select> element.
*
* @param {object} $input
* @param {object} settings_element
*/
var init_select = function($input, settings_element) {
var i, n, tagName, $children, order = 0;
var options = settings_element.options;
var optionsMap = {};
var readData = function($el) {
var data = attr_data && $el.attr(attr_data);
if (typeof data === 'string' && data.length) {
return JSON.parse(data);
}
return null;
};
var addOption = function($option, group) {
$option = $($option);
var value = hash_key($option.val());
if (!value && !settings.allowEmptyOption) return;
// if the option already exists, it's probably been
// duplicated in another optgroup. in this case, push
// the current group to the "optgroup" property on the
// existing option so that it's rendered in both places.
if (optionsMap.hasOwnProperty(value)) {
if (group) {
var arr = optionsMap[value][field_optgroup];
if (!arr) {
optionsMap[value][field_optgroup] = group;
} else if (!$.isArray(arr)) {
optionsMap[value][field_optgroup] = [arr, group];
} else {
arr.push(group);
}
}
return;
}
var option = readData($option) || {};
option[field_label] = option[field_label] || $option.text();
option[field_value] = option[field_value] || value;
option[field_optgroup] = option[field_optgroup] || group;
optionsMap[value] = option;
options.push(option);
if ($option.is(':selected')) {
settings_element.items.push(value);
}
};
var addGroup = function($optgroup) {
var i, n, id, optgroup, $options;
$optgroup = $($optgroup);
id = $optgroup.attr('label');
if (id) {
optgroup = readData($optgroup) || {};
optgroup[field_optgroup_label] = id;
optgroup[field_optgroup_value] = id;
settings_element.optgroups.push(optgroup);
}
$options = $('option', $optgroup);
for (i = 0, n = $options.length; i < n; i++) {
addOption($options[i], id);
}
};
settings_element.maxItems = $input.attr('multiple') ? null : 1;
$children = $input.children();
for (i = 0, n = $children.length; i < n; i++) {
tagName = $children[i].tagName.toLowerCase();
if (tagName === 'optgroup') {
addGroup($children[i]);
} else if (tagName === 'option') {
addOption($children[i]);
}
}
};
return this.each(function() {
if (this.selectize) return;
var instance;
var $input = $(this);
var tag_name = this.tagName.toLowerCase();
var placeholder = $input.attr('placeholder') || $input.attr('data-placeholder');
if (!placeholder && !settings.allowEmptyOption) {
placeholder = $input.children('option[value=""]').text();
}
var settings_element = {
'placeholder' : placeholder,
'options' : [],
'optgroups' : [],
'items' : []
};
if (tag_name === 'select') {
init_select($input, settings_element);
} else {
init_textbox($input, settings_element);
}
instance = new Selectize($input, $.extend(true, {}, defaults, settings_element, settings_user));
});
};
$.fn.selectize.defaults = Selectize.defaults;
$.fn.selectize.support = {
validity: SUPPORTS_VALIDITY_API
};
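// End-to-end sketch (hypothetical markup), overriding a few of the
// defaults above:
//
//   <select id="cities" multiple></select>
//
//   $('#cities').selectize({
//     maxItems: 3,
//     create: true,
//     persist: false,
//     plugins: ['remove_button']
//   });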
Selectize.define('drag_drop', function(options) {
if (!$.fn.sortable) throw new Error('The "drag_drop" plugin requires jQuery UI "sortable".');
if (this.settings.mode !== 'multi') return;
var self = this;
self.lock = (function() {
var original = self.lock;
return function() {
var sortable = self.$control.data('sortable');
if (sortable) sortable.disable();
return original.apply(self, arguments);
};
})();
self.unlock = (function() {
var original = self.unlock;
return function() {
var sortable = self.$control.data('sortable');
if (sortable) sortable.enable();
return original.apply(self, arguments);
};
})();
self.setup = (function() {
var original = self.setup;
return function() {
original.apply(this, arguments);
var $control = self.$control.sortable({
items: '[data-value]',
forcePlaceholderSize: true,
disabled: self.isLocked,
start: function(e, ui) {
ui.placeholder.css('width', ui.helper.css('width'));
$control.css({overflow: 'visible'});
},
stop: function() {
$control.css({overflow: 'hidden'});
var active = self.$activeItems ? self.$activeItems.slice() : null;
var values = [];
$control.children('[data-value]').each(function() {
values.push($(this).attr('data-value'));
});
self.setValue(values);
self.setActiveItem(active);
}
});
};
})();
});
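// Enabling the plugin (requires jQuery UI "sortable", as checked above):
//
//   $('#tags').selectize({plugins: ['drag_drop']});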
Selectize.define('dropdown_header', function(options) {
var self = this;
options = $.extend({
title : 'Untitled',
headerClass : 'selectize-dropdown-header',
titleRowClass : 'selectize-dropdown-header-title',
labelClass : 'selectize-dropdown-header-label',
closeClass : 'selectize-dropdown-header-close',
html: function(data) {
return (
'<div class="' + data.headerClass + '">' +
'<div class="' + data.titleRowClass + '">' +
'<span class="' + data.labelClass + '">' + data.title + '</span>' +
'<a href="javascript:void(0)" class="' + data.closeClass + '">×</a>' +
'</div>' +
'</div>'
);
}
}, options);
self.setup = (function() {
var original = self.setup;
return function() {
original.apply(self, arguments);
self.$dropdown_header = $(options.html(options));
self.$dropdown.prepend(self.$dropdown_header);
};
})();
});
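// Sketch, assuming the object form of `plugins` for passing options:
//
//   $('#cities').selectize({
//     plugins: {dropdown_header: {title: 'Cities'}}
//   });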
Selectize.define('optgroup_columns', function(options) {
var self = this;
options = $.extend({
equalizeWidth : true,
equalizeHeight : true
}, options);
this.getAdjacentOption = function($option, direction) {
var $options = $option.closest('[data-group]').find('[data-selectable]');
var index = $options.index($option) + direction;
return index >= 0 && index < $options.length ? $options.eq(index) : $();
};
this.onKeyDown = (function() {
var original = self.onKeyDown;
return function(e) {
var index, $option, $options, $optgroup;
if (this.isOpen && (e.keyCode === KEY_LEFT || e.keyCode === KEY_RIGHT)) {
self.ignoreHover = true;
$optgroup = this.$activeOption.closest('[data-group]');
index = $optgroup.find('[data-selectable]').index(this.$activeOption);
if(e.keyCode === KEY_LEFT) {
$optgroup = $optgroup.prev('[data-group]');
} else {
$optgroup = $optgroup.next('[data-group]');
}
$options = $optgroup.find('[data-selectable]');
$option = $options.eq(Math.min($options.length - 1, index));
if ($option.length) {
this.setActiveOption($option);
}
return;
}
return original.apply(this, arguments);
};
})();
var getScrollbarWidth = function() {
var div;
var width = getScrollbarWidth.width;
var doc = document;
if (typeof width === 'undefined') {
div = doc.createElement('div');
div.innerHTML = '<div style="width:50px;height:50px;position:absolute;left:-50px;top:-50px;overflow:auto;"><div style="width:1px;height:100px;"></div></div>';
div = div.firstChild;
doc.body.appendChild(div);
width = getScrollbarWidth.width = div.offsetWidth - div.clientWidth;
doc.body.removeChild(div);
}
return width;
};
var equalizeSizes = function() {
var i, n, height_max, width, width_last, width_parent, $optgroups;
$optgroups = $('[data-group]', self.$dropdown_content);
n = $optgroups.length;
if (!n || !self.$dropdown_content.width()) return;
if (options.equalizeHeight) {
height_max = 0;
for (i = 0; i < n; i++) {
height_max = Math.max(height_max, $optgroups.eq(i).height());
}
$optgroups.css({height: height_max});
}
if (options.equalizeWidth) {
width_parent = self.$dropdown_content.innerWidth() - getScrollbarWidth();
width = Math.round(width_parent / n);
$optgroups.css({width: width});
if (n > 1) {
width_last = width_parent - width * (n - 1);
$optgroups.eq(n - 1).css({width: width_last});
}
}
};
if (options.equalizeHeight || options.equalizeWidth) {
hook.after(this, 'positionDropdown', equalizeSizes);
hook.after(this, 'refreshOptions', equalizeSizes);
}
});
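// Sketch: equalize column widths only, leaving group heights untouched:
//
//   $('#models').selectize({
//     plugins: {optgroup_columns: {equalizeHeight: false}}
//   });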
Selectize.define('remove_button', function(options) {
options = $.extend({
label : '×',
title : 'Remove',
className : 'remove',
append : true
}, options);
var singleClose = function(thisRef, options) {
options.className = 'remove-single';
var self = thisRef;
var html = '<a href="javascript:void(0)" class="' + options.className + '" tabindex="-1" title="' + escape_html(options.title) + '">' + options.label + '</a>';
/**
* Appends an element as a child (with raw HTML).
*
* @param {string} html_container
* @param {string} html_element
* @return {string}
*/
var append = function(html_container, html_element) {
return html_container + html_element;
};
thisRef.setup = (function() {
var original = self.setup;
return function() {
// override the item rendering method to add the button to each
if (options.append) {
var render_item = self.settings.render.item;
self.settings.render.item = function(data) {
return append(render_item.apply(thisRef, arguments), html);
};
}
original.apply(thisRef, arguments);
// add event listener
thisRef.$control.on('click', '.' + options.className, function(e) {
e.preventDefault();
if (self.isLocked) return;
self.clear();
});
};
})();
};
var multiClose = function(thisRef, options) {
var self = thisRef;
var html = '<a href="javascript:void(0)" class="' + options.className + '" tabindex="-1" title="' + escape_html(options.title) + '">' + options.label + '</a>';
/**
* Appends an element as a child (with raw HTML).
*
* @param {string} html_container
* @param {string} html_element
* @return {string}
*/
var append = function(html_container, html_element) {
var pos = html_container.search(/(<\/[^>]+>\s*)$/);
return html_container.substring(0, pos) + html_element + html_container.substring(pos);
};
thisRef.setup = (function() {
var original = self.setup;
return function() {
// override the item rendering method to add the button to each
if (options.append) {
var render_item = self.settings.render.item;
self.settings.render.item = function(data) {
return append(render_item.apply(thisRef, arguments), html);
};
}
original.apply(thisRef, arguments);
// add event listener
thisRef.$control.on('click', '.' + options.className, function(e) {
e.preventDefault();
if (self.isLocked) return;
var $item = $(e.currentTarget).parent();
self.setActiveItem($item);
if (self.deleteSelection()) {
self.setCaret(self.items.length);
}
});
};
})();
};
if (this.settings.mode === 'single') {
singleClose(this, options);
} else {
multiClose(this, options);
}
});
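// Sketch overriding the plugin defaults above:
//
//   $('#tags').selectize({
//     plugins: {remove_button: {title: 'Remove this item'}}
//   });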
Selectize.define('restore_on_backspace', function(options) {
var self = this;
options.text = options.text || function(option) {
return option[this.settings.labelField];
};
this.onKeyDown = (function() {
var original = self.onKeyDown;
return function(e) {
var index, option;
if (e.keyCode === KEY_BACKSPACE && this.$control_input.val() === '' && !this.$activeItems.length) {
index = this.caretPos - 1;
if (index >= 0 && index < this.items.length) {
option = this.options[this.items[index]];
if (this.deleteSelection(e)) {
this.setTextboxValue(options.text.apply(this, [option]));
this.refreshOptions(true);
}
e.preventDefault();
return;
}
}
return original.apply(this, arguments);
};
})();
});
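// Sketch with a custom `text` resolver; the `label` field is hypothetical:
//
//   $('#tags').selectize({
//     plugins: {restore_on_backspace: {
//       text: function(option) { return option.label; }
//     }}
//   });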
return Selectize;
}));
# /OASYS1-WOFRY-1.0.41.tar.gz/OASYS1-WOFRY-1.0.41/orangecontrib/wofry/util/wofry_util.py
import numpy, decimal
from PyQt5.QtGui import QFont, QPalette, QColor
from PyQt5.QtWidgets import QWidget, QGridLayout, QLabel, QDialog, QVBoxLayout, QDialogButtonBox
from matplotlib.patches import FancyArrowPatch, ArrowStyle
from oasys.widgets import gui
from oasys.util.oasys_util import get_sigma, get_fwhm
from silx.gui.plot.ImageView import ImageView, PlotWindow
class InfoBoxWidget(QWidget):
total_field = ""
fwhm_h_field = ""
fwhm_v_field = ""
sigma_h_field = ""
sigma_v_field = ""
def __init__(self, x_scale_factor = 1.0, y_scale_factor = 1.0, is_2d=True):
super(InfoBoxWidget, self).__init__()
info_box_inner= gui.widgetBox(self, "Info")
info_box_inner.setFixedHeight(515*y_scale_factor)
info_box_inner.setFixedWidth(230*x_scale_factor)
self.total = gui.lineEdit(info_box_inner, self, "total_field", "Total", tooltip="Total", labelWidth=115, valueType=str, orientation="horizontal")
label_box_1 = gui.widgetBox(info_box_inner, "", addSpace=False, orientation="horizontal")
self.label_h = QLabel("FWHM ")
self.label_h.setFixedWidth(115)
palette = QPalette(self.label_h.palette())
palette.setColor(QPalette.Foreground, QColor('blue'))
self.label_h.setPalette(palette)
label_box_1.layout().addWidget(self.label_h)
self.fwhm_h = gui.lineEdit(label_box_1, self, "fwhm_h_field", "", tooltip="FWHM", labelWidth=115, valueType=str, orientation="horizontal")
if is_2d:
label_box_2 = gui.widgetBox(info_box_inner, "", addSpace=False, orientation="horizontal")
self.label_v = QLabel("FWHM ")
self.label_v.setFixedWidth(115)
palette = QPalette(self.label_h.palette())
palette.setColor(QPalette.Foreground, QColor('red'))
self.label_v.setPalette(palette)
label_box_2.layout().addWidget(self.label_v)
self.fwhm_v = gui.lineEdit(label_box_2, self, "fwhm_v_field", "", tooltip="FWHM", labelWidth=115, valueType=str, orientation="horizontal")
label_box_1 = gui.widgetBox(info_box_inner, "", addSpace=False, orientation="horizontal")
self.label_s_h = QLabel("\u03c3 ")
self.label_s_h.setFixedWidth(115)
palette = QPalette(self.label_s_h.palette())
palette.setColor(QPalette.Foreground, QColor('blue'))
self.label_s_h.setPalette(palette)
label_box_1.layout().addWidget(self.label_s_h)
self.sigma_h = gui.lineEdit(label_box_1, self, "sigma_h_field", "", tooltip="Sigma", labelWidth=115, valueType=str, orientation="horizontal")
if is_2d:
label_box_2 = gui.widgetBox(info_box_inner, "", addSpace=False, orientation="horizontal")
self.label_s_v = QLabel("\u03c3 ")
self.label_s_v.setFixedWidth(115)
palette = QPalette(self.label_s_v.palette())
palette.setColor(QPalette.Foreground, QColor('red'))
self.label_s_v.setPalette(palette)
label_box_2.layout().addWidget(self.label_s_v)
self.sigma_v = gui.lineEdit(label_box_2, self, "sigma_v_field", "", tooltip="Sigma", labelWidth=115, valueType=str, orientation="horizontal")
self.total.setReadOnly(True)
font = QFont(self.total.font())
font.setBold(True)
self.total.setFont(font)
palette = QPalette(self.total.palette())
palette.setColor(QPalette.Text, QColor('dark blue'))
palette.setColor(QPalette.Base, QColor(243, 240, 160))
self.total.setPalette(palette)
self.fwhm_h.setReadOnly(True)
font = QFont(self.fwhm_h.font())
font.setBold(True)
self.fwhm_h.setFont(font)
palette = QPalette(self.fwhm_h.palette())
palette.setColor(QPalette.Text, QColor('dark blue'))
palette.setColor(QPalette.Base, QColor(243, 240, 160))
self.fwhm_h.setPalette(palette)
self.sigma_h.setReadOnly(True)
font = QFont(self.sigma_h.font())
font.setBold(True)
self.sigma_h.setFont(font)
palette = QPalette(self.sigma_h.palette())
palette.setColor(QPalette.Text, QColor('dark blue'))
palette.setColor(QPalette.Base, QColor(243, 240, 160))
self.sigma_h.setPalette(palette)
if is_2d:
self.fwhm_v.setReadOnly(True)
font = QFont(self.fwhm_v.font())
font.setBold(True)
self.fwhm_v.setFont(font)
palette = QPalette(self.fwhm_v.palette())
palette.setColor(QPalette.Text, QColor('dark blue'))
palette.setColor(QPalette.Base, QColor(243, 240, 160))
self.fwhm_v.setPalette(palette)
self.sigma_v.setReadOnly(True)
font = QFont(self.sigma_v.font())
font.setBold(True)
self.sigma_v.setFont(font)
palette = QPalette(self.sigma_v.palette())
palette.setColor(QPalette.Text, QColor('dark blue'))
palette.setColor(QPalette.Base, QColor(243, 240, 160))
self.sigma_v.setPalette(palette)
def clear(self):
self.total.setText("0.0")
self.fwhm_h.setText("0.0000")
if hasattr(self, "fwhm_v"): self.fwhm_v.setText("0.0000")
self.sigma_h.setText("0.0000")
if hasattr(self, "sigma_v"): self.sigma_v.setText("0.0000")
class ImageViewWithFWHM(QWidget):
def __init__(self, x_scale_factor = 1.0, y_scale_factor = 1.0):
super(ImageViewWithFWHM, self).__init__()
self.plot_canvas = ImageView(parent=self)
self.set_plot_canvas_default_settings()
self.info_box = InfoBoxWidget(x_scale_factor, y_scale_factor)
layout = QGridLayout()
layout.addWidget(self.info_box, 0, 1, 1, 1)
layout.addWidget(self.plot_canvas, 0, 0, 1, 1)
layout.setColumnMinimumWidth(0, 600*x_scale_factor)
layout.setColumnMinimumWidth(1, 230*x_scale_factor)
self.setLayout(layout)
def get_ImageView(self):
return self.plot_canvas
def get_InfoBoxWidget(self):
return self.info_box
def set_plot_canvas_default_settings(self):
self.get_ImageView().resetZoom()
self.get_ImageView().setXAxisAutoScale(True)
self.get_ImageView().setYAxisAutoScale(True)
self.get_ImageView().setGraphGrid(False)
self.get_ImageView().setKeepDataAspectRatio(True)
self.get_ImageView().yAxisInvertedAction.setVisible(False)
self.get_ImageView().setXAxisLogarithmic(False)
self.get_ImageView().setYAxisLogarithmic(False)
self.get_ImageView().getMaskAction().setVisible(False)
self.get_ImageView().getRoiAction().setVisible(False)
self.get_ImageView().getColormapAction().setVisible(True)
self.get_ImageView().setKeepDataAspectRatio(False)  # overrides the True set above; the last call wins
def plot_2D(self, histogram,xx=None,yy=None,
title="", xtitle="", ytitle="", xum="[mm]", yum="[mm]",
plotting_range=None,factor1=1.0,factor2=1.0,colormap=None):
if xx is None:
xx = numpy.arange(histogram.shape[0])
if yy is None:
yy = numpy.arange(histogram.shape[1])
if plotting_range is None:
nbins_h = xx.size
nbins_v = yy.size
else:
range_x = numpy.where(numpy.logical_and(xx>=plotting_range[0], xx<=plotting_range[1]))
range_y = numpy.where(numpy.logical_and(yy>=plotting_range[2], yy<=plotting_range[3]))
xx = xx[range_x]
yy = yy[range_y]
nbins_h = xx.size
nbins_v = yy.size
if len(xx) == 0 or len(yy) == 0:
raise ValueError("Nothing to plot in the given range")
xmin, xmax = xx.min(), xx.max()
ymin, ymax = yy.min(), yy.max()
origin = (xmin*factor1, ymin*factor2)
scale = (abs((xmax-xmin)/nbins_h)*factor1, abs((ymax-ymin)/nbins_v)*factor2)
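# Note: silx maps array indices to data coordinates via origin/scale, i.e.
# pixel (i, j) is drawn at (origin[0] + scale[0]*i, origin[1] + scale[1]*j).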
# silx inverts axis!!!! histogram must be calculated reversed
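# The loop below is effectively numpy.transpose(histogram); it is kept
# explicit here to mirror the original index order.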
data_to_plot = []
for y_index in range(0, nbins_v):
x_values = []
for x_index in range(0, nbins_h):
x_values.append(histogram[x_index][y_index])
data_to_plot.append(x_values)
data_to_plot = numpy.array(data_to_plot)
histogram_h = numpy.sum(data_to_plot, axis=0) # data to plot axis are inverted
histogram_v = numpy.sum(data_to_plot, axis=1)
ticket = {}
ticket['total'] = numpy.sum(data_to_plot)
ticket['fwhm_h'], ticket['fwhm_quote_h'], ticket['fwhm_coordinates_h'] = get_fwhm(histogram_h, xx)
ticket['sigma_h'] = get_sigma(histogram_h, xx)
ticket['fwhm_v'], ticket['fwhm_quote_v'], ticket['fwhm_coordinates_v'] = get_fwhm(histogram_v, yy)
ticket['sigma_v'] = get_sigma(histogram_v, yy)
self.plot_canvas.setColormap(colormap=colormap)
self.plot_canvas.setImage(data_to_plot, origin=origin, scale=scale)
self.plot_canvas.setGraphXLabel(xtitle)
self.plot_canvas.setGraphYLabel(ytitle)
self.plot_canvas.setGraphTitle(title)
self.plot_canvas._histoHPlot.setGraphYLabel('Counts')
self.plot_canvas._histoHPlot._backend.ax.xaxis.get_label().set_color('white')
self.plot_canvas._histoHPlot._backend.ax.xaxis.get_label().set_fontsize(1)
for label in self.plot_canvas._histoHPlot._backend.ax.xaxis.get_ticklabels():
label.set_color('white')
label.set_fontsize(1)
self.plot_canvas._histoVPlot.setGraphXLabel('Counts')
self.plot_canvas._histoVPlot._backend.ax.yaxis.get_label().set_color('white')
self.plot_canvas._histoVPlot._backend.ax.yaxis.get_label().set_fontsize(1)
for label in self.plot_canvas._histoVPlot._backend.ax.yaxis.get_ticklabels():
label.set_color('white')
label.set_fontsize(1)
n_patches = len(self.plot_canvas._histoHPlot._backend.ax.patches)
if n_patches > 0: self.plot_canvas._histoHPlot._backend.ax.patches[-1].remove()
if ticket['fwhm_h'] != 0.0:
x_fwhm_i, x_fwhm_f = ticket['fwhm_coordinates_h']
x_fwhm_i, x_fwhm_f = x_fwhm_i*factor1, x_fwhm_f*factor1
y_fwhm = ticket['fwhm_quote_h']
self.plot_canvas._histoHPlot._backend.ax.add_patch(FancyArrowPatch([x_fwhm_i, y_fwhm],
[x_fwhm_f, y_fwhm],
arrowstyle=ArrowStyle.CurveAB(head_width=2, head_length=4),
color='b',
linewidth=1.5))
n_patches = len(self.plot_canvas._histoVPlot._backend.ax.patches)
if n_patches > 0: self.plot_canvas._histoVPlot._backend.ax.patches[-1].remove()
if ticket['fwhm_v'] != 0.0:
y_fwhm_i, y_fwhm_f = ticket['fwhm_coordinates_v']
y_fwhm_i, y_fwhm_f = y_fwhm_i*factor2, y_fwhm_f*factor2
x_fwhm = ticket['fwhm_quote_v']
self.plot_canvas._histoVPlot._backend.ax.add_patch(FancyArrowPatch([x_fwhm, y_fwhm_i],
[x_fwhm, y_fwhm_f],
arrowstyle=ArrowStyle.CurveAB(head_width=2, head_length=4),
color='r',
linewidth=1.5))
self.plot_canvas._histoHPlot.replot()
self.plot_canvas._histoVPlot.replot()
self.plot_canvas.replot()
self.info_box.total.setText("{:.3e}".format(decimal.Decimal(ticket['total'])))
self.info_box.fwhm_h.setText("{:5.4f}".format(ticket['fwhm_h'] * factor1))
self.info_box.fwhm_v.setText("{:5.4f}".format(ticket['fwhm_v'] * factor2))
self.info_box.label_h.setText("FWHM " + xum)
self.info_box.label_v.setText("FWHM " + yum)
self.info_box.sigma_h.setText("{:5.4f}".format(ticket['sigma_h']*factor1))
self.info_box.sigma_v.setText("{:5.4f}".format(ticket['sigma_v']*factor2))
self.info_box.label_s_h.setText("\u03c3 " + xum)
self.info_box.label_s_v.setText("\u03c3 " + yum)
def clear(self):
self.plot_canvas.clear()
self.plot_canvas._histoHPlot.clear()
self.plot_canvas._histoVPlot.clear()
self.plot_canvas._histoHPlot._backend.ax.xaxis.get_label().set_color('white')
self.plot_canvas._histoHPlot._backend.ax.xaxis.get_label().set_fontsize(1)
for label in self.plot_canvas._histoHPlot._backend.ax.xaxis.get_ticklabels():
label.set_color('white')
label.set_fontsize(1)
self.plot_canvas._histoVPlot._backend.ax.yaxis.get_label().set_color('white')
self.plot_canvas._histoVPlot._backend.ax.yaxis.get_label().set_fontsize(1)
for label in self.plot_canvas._histoVPlot._backend.ax.yaxis.get_ticklabels():
label.set_color('white')
label.set_fontsize(1)
self.plot_canvas._histoHPlot.setGraphYLabel('')
self.plot_canvas._histoVPlot.setGraphXLabel('')
self.plot_canvas._histoHPlot.replot()
self.plot_canvas._histoVPlot.replot()
self.info_box.clear()
if __name__=="__main__":
from wofry.propagator.wavefront2D.generic_wavefront import GenericWavefront2D
w = GenericWavefront2D.initialize_wavefront_from_range(-0.002,0.002,-0.001,0.001,(200,200))
w.set_gaussian(0.00055,0.0002)
from PyQt5.QtWidgets import QApplication
app = QApplication([])
widget = QWidget()
layout = QVBoxLayout()
oo = ImageViewWithFWHM()
oo.plot_2D(w.get_intensity(),w.get_coordinate_x(),w.get_coordinate_y(),factor1=1e6,factor2=1e6,
title="Gaussian wavefront",xtitle="X / um", ytitle="Y / um",
colormap={"name":"temperature", "normalization":"linear", "autoscale":True, "vmin":0, "vmax":0, "colors":256})
layout.addWidget(oo)
widget.setLayout(layout)
widget.show()
# oo.clear()
app.exec_()
# /ImSwitchUC2-2.1.0.tar.gz/ImSwitchUC2-2.1.0/imswitch/imcontrol/model/managers/positioners/PositionerManager.py
from abc import ABC, abstractmethod
from typing import Dict, List
class PositionerManager(ABC):
""" Abstract base class for managers that control positioners. Each type of
positioner corresponds to a manager derived from this class. """
@abstractmethod
def __init__(self, positionerInfo, name: str, initialPosition: Dict[str, float]):
"""
Args:
positionerInfo: See setup file documentation.
name: The unique name that the device is identified with in the
setup file.
initialPosition: The initial position for each axis. This is a dict
in the format ``{ axis: position }``.
"""
self._positionerInfo = positionerInfo
self._position = initialPosition
initialSpeed = {
axis: 1000 for axis in positionerInfo.axes  # TODO: Hardcoded - should be updated according to JSON?
}
self._speed = initialSpeed
# settings for homing the axis
initialHome = {
axis: False for axis in positionerInfo.axes  # TODO: Hardcoded - should be updated according to JSON?
}
self._home = initialHome # is homed?
# settings for stopping the axis
initialStop = {
axis: False for axis in positionerInfo.axes  # TODO: Hardcoded - should be updated according to JSON?
}
self._stop = initialStop # is stopped?
self.__name = name
self.__axes = positionerInfo.axes
self.__forPositioning = positionerInfo.forPositioning
self.__forScanning = positionerInfo.forScanning
self.__resetOnClose = positionerInfo.resetOnClose
if not positionerInfo.forPositioning and not positionerInfo.forScanning:
raise ValueError('At least one of forPositioning and forScanning must be set in'
' PositionerInfo.')
@property
def name(self) -> str:
""" Unique positioner name, defined in the positioner's setup info. """
return self.__name
@property
def position(self) -> Dict[str, float]:
""" The position of each axis. This is a dict in the format
``{ axis: position }``. """
return self._position
@property
def speed(self) -> Dict[str, float]:
""" The speed of each axis. This is a dict in the format
``{ axis: position }``. """
return self._speed
@property
def home(self) -> Dict[str, bool]:
""" The home of each axis. This is a dict in the format
``{ axis: homed }``. """
return self._home
@property
def stop(self) -> Dict[str, bool]:
""" The stop of each axis. This is a dict in the format
``{ axis: stopped }``. """
return self._stop
@property
def axes(self) -> List[str]:
""" The list of axes that are controlled by this positioner. """
return self.__axes
@property
def forPositioning(self) -> bool:
""" Whether the positioner is used for manual positioning. """
return self.__forPositioning
@property
def forScanning(self) -> bool:
""" Whether the positioner is used for scanning. """
return self.__forScanning
@property
def resetOnClose(self) -> bool:
""" Whether the positioner should be reset to 0-position upon closing. """
return self.__resetOnClose
@abstractmethod
def move(self, dist: float, axis: str):
""" Moves the positioner by the specified distance and returns the new
position. Derived classes will update the position field manually. If
the positioner controls multiple axes, the axis must be specified. """
pass
@abstractmethod
def setPosition(self, position: float, axis: str):
""" Adjusts the positioner to the specified position and returns the
new position. Derived classes will update the position field manually.
If the positioner controls multiple axes, the axis must be specified.
"""
pass
def finalize(self) -> None:
""" Close/cleanup positioner. """
pass
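# A minimal sketch of a concrete subclass (the class name and in-memory
# axis handling are hypothetical, for illustration only -- real managers
# talk to hardware):
#
# class DummyPositionerManager(PositionerManager):
#     def __init__(self, positionerInfo, name):
#         super().__init__(positionerInfo, name,
#                          initialPosition={axis: 0.0 for axis in positionerInfo.axes})
#     def move(self, dist, axis):
#         self._position[axis] += dist
#         return self._position[axis]
#     def setPosition(self, position, axis):
#         self._position[axis] = position
#         return self._position[axis]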
# Copyright (C) 2020-2021 ImSwitch developers
# This file is part of ImSwitch.
#
# ImSwitch is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ImSwitch is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
# /ConcurrentLogHandler-0.9.1.tar.gz/ConcurrentLogHandler-0.9.1/src/portalocker.py
__all__ = [
"lock",
"unlock",
"LOCK_EX",
"LOCK_SH",
"LOCK_NB",
"LockException",
]
import os
class LockException(Exception):
# Error codes:
LOCK_FAILED = 1
if os.name == 'nt':
import win32con
import win32file
import pywintypes
LOCK_EX = win32con.LOCKFILE_EXCLUSIVE_LOCK
LOCK_SH = 0 # the default
LOCK_NB = win32con.LOCKFILE_FAIL_IMMEDIATELY
# is there any reason not to reuse the following structure?
__overlapped = pywintypes.OVERLAPPED()
elif os.name == 'posix':
import fcntl
LOCK_EX = fcntl.LOCK_EX
LOCK_SH = fcntl.LOCK_SH
LOCK_NB = fcntl.LOCK_NB
else:
raise RuntimeError("PortaLocker only defined for nt and posix platforms")
if os.name == 'nt':
def lock(file, flags):
hfile = win32file._get_osfhandle(file.fileno())
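# LockFileEx locks a byte range, not the whole file; -0x10000 here is
# reinterpreted as the unsigned DWORD 0xFFFF0000, i.e. a large fixed
# region that in practice covers the usable portion of the file.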
try:
win32file.LockFileEx(hfile, flags, 0, -0x10000, __overlapped)
except pywintypes.error, exc_value:
# error: (33, 'LockFileEx', 'The process cannot access the file because another process has locked a portion of the file.')
if exc_value[0] == 33:
raise LockException(LockException.LOCK_FAILED, exc_value[2])
else:
# Q: Are there exceptions/codes we should be dealing with here?
raise
def unlock(file):
hfile = win32file._get_osfhandle(file.fileno())
try:
win32file.UnlockFileEx(hfile, 0, -0x10000, __overlapped)
except pywintypes.error, exc_value:
if exc_value[0] == 158:
# error: (158, 'UnlockFileEx', 'The segment is already unlocked.')
# To match the 'posix' implementation, silently ignore this error
pass
else:
# Q: Are there exceptions/codes we should be dealing with here?
raise
elif os.name == 'posix':
def lock(file, flags):
try:
fcntl.flock(file.fileno(), flags)
except IOError, exc_value:
# The exception code varies on different systems so we'll catch
# every IO error
raise LockException(*exc_value)
def unlock(file):
fcntl.flock(file.fileno(), fcntl.LOCK_UN)
if __name__ == '__main__':
from time import time, strftime, localtime
import sys
import portalocker
log = open('log.txt', "a+")
portalocker.lock(log, portalocker.LOCK_EX)
timestamp = strftime("%m/%d/%Y %H:%M:%S\n", localtime(time()))
log.write( timestamp )
print "Wrote lines. Hit enter to release lock."
dummy = sys.stdin.readline()
log.close()
# /Gnosis_Utils-1.2.2.tar.gz/Gnosis_Utils-1.2.2/gnosis/xml/relax/rnctree.py
from __future__ import generators
import sys
from rnc_tokenize import token_list
class ParseError(SyntaxError): pass
for t in """
ANY SOME MAYBE ONE BODY ANNOTATION ELEM ATTR GROUP LITERAL
NAME COMMENT TEXT EMPTY INTERLEAVE CHOICE SEQ ROOT
DEFAULT_NS NS DATATYPES DATATAG PATTERN START DEFINE
""".split(): globals()[t] = t
PAIRS = {'BEG_BODY':('END_BODY',BODY),
'BEG_PAREN':('END_PAREN',GROUP),
'BEG_ANNO':('END_ANNO',ANNOTATION)}
TAGS = { ONE: 'group',
SOME: 'oneOrMore',
MAYBE: 'optional',
ANY: 'zeroOrMore'}
DEFAULT_NAMESPACE = None
DATATYPE_LIB = [0, '"http://www.w3.org/2001/XMLSchema-datatypes"']
OTHER_NAMESPACE = {}
CONTEXT_FREE = 0
try: enumerate
except: enumerate = lambda seq: zip(range(len(seq)),seq)
nodetypes = lambda nl: tuple(map(lambda n: n.type, nl))
toNodes = lambda toks: map(lambda t: Node(t.type, t.value), toks)
class Node(object):
__slots__ = ('type','value','name','quant')
def __iter__(self): yield self
__len__ = lambda self: 1
def __init__(self, type='', value=[], name=None, quant=ONE):
self.type = type
self.value = value
self.name = name
self.quant = quant
def format(self, indent=0):
out = [' '*indent+repr(self)]
write = out.append
if isinstance(self.value, str):
if self.type==COMMENT:
write(' '*(1+indent)+self.value)
else:
for node in self.value:
write(node.format(indent+1))
return '\n'.join(out)
def prettyprint(self):
print self.format()
def toxml(self):
if CONTEXT_FREE:
out = []
write = out.append
write('<?xml version="1.0" encoding="UTF-8"?>')
write('<grammar>')
self.type = None
write(self.xmlnode(1))
write('</grammar>')
return self.add_ns('\n'.join(out))
else:
return self.add_ns(self.xmlnode())
def xmlnode(self, indent=0):
out = []
write = out.append
if self.type == ROOT:
write('<?xml version="1.0" encoding="UTF-8"?>')
for x in self.value:
if not isinstance(x, Node):
raise TypeError, "Unhappy Node.value: "+repr(x)
elif x.type == START:
startelem = '<start><ref name="%s"/></start>' % x.value
write(' '*indent+startelem)
elif x.type == DEFINE:
write(' '*indent+'<define name="%s">' % x.name)
write(x.xmlnode(indent+1))
write(' '*indent+'</define>')
elif x.type == NAME:
write(' '*indent+ '<ref name="%s"/>' % x.value)
elif x.type == COMMENT:
write(' '*indent+'<!-- %s -->' % x.value)
elif x.type == LITERAL:
write(' '*indent+'<value>%s</value>' % x.value)
elif x.type == ANNOTATION:
write(' '*indent+\
'<a:documentation>%s</a:documentation>' % x.value)
elif x.type == INTERLEAVE:
write(' '*indent+'<interleave>')
write(x.xmlnode(indent+1))
write(' '*indent+'</interleave>')
elif x.type == CHOICE:
write(' '*indent+'<choice>')
write(x.xmlnode(indent+1))
write(' '*indent+'</choice>')
elif x.type == GROUP:
write(x.xmlnode(indent))
elif x.type == TEXT:
write(' '*indent+'<text/>')
elif x.type == EMPTY:
write(' '*indent+'<empty/>')
elif x.type == DATATAG:
DATATYPE_LIB[0] = 1 # Use datatypes
if x.name is None: # no parameters
write(' '*indent+'<data type="%s"/>' % x.value)
else:
write(' '*indent+'<data type="%s">' % x.name)
p = '<param name="pattern">%s</param>' % x.value
write(' '*(indent+1)+p)
write(' '*indent+'</data>')
elif x.type == ELEM:
if x.quant == ONE:
write(' '*indent+'<element name="%s">' % x.name)
write(x.xmlnode(indent+1))
write(' '*indent+'</element>')
else:
write(' '*indent+'<%s>' % TAGS[x.quant])
write(' '*(indent+1)+'<element name="%s">' % x.name)
write(x.xmlnode(indent+2))
write(' '*(indent+1)+'</element>')
write(' '*indent+'</%s>' % TAGS[x.quant])
elif x.type == ATTR:
if x.value[0].type == TEXT:
write(' '*indent+'<attribute name="%s"/>' % x.name)
elif x.value[0].type == EMPTY:
write(' '*indent+'<attribute name="%s">' % x.name)
write(' '*(indent+1)+'<empty/>')
write(' '*indent+'</attribute>')
return '\n'.join(out)
def __repr__(self):
return "Node(%s,%s,%s)[%d]" % (self.type, self.name,
self.quant, len(self.value))
def add_ns(self, xml):
"Add namespace attributes to top level element"
lines = xml.split('\n')
self.nest_annotations(lines) # annots not allowed before root elem
for i, line in enumerate(lines):
ltpos = line.find('<')
if ltpos >= 0 and line[ltpos+1] not in ('!','?'):
# We've got an element tag, not PI or comment
new = line[:line.find('>')]
new += ' xmlns="http://relaxng.org/ns/structure/1.0"'
if DEFAULT_NAMESPACE is not None:
new += '\n ns=%s' % DEFAULT_NAMESPACE
if DATATYPE_LIB[0]:
new += '\n datatypeLibrary=%s' % DATATYPE_LIB[1]
for ns, url in OTHER_NAMESPACE.items():
new += '\n xmlns:%s=%s' % (ns, url)
new += '>'
lines[i] = new
break
return '\n'.join(lines)
def nest_annotations(self, lines):
"Nest any top annotation within first element"
top_annotations = []
for i, line in enumerate(lines[:]):
if line.find('<a:') >= 0:
top_annotations.append(line)
del lines[i]
else:
ltpos = line.find('<')
if ltpos >= 0 and line[ltpos+1] not in ('!','?'):
break
for line in top_annotations:
lines.insert(i, ' '+line)
def findmatch(beg, nodes, offset):
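"Return index in nodes of the closing token matching opening type beg"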
level = 1
end = PAIRS[beg][0]
for i,t in enumerate(nodes[offset:]):
if t.type == beg: level += 1
elif t.type == end: level -= 1
if level == 0:
return i+offset
raise EOFError, ("No closing token encountered for %s @ %d"
% (beg,offset))
def match_pairs(nodes):
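"Group bracketed token runs into nested nodes, absorbing quantifiers"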
newnodes = []
i = 0
while 1:
if i >= len(nodes): break
node = nodes[i]
if node.type in PAIRS.keys():
# Look for enclosing brackets
match = findmatch(node.type, nodes, i+1)
matchtype = PAIRS[node.type][1]
node = Node(type=matchtype, value=nodes[i+1:match])
node.value = match_pairs(node.value)
newnodes.append(node)
i = match+1
else:
newnodes.append(node)
i += 1
if i >= len(nodes): break
if nodes[i].type in (ANY, SOME, MAYBE):
newnodes[-1].quant = nodes[i].type
i += 1
nodes[:] = newnodes
return nodes
def type_bodies(nodes):
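"Attach names and bodies to element, attribute and datatag nodes"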
newnodes = []
i = 0
while 1:
if i >= len(nodes): break
if nodetypes(nodes[i:i+3]) == (ELEM, NAME, BODY) or \
nodetypes(nodes[i:i+3]) == (ATTR, NAME, BODY):
name, body = nodes[i+1].value, nodes[i+2]
value, quant = type_bodies(body.value), body.quant
node = Node(nodes[i].type, value, name, quant)
newnodes.append(node)
i += 3
elif nodetypes(nodes[i:i+2]) == (DATATAG, PATTERN):
node = Node(DATATAG, nodes[i+1].value, nodes[i].value)
newnodes.append(node)
i += 2
elif nodes[i] == DEFINE:  # note: compares a Node to a str, so this debug branch never fires
print nodes[i:]
else:
if nodes[i].type == GROUP: # Recurse into groups
value = type_bodies(nodes[i].value)
nodes[i] = Node(GROUP, value, None, nodes[i].quant)
newnodes.append(nodes[i])
i += 1
nodes[:] = newnodes
return nodes
def nest_defines(nodes):
"Attach groups to named patterns"
newnodes = []
i = 0
while 1:
if i >= len(nodes): break
node = nodes[i]
newnodes.append(node)
if node.type == DEFINE:
group = []
while (i+1) < len(nodes) and nodes[i+1].type != DEFINE:
group.append(nodes[i+1])
i += 1
node.name = node.value
node.value = Node(GROUP, group)
i += 1
nodes[:] = newnodes
return nodes
def intersperse(nodes):
"Look for interleaved, choice, or sequential nodes in groups/bodies"
for node in nodes:
if node.type in (ELEM, ATTR, GROUP, LITERAL):
val = node.value
ntypes = [n.type for n in val if not isinstance(val,str)]
inters = [t for t in ntypes if t in (INTERLEAVE,CHOICE,SEQ)]
inters = dict(zip(inters,[0]*len(inters)))
if len(inters) > 1:
raise ParseError, "Ambiguity in sequencing: %s" % node
if len(inters) > 0:
intertype = inters.keys()[0]
items = []
for pat in node.value:
if pat.type != intertype:
items.append(pat)
node.value = Node(intertype, items)
if not isinstance(node.value, str): # No recurse to terminal str
intersperse(node.value)
return nodes
def scan_NS(nodes):
"Look for any namespace configuration lines"
global DEFAULT_NAMESPACE, OTHER_NAMESPACE, CONTEXT_FREE
for node in nodes:
if node.type == DEFAULT_NS:
DEFAULT_NAMESPACE = node.value
elif node.type == NS:
ns, url = map(str.strip, node.value.split('='))
OTHER_NAMESPACE[ns] = url
elif node.type == ANNOTATION and not OTHER_NAMESPACE.has_key('a'):
OTHER_NAMESPACE['a'] =\
'"http://relaxng.org/ns/compatibility/annotations/1.0"'
elif node.type == DATATYPES:
DATATYPE_LIB[:] = [1, node.value]
elif node.type == START:
CONTEXT_FREE = 1
def make_nodetree(tokens):
nodes = toNodes(tokens)
match_pairs(nodes)
type_bodies(nodes)
nest_defines(nodes)
intersperse(nodes)
scan_NS(nodes)
root = Node(ROOT, nodes)
return root
if __name__=='__main__':
make_nodetree(token_list(sys.stdin.read())).prettyprint()
# /Nuitka_winsvc-1.7.10-cp310-cp310-win_amd64.whl/nuitka/code_generation/AsyncgenCodes.py
from .CodeHelpers import generateStatementSequenceCode
from .Emission import SourceCodeCollector
from .FunctionCodes import (
finalizeFunctionLocalVariables,
getClosureCopyCode,
getFunctionCreationArgs,
getFunctionQualnameObj,
setupFunctionLocalVariables,
)
from .Indentation import indented
from .ModuleCodes import getModuleAccessCode
from .templates.CodeTemplatesAsyncgens import (
template_asyncgen_exception_exit,
template_asyncgen_noexception_exit,
template_asyncgen_object_body,
template_asyncgen_object_maker_template,
template_asyncgen_return_exit,
template_make_asyncgen,
)
from .YieldCodes import getYieldReturnDispatchCode
def _getAsyncgenMakerIdentifier(function_identifier):
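"""Derive the C function name used to create this asyncgen object."""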
return "MAKE_ASYNCGEN_" + function_identifier
def getAsyncgenObjectDeclCode(function_identifier, closure_variables):
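"""Build the C declaration of the asyncgen maker function."""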
asyncgen_creation_args = getFunctionCreationArgs(
defaults_name=None,
kw_defaults_name=None,
annotations_name=None,
closure_variables=closure_variables,
)
return template_asyncgen_object_maker_template % {
"asyncgen_maker_identifier": _getAsyncgenMakerIdentifier(function_identifier),
"asyncgen_creation_args": ", ".join(asyncgen_creation_args),
}
def getAsyncgenObjectCode(
context,
function_identifier,
closure_variables,
user_variables,
outline_variables,
temp_variables,
needs_exception_exit,
needs_generator_return,
):
# A bit of details going on here, pylint: disable=too-many-locals
setupFunctionLocalVariables(
context=context,
parameters=None,
closure_variables=closure_variables,
user_variables=user_variables + outline_variables,
temp_variables=temp_variables,
)
function_codes = SourceCodeCollector()
asyncgen_object_body = context.getOwner()
generateStatementSequenceCode(
statement_sequence=asyncgen_object_body.subnode_body,
allow_none=True,
emit=function_codes,
context=context,
)
function_cleanup = finalizeFunctionLocalVariables(context)
if needs_exception_exit:
(
exception_type,
exception_value,
exception_tb,
_exception_lineno,
) = context.variable_storage.getExceptionVariableDescriptions()
generator_exit = template_asyncgen_exception_exit % {
"function_cleanup": indented(function_cleanup),
"exception_type": exception_type,
"exception_value": exception_value,
"exception_tb": exception_tb,
}
else:
generator_exit = template_asyncgen_noexception_exit % {
"function_cleanup": indented(function_cleanup)
}
if needs_generator_return:
generator_exit += template_asyncgen_return_exit % {}
function_locals = context.variable_storage.makeCFunctionLevelDeclarations()
local_type_decl = context.variable_storage.makeCStructLevelDeclarations()
function_locals += context.variable_storage.makeCStructInits()
if local_type_decl:
heap_declaration = """\
struct %(function_identifier)s_locals *asyncgen_heap = \
(struct %(function_identifier)s_locals *)asyncgen->m_heap_storage;""" % {
"function_identifier": function_identifier
}
else:
heap_declaration = ""
asyncgen_creation_args = getFunctionCreationArgs(
defaults_name=None,
kw_defaults_name=None,
annotations_name=None,
closure_variables=closure_variables,
)
return template_asyncgen_object_body % {
"function_identifier": function_identifier,
"function_body": indented(function_codes.codes),
"heap_declaration": indented(heap_declaration),
"has_heap_declaration": 1 if heap_declaration != "" else 0,
"function_local_types": indented(local_type_decl),
"function_var_inits": indented(function_locals),
"function_dispatch": indented(getYieldReturnDispatchCode(context)),
"asyncgen_maker_identifier": _getAsyncgenMakerIdentifier(function_identifier),
"asyncgen_creation_args": ", ".join(asyncgen_creation_args),
"asyncgen_exit": generator_exit,
"asyncgen_module": getModuleAccessCode(context),
"asyncgen_name_obj": context.getConstantCode(
constant=asyncgen_object_body.getFunctionName()
),
"asyncgen_qualname_obj": getFunctionQualnameObj(asyncgen_object_body, context),
"code_identifier": context.getCodeObjectHandle(
code_object=asyncgen_object_body.getCodeObject()
),
"closure_name": "closure" if closure_variables else "NULL",
"closure_count": len(closure_variables),
}
def generateMakeAsyncgenObjectCode(to_name, expression, emit, context):
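"""Emit C code that instantiates the asyncgen object, copying closure variables."""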
asyncgen_object_body = expression.subnode_asyncgen_ref.getFunctionBody()
closure_variables = expression.getClosureVariableVersions()
closure_name, closure_copy = getClosureCopyCode(
closure_variables=closure_variables, context=context
)
args = []
if closure_name:
args.append(closure_name)
emit(
template_make_asyncgen
% {
"to_name": to_name,
"asyncgen_maker_identifier": _getAsyncgenMakerIdentifier(
asyncgen_object_body.getCodeName()
),
"args": ", ".join(str(arg) for arg in args),
"closure_copy": indented(closure_copy, 0, True),
}
)
context.addCleanupTempName(to_name)
# /ECoXiPy-0.4.0.tar.gz/ECoXiPy-0.4.0/ecoxipy/validation.py
from ecoxipy import _unicode
class ValidationOutputWrapper(object):
'''\
Instances of this class wrap an :class:`ecoxipy.Output` instance and a
validator instance, the latter having a method like
:class:`ecoxipy.Output` for each XML node type it wishes to validate (i.e.
:meth:`element`, :meth:`text`, :meth:`comment`,
:meth:`processing_instruction` and :meth:`document`).
When a XML node is to be created using this class, first the appropriate
validator method is called. This might raise an exception to stop building
completely. If this returns :const:`None` or :const:`True`, the result of
calling the same method on the output instance is returned. Otherwise
the creation call returns :const:`None` to create nothing.
Note that a validator's :meth:`element` method receives the attributes
dictionary which is given to the output, thus changes made by a validator
are reflected in the created XML representation.
'''
def __init__(self, output, validator):
self._output = output
self._validator = validator
self._ValidationMethod(self, 'element')
self._ValidationMethod(self, 'text')
self._ValidationMethod(self, 'comment')
self._ValidationMethod(self, 'processing_instruction')
self._ValidationMethod(self, 'document')
try:
self.preprocess = output.preprocess
except AttributeError:
pass
class _ValidationMethod(object):
def __init__(self, wrapper, name):
try:
self._validation_method = getattr(wrapper._validator, name)
except AttributeError:
self._validation_method = None
self._creation_method = getattr(wrapper._output, name)
setattr(wrapper, name, self)
def __call__(self, *args):
if self._validation_method is None:
validation_result = None
else:
validation_result = self._validation_method(*args)
if validation_result is None or validation_result is True:
return self._creation_method(*args)
def is_native_type(self, content):
return self._output.is_native_type(content)
class ValidationError(Exception):
'''\
Should be raised by validators to indicate a error while validating, the
message should describe the problem.
'''
class ListValidator(object):
'''\
A simple black- or whitelist-based validator class (see
:class:`ValidationOutputWrapper`). It takes lists of element as well as
attribute names and processing instruction targets, all given names and
targets are converted to Unicode. If the ``blacklist`` argument is
:const:`True` the lists define which elements, attributes and processing
instructions are invalid. If ``blacklist`` is :const:`False` the instance
works as a whitelist, thus the lists define the valid elements, attributes
and processing instructions. If the argument ``silent`` is :const:`True`,
the validating methods return :const:`False` on validation errors,
otherwise they raise a :class:`ValidationError`.
:param element_names: An iterable of element names or :const:`None` to
accept all elements.
:param attribute_names: An iterable of attribute names or :const:`None` to
accept all attributes.
:param pi_targets: An iterable of processing instruction targets or
:const:`None` to accept all processing instructions.
:param blacklist: If this is :const:`True`, the instance works as a
blacklist, otherwise as a whitelist.
:type blacklist: :class:`bool`
:param silent: If this is :const:`True`, failed validations return
:const:`False` for invalid element names or processing instruction
targets and invalid attributes are deleted. Otherwise they raise a
:class:`ValidationError`.
:type silent: :class:`bool`
'''
def __init__(self, element_names=None, attribute_names=None,
pi_targets=None, blacklist=True, silent=True):
if bool(blacklist):
self._invalid = self._blacklist_invalid
none_container = self._NothingContainer
else:
self._invalid = self._whitelist_invalid
none_container = self._EverythingContainer
create_set = lambda items: (none_container if items is None
else {_unicode(item) for item in items})
self._element_names = create_set(element_names)
self._attribute_names = create_set(attribute_names)
self._pi_targets = create_set(pi_targets)
self._silent = bool(silent)
@staticmethod
def _whitelist_invalid(item, allowed):
return item not in allowed
@staticmethod
def _blacklist_invalid(item, forbidden):
return item in forbidden
class _EverythingContainer(object):
def __contains__(self, item):
return True
_EverythingContainer = _EverythingContainer()
class _NothingContainer(object):
def __contains__(self, item):
return False
_NothingContainer = _NothingContainer()
def element(self, name, children, attributes):
if self._invalid(name, self._element_names):
if self._silent:
return False
raise ValidationError(
'The element name "{}" is not allowed.'.format(name))
for attr_name in list(attributes.keys()):
if self._invalid(attr_name, self._attribute_names):
if self._silent:
del attributes[attr_name]
else:
raise ValidationError(
'The attribute name "{}" is not allowed.'.format(
attr_name))
def processing_instruction(self, target, content):
if self._invalid(target, self._pi_targets):
if self._silent:
return False
raise ValidationError(
'The processing instruction target "{}" is not allowed.'.format(
target))
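# A minimal usage sketch. The element/attribute names are hypothetical, and
# the StringOutput backend is an assumption -- any ecoxipy Output works:
#
# from ecoxipy.string_output import StringOutput
# validator = ListValidator(element_names=['p', 'em'],
#                           attribute_names=['id'], blacklist=False)
# output = ValidationOutputWrapper(StringOutput(), validator)
# # With silent=True (the default), anything outside the whitelist is
# # silently dropped instead of raising ValidationError.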
# /Mask_anonymization_framework-1.3.2-py3-none-any.whl/mask_framework_lib/ner_plugins/utils/spec_tokenizers.py
import nltk
nltk.download('punkt')
from nltk.tokenize.util import align_tokens
from nltk.tokenize.treebank import TreebankWordTokenizer
import re
import tensorflow as tf
sess = tf.compat.v1.Session()
_treebank_word_tokenizer = TreebankWordTokenizer()
def tokenize_to_seq(documents):
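"""Tokenize annotated documents into (token, tag) sequences, split at sentence-ending punctuation."""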
sequences = []
sequence = []
for doc in documents:
if len(sequence)>0:
sequences.append(sequence)
sequence = []
text = doc["text"]
file = doc["id"]
text = text.replace("\"", "'")
text = text.replace("`", "'")
text = text.replace("``", "")
text = text.replace("''", "")
tokens = custom_span_tokenize(text)
for token in tokens:
token_txt = text[token[0]:token[1]]
found = False
for tag in doc["tags"]:
if int(tag["start"])<=token[0] and int(tag["end"])>=token[1]:
token_tag = tag["tag"]
#token_tag_type = tag["type"]
found = True
if not found:
token_tag = "O"
#token_tag_type = "O"
sequence.append((token_txt,token_tag))
if token_txt == "." or token_txt == "?" or token_txt == "!":
sequences.append(sequence)
sequence = []
sequences.append(sequence)
return sequences
def tokenize_fa(documents):
"""
Tokenization function. Returns a list of sequences.
:param documents: list of texts
:type documents: list
"""
sequences = []
sequence = []
for doc in documents:
if len(sequence) > 0:
sequences.append(sequence)
sequence = []
text = doc
text = text.replace("\"", "'")
text = text.replace("`", "'")
text = text.replace("``", "")
text = text.replace("''", "")
tokens = custom_span_tokenize(text)
for token in tokens:
token_txt = text[token[0]:token[1]]
found = False
if not found:
token_tag = "O"
# token_tag_type = "O"
sequence.append((token_txt, token_tag))
if token_txt == "." or token_txt == "?" or token_txt == "!":
sequences.append(sequence)
sequence = []
sequences.append(sequence)
return sequences
def custom_span_tokenize(text, language='english', preserve_line=True):
"""
Returns the spans of the tokens in text.
:param text: text to split into words
:type text: str
:param language: the model name in the Punkt corpus
:type language: str
:param preserve_line: an option to preserve the sentence and not sentence-tokenize it
:type preserve_line: bool
"""
tokens = custom_word_tokenize(text)
tokens = ['"' if tok in ['``', "''"] else tok for tok in tokens]
return align_tokens(tokens, text)
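# Example: custom_span_tokenize("Hello world") -> [(0, 5), (6, 11)],
# i.e. (start, end) character offsets into the original string.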
def custom_word_tokenize(text, language='english', preserve_line=False):
"""
Return a tokenized copy of *text*,
using NLTK's recommended word tokenizer
(currently an improved :class:`.TreebankWordTokenizer`
along with :class:`.PunktSentenceTokenizer`
for the specified language).
:param text: text to split into words
:type text: str
:param language: the model name in the Punkt corpus
:type language: str
:param preserve_line: an option to preserve the sentence and not sentence-tokenize it
:type preserve_line: bool
"""
tokens = []
sentences = [text] if preserve_line else nltk.sent_tokenize(text, language)
for sent in sentences:
for token in _treebank_word_tokenizer.tokenize(sent):
if "-" in token:
m = re.compile(r"(\d+)(-)([a-zA-Z-]+)")
g = m.match(token)
if g:
for group in g.groups():
tokens.append(group)
else:
tokens.append(token)
else:
tokens.append(token)
return tokens
def shape(word):
"""Return the orthographic shape of word: d=digit, W=uppercase, w=lowercase."""
shape = ""
for letter in word:
if letter.isdigit():
shape = shape + "d"
elif letter.isalpha():
if letter.isupper():
shape = shape + "W"
else:
shape = shape + "w"
else:
shape = shape + letter
return shape
# /Bravo-2.0.tar.gz/Bravo-2.0/bravo/plugins/generators.py
from __future__ import division
from array import array
from itertools import combinations, product
from random import Random
from zope.interface import implements
from bravo.blocks import blocks
from bravo.ibravo import ITerrainGenerator
from bravo.simplex import octaves2, octaves3, set_seed
from bravo.utilities.maths import morton2
R = Random()
class BoringGenerator(object):
"""
Generates boring slabs of flat stone.
This generator relies on implementation details of ``Chunk``.
"""
implements(ITerrainGenerator)
def populate(self, chunk, seed):
"""
Fill the bottom half of the chunk with stone.
"""
# Optimized fill. Fill the bottom eight sections with stone.
stone = array("B", [blocks["stone"].slot] * 16 * 16 * 16)
for section in chunk.sections[:8]:
section.blocks[:] = stone[:]
name = "boring"
before = tuple()
after = tuple()
class SimplexGenerator(object):
"""
Generates waves of stone.
This class uses a simplex noise generator to procedurally generate
organic-looking, continuously smooth terrain.
"""
implements(ITerrainGenerator)
def populate(self, chunk, seed):
"""
Make smooth waves of stone.
"""
set_seed(seed)
# And into one end he plugged the whole of reality as extrapolated
# from a piece of fairy cake, and into the other end he plugged his
# wife: so that when he turned it on she saw in one instant the whole
# infinity of creation and herself in relation to it.
factor = 1 / 256
for x, z in product(xrange(16), repeat=2):
magx = (chunk.x * 16 + x) * factor
magz = (chunk.z * 16 + z) * factor
height = octaves2(magx, magz, 6)
# Normalize around 70. Normalization is scaled according to a
# rotated cosine.
#scale = rotated_cosine(magx, magz, seed, 16 * 10)
height *= 15
height = int(height + 70)
# Make our chunk offset, and render into the chunk.
for y in range(height):
chunk.set_block((x, y, z), blocks["stone"].slot)
name = "simplex"
before = tuple()
after = tuple()
class ComplexGenerator(object):
"""
Generate islands of stone.
This class uses a simplex noise generator to procedurally generate
ridiculous things.
"""
implements(ITerrainGenerator)
def populate(self, chunk, seed):
"""
Make smooth islands of stone.
"""
set_seed(seed)
factor = 1 / 256
for x, z, y in product(xrange(16), xrange(16), xrange(256)):
magx = (chunk.x * 16 + x) * factor
magz = (chunk.z * 16 + z) * factor
sample = octaves3(magx, magz, y * factor, 6)
if sample > 0.5:
chunk.set_block((x, y, z), blocks["stone"].slot)
name = "complex"
before = tuple()
after = tuple()
class WaterTableGenerator(object):
"""
Create a water table.
"""
implements(ITerrainGenerator)
def populate(self, chunk, seed):
"""
Generate a flat water table halfway up the map.
"""
for x, z, y in product(xrange(16), xrange(16), xrange(62)):
if chunk.get_block((x, y, z)) == blocks["air"].slot:
chunk.set_block((x, y, z), blocks["spring"].slot)
name = "watertable"
before = tuple()
after = ("trees", "caves")
class ErosionGenerator(object):
"""
Erodes stone surfaces into dirt.
"""
implements(ITerrainGenerator)
def populate(self, chunk, seed):
"""
Turn the top few layers of stone into dirt.
"""
chunk.regenerate_heightmap()
for x, z in product(xrange(16), repeat=2):
y = chunk.height_at(x, z)
if chunk.get_block((x, y, z)) == blocks["stone"].slot:
bottom = max(y - 3, 0)
for i in range(bottom, y + 1):
chunk.set_block((x, i, z), blocks["dirt"].slot)
name = "erosion"
before = ("boring", "simplex")
after = ("watertable",)
class GrassGenerator(object):
"""
Find exposed dirt and grow grass.
"""
implements(ITerrainGenerator)
def populate(self, chunk, seed):
"""
Find the top dirt block in each y-level and turn it into grass.
"""
chunk.regenerate_heightmap()
for x, z in product(xrange(16), repeat=2):
y = chunk.height_at(x, z)
if (chunk.get_block((x, y, z)) == blocks["dirt"].slot and
(y == 127 or
chunk.get_block((x, y + 1, z)) == blocks["air"].slot)):
chunk.set_block((x, y, z), blocks["grass"].slot)
name = "grass"
before = ("erosion", "complex")
after = tuple()
class BeachGenerator(object):
"""
Generates simple beaches.
Beaches are areas of sand around bodies of water. This generator will form
beaches near all bodies of water regardless of size or composition; it
will form beaches at large seashores and frozen lakes. It will even place
beaches on one-block puddles.
"""
implements(ITerrainGenerator)
above = set([blocks["air"].slot, blocks["water"].slot,
blocks["spring"].slot, blocks["ice"].slot])
replace = set([blocks["dirt"].slot, blocks["grass"].slot])
def populate(self, chunk, seed):
"""
Find blocks within a height range and turn them into sand if they are
dirt and underwater or exposed to air. If the height range is near the
water table level, this creates fairly good beaches.
"""
chunk.regenerate_heightmap()
for x, z in product(xrange(16), repeat=2):
y = chunk.height_at(x, z)
while y > 60 and chunk.get_block((x, y, z)) in self.above:
y -= 1
if not 60 < y < 66:
continue
if chunk.get_block((x, y, z)) in self.replace:
chunk.set_block((x, y, z), blocks["sand"].slot)
name = "beaches"
before = ("erosion", "complex")
after = ("saplings",)
class OreGenerator(object):
"""
Place ores and clay.
"""
implements(ITerrainGenerator)
def populate(self, chunk, seed):
set_seed(seed)
xzfactor = 1 / 16
yfactor = 1 / 32
for x, z in product(xrange(16), repeat=2):
for y in range(chunk.height_at(x, z) + 1):
magx = (chunk.x * 16 + x) * xzfactor
magz = (chunk.z * 16 + z) * xzfactor
magy = y * yfactor
sample = octaves3(magx, magz, magy, 3)
if sample > 0.9999:
# Figure out what to place here.
old = chunk.get_block((x, y, z))
new = None
if old == blocks["sand"].slot:
# Sand becomes clay.
new = blocks["clay"].slot
elif old == blocks["dirt"].slot:
# Dirt becomes gravel.
new = blocks["gravel"].slot
elif old == blocks["stone"].slot:
# Stone becomes one of the ores.
if y < 12:
new = blocks["diamond-ore"].slot
elif y < 24:
new = blocks["gold-ore"].slot
elif y < 36:
new = blocks["redstone-ore"].slot
elif y < 48:
new = blocks["iron-ore"].slot
else:
new = blocks["coal-ore"].slot
if new:
chunk.set_block((x, y, z), new)
name = "ore"
before = ("erosion", "complex", "beaches")
after = tuple()
class SafetyGenerator(object):
"""
Generates terrain features essential for the safety of clients.
"""
implements(ITerrainGenerator)
def populate(self, chunk, seed):
"""
Spread a layer of bedrock along the bottom of the chunk, and clear the
top two layers to avoid players getting stuck at the top.
"""
for x, z in product(xrange(16), repeat=2):
chunk.set_block((x, 0, z), blocks["bedrock"].slot)
chunk.set_block((x, 126, z), blocks["air"].slot)
chunk.set_block((x, 127, z), blocks["air"].slot)
name = "safety"
before = ("boring", "simplex", "complex", "cliffs", "float", "caves")
after = tuple()
class CliffGenerator(object):
"""
This class/generator creates cliffs by selectively applying a offset of
the noise map to blocks based on height. Feel free to make this more
realistic.
This generator relies on implementation details of ``Chunk``.
"""
implements(ITerrainGenerator)
def populate(self, chunk, seed):
"""
Make smooth waves of stone, then compare to current landscape.
"""
set_seed(seed)
factor = 1 / 256
for x, z in product(xrange(16), repeat=2):
magx = ((chunk.x + 32) * 16 + x) * factor
magz = ((chunk.z + 32) * 16 + z) * factor
height = octaves2(magx, magz, 6)
height *= 15
height = int(height + 70)
current_height = chunk.heightmap[x * 16 + z]
if (-6 < current_height - height < 3 and
current_height > 63 and height > 63):
for y in range(height - 3):
chunk.set_block((x, y, z), blocks["stone"].slot)
for y in range(y, 128):
chunk.set_block((x, y, z), blocks["air"].slot)
name = "cliffs"
before = tuple()
after = tuple()
class FloatGenerator(object):
"""
Rips chunks out of the map, to create surreal chunks of floating land.
This generator relies on implementation details of ``Chunk``.
"""
implements(ITerrainGenerator)
def populate(self, chunk, seed):
"""
Create floating islands.
"""
# Eat moar stone
R.seed(seed)
factor = 1 / 256
for x, z in product(xrange(16), repeat=2):
magx = ((chunk.x+16) * 16 + x) * factor
magz = ((chunk.z+16) * 16 + z) * factor
height = octaves2(magx, magz, 6)
height *= 15
height = int(height + 70)
if abs(chunk.heightmap[x * 16 + z] - height) < 10:
height = 256
else:
height = height - 30 + R.randint(-15, 10)
for y in range(height):
chunk.set_block((x, y, z), blocks["air"].slot)
name = "float"
before = tuple()
after = tuple()
class CaveGenerator(object):
"""
Carve caves and seams out of terrain.
"""
implements(ITerrainGenerator)
def populate(self, chunk, seed):
"""
Make smooth waves of stone.
"""
sede = seed ^ 0xcafebabe
xzfactor = 1 / 128
yfactor = 1 / 64
for x, z in product(xrange(16), repeat=2):
magx = (chunk.x * 16 + x) * xzfactor
magz = (chunk.z * 16 + z) * xzfactor
for y in range(128):
if not chunk.get_block((x, y, z)):
continue
magy = y * yfactor
set_seed(seed)
should_cave = abs(octaves3(magx, magz, magy, 3))
set_seed(sede)
should_cave *= abs(octaves3(magx, magz, magy, 3))
if should_cave < 0.002:
chunk.set_block((x, y, z), blocks["air"].slot)
name = "caves"
before = ("grass", "erosion", "simplex", "complex", "boring")
after = tuple()
class SaplingGenerator(object):
"""
Plant saplings at relatively silly places around the map.
"""
implements(ITerrainGenerator)
primes = [401, 409, 419, 421, 431, 433, 439, 443, 449, 457, 461, 463, 467,
479, 487, 491, 499, 503, 509, 521, 523, 541, 547, 557, 563, 569,
571, 577, 587, 593, 599, 601, 607, 613, 617, 619, 631, 641, 643,
647, 653, 659, 661, 673, 677, 683, 691]
"""
A field of prime numbers, used to select factors for trees.
"""
ground = (blocks["grass"].slot, blocks["dirt"].slot)
def populate(self, chunk, seed):
"""
Place saplings.
The algorithm used to pick locations for the saplings is quite
simple, although slightly involved. The basic technique is to
calculate a Morton number for every xz-column in the chunk, and then
use coprime offsets to sprinkle selected points fairly evenly
throughout the chunk.
Saplings are only placed on dirt and grass blocks.
"""
R.seed(seed)
factors = R.choice(list(combinations(self.primes, 3)))
for x, z in product(xrange(16), repeat=2):
# Make a Morton number.
morton = morton2(chunk.x * 16 + x, chunk.z * 16 + z)
if not all(morton % factor for factor in factors):
# Magic number is how many tree types are available
species = morton % 4
# Plant a sapling.
y = chunk.height_at(x, z)
if chunk.get_block((x, y, z)) in self.ground:
chunk.set_block((x, y + 1, z), blocks["sapling"].slot)
chunk.set_metadata((x, y + 1, z), species)
name = "saplings"
before = ("grass", "erosion", "simplex", "complex", "boring")
after = tuple()
# /GenIce2-2.1.7.1.tar.gz/GenIce2-2.1.7.1/genice2/lattices/mu.py
from genice2.cell import cellvectors
import genice2.lattices
desc = {"ref": {"SpaceFullerene": 'Sikiric 2010'},
"usage": "No options available.",
"brief": "A space fullerene."
}
class Lattice(genice2.lattices.Lattice):
def __init__(self):
self.pairs = """
130 194
58 74
80 211
151 4
47 173
213 112
9 30
170 86
105 77
115 38
1 115
12 52
137 193
122 134
99 205
66 71
18 145
198 155
135 61
168 3
16 119
78 169
150 160
204 138
178 195
124 192
73 19
167 81
189 7
56 37
63 90
64 91
93 64
123 65
141 17
84 27
205 94
136 182
113 161
133 73
57 145
208 55
26 100
123 204
83 166
219 187
141 55
7 210
99 120
172 157
169 197
40 126
122 27
26 122
199 20
200 22
163 155
132 83
117 102
206 35
100 47
92 21
178 185
202 90
82 163
188 14
28 134
131 50
69 206
218 22
45 167
118 21
162 35
116 44
182 129
63 57
111 54
159 150
176 162
11 174
31 106
132 206
212 120
164 118
37 20
0 72
19 36
144 70
166 25
117 209
84 14
58 102
185 173
107 111
2 119
108 114
141 142
51 52
92 93
0 184
135 152
147 45
135 126
48 7
170 46
117 191
143 3
17 180
60 96
6 109
48 45
208 203
185 54
42 22
186 210
28 68
34 91
10 86
193 53
71 205
147 165
79 221
10 89
164 64
164 155
46 152
24 203
185 44
88 134
140 93
87 211
123 56
57 46
211 53
193 175
169 76
12 200
104 54
188 122
125 104
168 38
85 174
127 109
1 69
16 74
150 158
212 67
208 162
148 94
132 177
189 215
87 59
82 175
144 65
153 181
130 89
139 121
181 103
140 155
164 192
187 49
89 216
4 83
26 215
199 172
24 110
124 175
32 194
125 212
157 114
41 171
109 161
13 111
12 114
81 134
159 31
25 206
214 202
56 147
23 167
96 192
1 182
47 27
166 180
10 145
210 70
18 161
118 39
162 17
178 95
138 128
2 92
76 80
90 152
210 100
80 191
11 132
170 216
15 41
18 61
160 196
189 65
28 149
146 29
54 99
33 133
37 160
204 108
207 110
76 203
176 129
172 3
27 149
102 197
141 50
101 190
16 220
188 158
140 103
202 113
69 156
151 172
178 88
51 106
2 87
195 116
43 174
187 108
101 36
139 66
34 29
188 186
62 156
99 183
11 9
30 35
75 171
123 48
71 112
19 86
146 207
37 187
33 170
49 196
24 97
126 198
129 156
127 198
51 168
125 213
16 79
5 215
221 53
220 53
160 106
32 202
116 120
77 100
165 70
39 184
84 167
105 67
154 73
107 44
39 163
104 95
195 67
117 55
58 97
218 38
79 39
119 118
52 196
213 98
201 92
144 158
143 11
189 14
77 81
73 139
48 128
184 103
119 91
75 209
215 88
109 214
209 76
110 55
125 68
121 179
201 79
168 108
116 149
23 88
4 182
143 115
21 96
151 218
191 207
80 97
5 77
52 85
40 60
33 101
43 136
177 85
13 33
75 62
131 30
23 7
15 142
86 113
56 158
42 115
58 59
159 14
104 179
130 126
153 90
78 110
28 95
31 165
219 199
213 107
136 200
94 217
137 201
198 181
128 70
66 120
1 177
194 60
62 25
36 145
5 195
157 138
32 61
204 106
49 3
173 183
6 40
142 78
127 61
221 64
211 91
34 191
154 190
157 196
31 65
13 205
154 113
179 94
143 4
131 62
150 128
15 197
32 10
112 36
15 176
133 71
186 81
12 219
137 74
85 38
135 216
148 95
176 166
127 72
190 216
20 165
146 2
72 192
29 193
148 67
171 102
159 45
98 19
83 22
43 219
59 207
68 105
34 74
142 156
41 25
23 47
30 129
72 194
9 136
43 151
133 57
8 35
105 173
0 153
5 84
89 63
29 93
68 44
212 217
214 181
214 60
41 8
42 9
13 98
217 139
144 26
146 220
17 69
131 180
147 186
169 59
82 96
121 111
42 180
101 121
174 49
51 199
124 221
140 175
40 163
177 200
21 103
24 171
6 152
154 63
8 203
190 18
208 197
183 149
148 183
6 0
218 114
75 78
20 138
82 201
98 217
112 179
66 107
209 50
8 50
124 184
130 153
46 161
137 87
220 97
"""
self.waters = """
0.66922 0.70961 0.68577
0.33077 0.29039 0.31424
0.41923 0.20961 0.55512
0.0 0.0 0.23421
0.12372 0.87628 0.29514
0.33589 0.29295 0.0191
0.87628 0.75256 0.70486
0.08077 0.54039 0.07423
0.74744 0.87372 0.40756
0.70961 0.04039 0.31424
0.45961 0.91923 0.77788
0.75256 0.87628 0.29514
0.66667 0.33333 0.23454
0.04295 0.70705 0.86606
0.45961 0.54039 0.07423
0.12628 0.87372 0.40756
0.79039 0.58077 0.55512
0.20961 0.41923 0.37153
0.87628 0.12372 0.77821
0.41667 0.08333 0.83333
0.33333 0.91667 0.16667
0.41923 0.20961 0.62847
0.91922 0.45961 0.29062
0.91411 0.45705 0.0382
0.62372 0.62628 0.46728
0.79294 0.58589 0.37604
0.0 0.0 0.05729
0.70706 0.66411 0.0191
0.66411 0.95705 0.9809
0.20706 0.41411 0.55545
0.62372 0.99744 0.35243
0.33589 0.29294 0.13395
0.54294 0.08589 0.74089
0.91667 0.58334 0.83333
0.99744 0.37372 0.53272
0.58077 0.79039 0.37153
0.66667 0.08333 0.83333
0.58333 0.91667 0.16667
0.20706 0.4141 0.25911
0.79039 0.58077 0.62847
0.04038 0.70961 0.68577
0.87628 0.75256 0.40756
0.95961 0.29039 0.31424
0.45706 0.91411 0.25911
0.33334 0.66667 0.94722
0.66667 0.33334 0.09912
0.87628 0.75256 0.77821
0.95706 0.66411 0.0191
0.9141 0.45705 0.11154
0.75256 0.87628 0.22179
0.87628 0.12372 0.40756
0.33078 0.29039 0.19939
0.54039 0.45961 0.22212
0.58589 0.79295 0.55545
0.91923 0.45961 0.92577
0.20961 0.41923 0.44487
0.70706 0.04295 0.13395
0.66923 0.70962 0.80061
0.0 0.75 0.5
0.25 0.0 0.5
0.29039 0.95961 0.68577
0.79295 0.20705 0.74089
0.66667 0.33334 0.39063
0.45961 0.54039 0.77788
0.87372 0.12628 0.59244
0.12628 0.25256 0.11121
0.45706 0.54295 0.88846
0.25256 0.12628 0.95729
0.45706 0.91411 0.96181
0.37372 0.37628 0.35243
0.12628 0.87372 0.11121
0.66411 0.70705 0.86605
0.66923 0.95962 0.68577
0.41667 0.33334 0.83333
0.99744 0.62372 0.53272
0.66667 0.33334 0.43213
0.62372 0.99744 0.46728
0.33589 0.04295 0.0191
0.41411 0.20706 0.44455
0.87372 0.74744 0.59244
0.75 0.0 0.5
0.54295 0.0859 0.0382
0.20705 0.79295 0.62396
0.95961 0.66923 0.31424
0.54295 0.45705 0.0382
0.45706 0.54295 0.25911
0.29039 0.95961 0.80061
0.37628 0.00256 0.53272
0.95706 0.29295 0.0191
0.33333 0.66667 0.76546
0.54295 0.45706 0.74089
0.79039 0.20961 0.55512
0.25256 0.12628 0.59244
0.12372 0.24744 0.59244
0.0 0.0 0.90121
0.87372 0.12628 0.95729
0.37628 0.00256 0.64757
0.75 0.75 0.5
0.29295 0.95705 0.86606
0.79039 0.58078 0.92578
0.12628 0.87372 0.04271
0.91667 0.33334 0.83333
0.00256 0.62628 0.46728
0.37628 0.37372 0.64757
0.79039 0.20961 0.92578
0.29295 0.95705 0.9809
0.33333 0.41667 0.16667
0.33334 0.66667 0.90088
0.95961 0.29039 0.19939
0.0 0.0 0.71944
0.37372 0.37628 0.46728
0.0859 0.54295 0.88846
0.66411 0.95705 0.86605
0.24744 0.12372 0.77821
0.91922 0.45961 0.22212
0.12372 0.24744 0.29514
0.45706 0.54295 0.96181
0.00256 0.37628 0.46728
0.66667 0.33333 0.61389
0.66667 0.33333 0.56754
0.54039 0.45962 0.92578
0.04295 0.3359 0.86606
0.74744 0.87372 0.04271
0.95705 0.29294 0.13395
0.58589 0.79295 0.62396
0.54039 0.08077 0.92578
0.08077 0.54039 0.70938
0.87628 0.12372 0.70486
0.95705 0.66411 0.13395
0.37372 0.99744 0.35243
0.33333 0.66667 0.72396
0.79294 0.20706 0.37604
0.70961 0.66923 0.31424
0.66667 0.58334 0.83333
0.70705 0.04294 0.0191
0.91411 0.45706 0.74089
0.54039 0.08077 0.29062
0.20706 0.79295 0.55545
0.08333 0.66667 0.16667
0.29295 0.3359 0.86606
0.20705 0.41411 0.62396
0.12628 0.25256 0.40756
0.24744 0.12372 0.40756
0.0 0.0 0.28056
0.0 0.0 0.0988
0.66923 0.95961 0.80061
0.37628 0.37372 0.53272
0.54295 0.0859 0.11154
0.0 0.0 0.94271
0.66411 0.70706 0.9809
0.70706 0.66411 0.13395
0.20706 0.79295 0.25911
0.79294 0.5859 0.74089
0.45961 0.54039 0.70938
0.29039 0.33078 0.80061
0.99744 0.37372 0.64757
0.41411 0.20706 0.37604
0.95961 0.66923 0.19939
0.74744 0.87372 0.11121
0.54295 0.45705 0.11154
0.58333 0.66667 0.16667
0.0 0.0 0.76579
0.33333 0.66667 0.38611
0.99744 0.62372 0.64757
0.79039 0.20962 0.62847
0.33589 0.04295 0.13395
0.00256 0.62628 0.35243
0.66667 0.33334 0.05278
0.12372 0.24744 0.22179
0.37372 0.99744 0.46728
0.04039 0.70961 0.80061
0.79294 0.58589 0.44455
0.12372 0.87628 0.22179
0.04295 0.70706 0.9809
0.5859 0.79295 0.25911
0.33333 0.66667 0.60937
0.20961 0.79039 0.37153
0.54039 0.45961 0.29062
0.04295 0.33589 0.9809
0.87372 0.12628 0.88879
0.00256 0.37628 0.35243
0.29039 0.33077 0.68576
0.33077 0.04039 0.31424
0.87372 0.74744 0.95729
0.62628 0.62372 0.64757
0.0859 0.54295 0.96181
0.45961 0.91923 0.07423
0.70961 0.04039 0.19939
0.58078 0.79039 0.07423
0.20961 0.41922 0.07423
0.04039 0.33078 0.80061
0.0 0.25 0.5
0.62628 0.00256 0.64757
0.33334 0.66667 0.56787
0.45961 0.91923 0.70938
0.29295 0.33589 0.9809
0.70961 0.66923 0.19939
0.20961 0.79039 0.44487
0.04039 0.33078 0.68577
0.33078 0.04039 0.19939
0.66667 0.33333 0.27604
0.12372 0.87628 0.59244
0.4141 0.20705 0.74089
0.58077 0.79039 0.44488
0.08333 0.41667 0.16667
0.87372 0.74744 0.88879
0.62372 0.62628 0.35243
0.25 0.25 0.5
0.33333 0.66667 0.43246
0.79295 0.20706 0.44455
0.20961 0.79039 0.07423
0.62628 0.00256 0.53272
0.41922 0.20961 0.92577
0.45706 0.9141 0.88846
0.24744 0.12372 0.70486
0.12628 0.25256 0.04271
0.08078 0.54039 0.77788
0.25256 0.12628 0.88879
0.08589 0.54295 0.25911
0.54039 0.08077 0.22212
0.62628 0.62372 0.53272
0.75256 0.87628 0.59244
"""
self.coord = "relative"
self.cages = """
14 0.33333333333333337 0.6666666666666667 0.12606666666666666
12 -0.34357 -0.17179 -0.07639
12 0.33333 0.66667 -0.33333
16 0.9999966666666668 2.0000033333333334 1.3513933333333334
12 0.34357 0.17179 0.07639
12 0.6769033333333333 0.8384566666666667 0.7430566666666667
12 -0.6769033333333333 -0.8384566666666667 -0.7430566666666667
15 0.33333333333333337 0.6666666666666667 0.8326766666666667
15 0.0 0.0 -0.16601
14 0.0 0.0 0.5406
16 0.66667 0.33333 -0.01806
12 -0.010236666666666666 0.49487666666666663 0.5902766666666667
12 -0.17178 -0.34357 0.07639
12 0.17178 -0.17179 -0.07639
12 0.5051233333333334 1.0102366666666667 0.5902766666666667
14 0.33333333333333337 0.6666666666666667 1.2072666666666667
14 0.6666666666666667 1.3333333333333335 1.8739333333333335
16 0.6666633333333333 1.3333366666666666 0.6847266666666667
12 0.5051133333333333 0.49487666666666663 0.5902766666666667
15 0.6666666666666667 0.33333333333333337 -0.8326766666666667
12 1.0102366666666667 1.5051233333333334 1.4097233333333334
16 1.3333366666666666 0.6666633333333333 -0.6847266666666667
12 -0.17179 0.17178 0.07639
12 0.17179 0.34357 -0.07639
12 1.0000033333333334 0.9999966666666668 0.9999966666666668
12 0.16154333333333334 0.8384466666666667 0.7430566666666667
12 0.8384466666666667 0.16154333333333334 -0.7430566666666667
16 0.33333 0.66667 0.01806
14 0.0 0.0 -0.5406
16 1.0000033333333334 0.9999966666666668 0.6486066666666667
15 0.33333333333333337 0.6666666666666667 0.5006566666666666
12 0.66667 0.33333 0.33333
12 -0.5051233333333334 -1.0102366666666667 -0.5902766666666667
15 0.6666666666666667 1.3333333333333335 1.4993433333333335
12 0.16155333333333333 0.3230966666666667 0.7430566666666667
15 0.0 0.0 0.16601
14 0.6666666666666667 0.33333333333333337 -1.2072666666666667
12 0.8384566666666667 0.6769033333333333 -0.7430566666666667
12 0.49487666666666663 1.5051133333333335 1.4097233333333334
"""
self.bondlen = 3
self.cell = """
12.746393818818838 0.0 0.0
-6.373196909409416 11.038700853738058 0.0
4.332977656078051e-15 7.504937448387889e-15 70.76289521345811
"""
self.density = 0.6664596154282791
self.cell = cellvectors(a=12.746393818818838,
b=12.746393818818838,
c=70.76289521345811,
C=119.99999999999999)
// /Mathics_Django-6.0.0-py3-none-any.whl/mathics_django/web/media/js/mathjax/jax/output/HTML-CSS/fonts/STIX/General/BoldItalic/IPAExtensions.js
MathJax.Hub.Insert(MathJax.OutputJax["HTML-CSS"].FONTDATA.FONTS["STIXGeneral-bold-italic"],{592:[473,14,512,13,492],593:[473,14,612,25,592],594:[473,14,612,25,592],595:[691,13,500,-14,449],596:[462,13,444,-5,392],597:[462,157,444,-5,406],598:[699,233,500,-21,517],599:[683,13,570,-21,653],600:[462,13,444,5,421],601:[462,13,444,5,398],602:[462,13,626,5,626],603:[475,14,444,5,482],604:[475,14,480,5,469],605:[475,14,689,5,689],606:[475,14,486,7,475],607:[462,207,367,-100,364],608:[683,245,720,-52,751],609:[472,245,549,-52,520],610:[462,11,561,21,544],611:[462,234,444,20,400],612:[450,10,493,10,488],613:[459,249,556,-13,498],614:[683,9,556,-13,498],615:[683,205,533,-13,475],616:[684,9,278,-10,262],617:[456,8,253,2,237],618:[462,0,304,-32,321],619:[699,9,320,9,368],620:[699,9,445,17,417],621:[699,233,291,-47,290],622:[699,236,623,2,585],623:[462,9,778,-14,723],624:[462,233,778,-14,723],625:[462,233,759,-14,704],626:[462,233,694,-109,632],627:[462,233,505,-6,486],628:[462,12,588,-27,614],629:[462,13,500,-3,441],630:[462,5,749,23,751],631:[477,2,685,-3,626],632:[685,231,691,-3,632],633:[462,0,427,0,410],634:[699,0,493,0,476],635:[462,233,436,0,417],636:[462,233,389,-87,389],637:[462,233,389,-47,389],638:[484,0,360,-21,417],639:[484,0,338,10,292],640:[464,0,498,8,515],641:[464,0,498,8,597],642:[462,218,389,-32,333],643:[683,233,424,-104,584],644:[683,207,394,-90,576],645:[470,233,415,79,344],646:[683,243,521,-40,641],647:[513,90,310,7,299],648:[594,233,311,-60,281],649:[462,9,556,-16,514],650:[452,8,500,15,552],651:[462,10,534,18,492],652:[462,13,444,15,401],653:[462,13,667,15,614],654:[667,0,444,16,502],655:[464,0,633,65,606],656:[449,218,440,-24,405],657:[449,97,411,-24,376],658:[450,236,499,-10,558],659:[450,307,499,-10,528],660:[685,0,530,25,520],661:[685,0,530,65,509],662:[669,14,487,25,453],663:[462,237,479,20,544],664:[680,17,723,13,734],665:[464,0,493,-10,486],666:[475,14,465,16,504],667:[538,11,580,29,690],668:[464,0,582,21,676],669:[685,233,475,-50,463],670:[457,250,500,22,528],671:[464,0,485,10,468],672:[582,205,488,1,674],673:[685,0,530,25,520],674:[685,0,530,65,507],675:[699,13,750,-21,735],676:[699,236,820,-21,813],677:[699,97,817,-21,743],678:[594,13,560,-3,524],679:[683,233,453,-30,670],680:[594,18,600,-3,618]});MathJax.Ajax.loadComplete(MathJax.OutputJax["HTML-CSS"].fontDir+"/General/BoldItalic/IPAExtensions.js");
/FreePyBX-1.0-RC1.tar.gz/FreePyBX-1.0-RC1/freepybx/public/js/dojox/gantt/GanttResourceItem.js.uncompressed.js | define("dojox/gantt/GanttResourceItem", ["dijit","dojo","dojox","dojo/require!dojo/date/locale"], function(dijit,dojo,dojox){
dojo.provide("dojox.gantt.GanttResourceItem");
dojo.require("dojo.date.locale");
dojo.declare("dojox.gantt.GanttResourceItem", null, {
constructor: function(ganttchart){
this.ganttChart = ganttchart;
this.ownerItem = [];
this.ownerNameItem = [];
this.ownerTaskNodeMapping = {};
this.ownerTaskNodeMapping_time = {};
this.resourceInfo = {};
this.ownerTimeConsume = {};
},
clearAll: function(){
this.clearData();
this.clearItems();
},
clearData: function(){
this.ownerItem = [];
this.ownerNameItem = [];
this.ownerTaskNodeMapping = {};
this.ownerTaskNodeMapping_time = {};
this.resourceInfo = {};
this.ownerTimeConsume = {};
},
clearItems: function(){
if(this.content.firstChild){
dojo.destroy(this.content.firstChild);
}
},
buildResource: function(){
var resourceInfo = {};
dojo.forEach(this.ganttChart.arrProjects, function(project){
dojo.forEach(project.arrTasks, function(task){
task.buildResourceInfo(resourceInfo);
}, this);
}, this);
return resourceInfo;
},
buildOwnerTimeConsume: function(){
var ownerTimeConsume = {};
for(var owner in this.resourceInfo){
var tasks = this.resourceInfo[owner];
//compute the overall {min, max, dur} time window covered by this owner's tasks
var timeZoom = {};
for(var i = 0; i < tasks.length; i++){
var task = tasks[i];
var startTime = task.taskItem.startTime.getTime(), dur = task.taskItem.duration * 24 * 60 * 60 * 1000 / this.ganttChart.hsPerDay;
timeZoom.min = timeZoom.min ? Math.min(timeZoom.min, startTime) : startTime;
timeZoom.max = timeZoom.max ? Math.max(timeZoom.max, (startTime + dur)) : (startTime + dur);
}
timeZoom.dur = (timeZoom.max - timeZoom.min) * this.ganttChart.hsPerDay / (24 * 60 * 60 * 1000);
timeZoom.min = new Date(timeZoom.min);
timeZoom.max = new Date(timeZoom.max);
ownerTimeConsume[owner] = timeZoom;
}
return ownerTimeConsume;
},
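// Illustrative shape of the returned map (the owner name is an example):
// { "Alice": { min: <Date>, max: <Date>, dur: <working hours, per hsPerDay> } }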
refresh: function(){
this.ownerTimeConsume = this.buildOwnerTimeConsume();
//resize outer div
this.contentData.firstChild.style.width = Math.max(1200, this.ganttChart.pixelsPerDay * this.ganttChart.totalDays) + "px";
for(var owner in this.resourceInfo){
this.refreshOwnerEntry(owner);
}
},
reConstruct: function(){
this.clearAll();
this.resourceInfo = this.buildResource();
this.ownerTimeConsume = this.buildOwnerTimeConsume();
this.tableControl = dojo.create("table", {
cellPadding: "0",
cellSpacing: "0",
className: "ganttResourceTableControl"
});
var newRowTblControl = this.tableControl.insertRow(this.tableControl.rows.length);
//Add to content Table
this.contentHeight = this.content.offsetHeight;
this.contentWidth = this.content.offsetWidth;
this.content.appendChild(this.tableControl);
//Create the contentData panel
this.contentData = dojo.create("div", {className: "ganttResourceContentDataContainer"});
this.contentData.appendChild(this.createPanelOwners());
dojo.style(this.contentData, "height", (this.contentHeight - this.ganttChart.panelTimeHeight) + "px");
//Create the panel of names
var newCellTblControl = dojo.create("td", {
vAlign: "top"
});
this.panelNames = dojo.create("div", {className: "ganttResourcePanelNames"});
this.panelNames.appendChild(this.createPanelNamesOwners());
newCellTblControl.appendChild(this.panelNames);
newRowTblControl.appendChild(newCellTblControl);
//add to control contentData and contentDataTime
newCellTblControl = dojo.create("td", {
vAlign: "top"
});
var divCell = dojo.create("div", {className: "ganttResourceDivCell"});
divCell.appendChild(this.contentData);
newCellTblControl.appendChild(divCell);
newRowTblControl.appendChild(newCellTblControl);
//Show panel of names
dojo.style(this.panelNames, {
height: (this.contentHeight - this.ganttChart.panelTimeHeight - this.ganttChart.scrollBarWidth) + "px",
width: this.ganttChart.maxWidthPanelNames + "px"
});
this.contentData.style.width = (this.contentWidth - this.ganttChart.maxWidthPanelNames) + "px";
this.contentData.firstChild.style.width = this.ganttChart.pixelsPerDay * (this.ganttChart.panelTime.firstChild.firstChild.rows[3].cells.length) + "px";
var _this = this;
this.contentData.onscroll = function(){
if(_this.panelNames){
_this.panelNames.scrollTop = this.scrollTop;
}
};
this.contentData.scrollLeft = this.ganttChart.contentData.scrollLeft;
for(var owner in this.resourceInfo){
this.createOwnerEntry(owner);
}
this.postAdjustment();
},
create: function(){
var resourceHeader = dojo.create("div", {
innerHTML: "Resource Chart:",
className: "ganttResourceHeader"
}, this.ganttChart.content, "after");
dojo.style(resourceHeader, "width", this.ganttChart.contentWidth + "px");
var content = dojo.create("div", {className: "ganttResourceContent"}, resourceHeader, "after");
dojo.style(content, {
width: this.ganttChart.contentWidth + "px",
height: (this.ganttChart.resourceChartHeight || (this.ganttChart.contentHeight * 0.8)) + "px"
});
this.content = content || this.content;
//create Table
this.reConstruct();
},
postAdjustment: function(){
//contentData height
this.contentData.firstChild.style.height = (this.ownerItem.length * 23) + "px";
this.panelNames.firstChild.style.height = (this.ownerItem.length * 23) + "px";
},
refreshOwnerEntry: function(owner){
this.refreshOwnerItem(owner);
dojo.forEach(this.resourceInfo[owner], function(task, i){
var item = this.ownerTaskNodeMapping[owner].tasks[i][0];
this.refreshDetailedTaskEntry(owner, item, task);
}, this);
},
createOwnerEntry: function(owner){
var containerOwner = this.contentData.firstChild;
var previousOwner = this.ownerItem[this.ownerItem.length - 1];
this.ownerTaskNodeMapping[owner] = {};
this.ownerTaskNodeMapping[owner][owner] = [];
//create nodes
var pos = dojo.position(containerOwner);
//compute the vertical position for this owner's row
var posY = (previousOwner ? parseInt(previousOwner.style.top) : (6 - 23)) + this.ganttChart.heightTaskItem + 11;
//create the owner item
var oItem = this.createOwnerItem(owner, posY);
containerOwner.appendChild(oItem);
this.ownerItem.push(oItem);
this.ownerTaskNodeMapping[owner][owner].push(oItem);
if(this.panelNames){
var oNameItem = this.createOwnerNameItem(owner, posY);
this.panelNames.firstChild.appendChild(oNameItem);
this.ownerNameItem.push(oNameItem);
this.ownerTaskNodeMapping[owner][owner].push(oNameItem);
}
var currentOwnerNode = this.ownerItem[this.ownerNameItem.length - 1],
currentOwnerNameNode = this.ownerNameItem[this.ownerNameItem.length - 1];
//adjust nodes
if(this.panelNames){
this.checkWidthTaskNameItem(currentOwnerNameNode);
var treeImg = this.createTreeImg(currentOwnerNameNode);
this.panelNames.firstChild.appendChild(treeImg);
this.ownerTaskNodeMapping[owner][owner].push(treeImg);
}
this.ownerTaskNodeMapping[owner]["taskCount"] = this.resourceInfo[owner].length;
this.ownerTaskNodeMapping[owner]["isOpen"] = false;
this.ownerTaskNodeMapping[owner]["tasks"] = [];
dojo.forEach(this.resourceInfo[owner], function(task){
this.ownerTaskNodeMapping[owner]["tasks"].push(this.createDetailedTaskEntry(owner, currentOwnerNameNode, task));
}, this);
return this;
},
createOwnerNameItem: function(owner, posY){
var ownerName = dojo.create("div", {
id: owner,
title: owner,
innerHTML: owner,
className: "ganttOwnerNameItem"
});
dojo.style(ownerName, "top", posY + "px");
return ownerName;
},
refreshOwnerItem: function(owner){
var item = this.ownerTaskNodeMapping[owner][owner][0],
start = this.ownerTimeConsume[owner].min, end = this.ownerTimeConsume[owner].max, dur = this.ownerTimeConsume[owner].dur,
posX = this.ganttChart.getPosOnDate(start); // should be task start date
item.style.left = posX + "px";
item.style.width = dur * this.ganttChart.pixelsPerWorkHour + "px";
dojo.forEach(this.resourceInfo[owner], function(task, i){
var tposX = this.ganttChart.getPosOnDate(task.taskItem.startTime); // should be task start date
dojo.style(item.childNodes[i], {
left: (tposX - posX) + "px",
width: task.taskItem.duration * this.ganttChart.pixelsPerWorkHour + "px"
});
}, this);
},
createOwnerItem: function(owner, posY){
var start = this.ownerTimeConsume[owner].min, end = this.ownerTimeConsume[owner].max, dur = this.ownerTimeConsume[owner].dur;
var posX = this.ganttChart.getPosOnDate(start); // should be task start date
var ownerControl = dojo.create("div", {
id: owner,
owner: true,
className: "ganttOwnerBar"
});
dojo.style(ownerControl, {
left: posX + "px",
top: posY + "px",
width: dur * this.ganttChart.pixelsPerWorkHour + "px",
height: this.ganttChart.heightTaskItem + "px"
});
dojo.forEach(this.resourceInfo[owner], function(task){
var ownerTaskItem = dojo.create("div", {
id: owner,
className: "ganttOwnerTaskBar"
}, ownerControl);
var tposX = this.ganttChart.getPosOnDate(task.taskItem.startTime); // should be task start date
dojo.style(ownerTaskItem, {
left: (tposX - posX) + "px",
width: task.taskItem.duration * this.ganttChart.pixelsPerWorkHour + "px", // should be task duration
height: this.ganttChart.heightTaskItem + "px"
});
}, this);
return ownerControl;
},
refreshDetailedTaskEntry: function(owner, item, task){
this.refreshTaskItem(item, task);
},
createDetailedTaskEntry: function(owner, parentNode, task){
var taskItems = [];
var containerTasks = this.contentData.firstChild;
var posY = parseInt(parentNode.style.top);
//create the task item
var taskItem = this.createTaskItem(task, posY);
taskItem.style.display = "none";
containerTasks.appendChild(taskItem);
this.ownerItem.push(taskItem);
taskItems.push(taskItem);
if(this.panelNames){
var taskNameItem = this.createTaskNameItem(task.taskItem.name, posY);
this.panelNames.firstChild.appendChild(taskNameItem);
taskNameItem.style.display = "none";
this.ownerNameItem.push(taskNameItem);
taskItems.push(taskNameItem);
}
if(this.panelNames){
this.ownerNameItem[this.ownerNameItem.length - 1].style.left = dojo.style(parentNode, "left") + 15 + "px";
var arrConnectingLinesNames = this.createConnectingLinesPN(parentNode, this.ownerNameItem[this.ownerNameItem.length - 1]);
dojo.forEach(arrConnectingLinesNames, function(lineName){
lineName.style.display = "none";
}, this);
taskItems.push({
"v": arrConnectingLinesNames[0],
"h": arrConnectingLinesNames[1]
});
this.checkWidthTaskNameItem(this.ownerNameItem[this.ownerNameItem.length - 1]);
}
return taskItems;
},
createTaskNameItem: function(owner, posY){
var taskNameItem = dojo.create("div", {
id: owner,
className: "ganttTaskNameItem",
title: owner,
innerHTML: owner
});
dojo.style(taskNameItem, "top", posY + "px");
return taskNameItem;
},
refreshTaskItem: function(item, task){
var posX = this.ganttChart.getPosOnDate(task.taskItem.startTime); // should be task start date
dojo.style(item, {
left: posX + "px",
width: task.taskItem.duration * this.ganttChart.pixelsPerWorkHour + "px"
});
},
createTaskItem: function(task, posY){
var posX = this.ganttChart.getPosOnDate(task.taskItem.startTime); // should be task start date
var itemControl = dojo.create("div", {
id: task.taskItem.name,
className: "ganttTaskBar"
});
dojo.style(itemControl, {
left: posX + "px",
top: posY + "px",
width: task.taskItem.duration * this.ganttChart.pixelsPerWorkHour + "px",
height: this.ganttChart.heightTaskItem + "px"
});
return itemControl;
},
createConnectingLinesPN: function(parentNode, currentNode){
var arrConnectingLinesNames = [];
var lineVerticalLeft = dojo.create("div", {
innerHTML: " ",
className: "ganttResourceLineVerticalLeft"
}, this.panelNames.firstChild);
lineVerticalLeft.cNode = currentNode;
lineVerticalLeft.pNode = parentNode;
var LineHorizontalLeft = dojo.create("div", {
noShade: true,
color: "#000",
className: "ganttResourceLineHorizontalLeft"
}, this.panelNames.firstChild);
LineHorizontalLeft.cNode = currentNode;
LineHorizontalLeft.pNode = parentNode;
this.panelNames.firstChild.appendChild(LineHorizontalLeft);
arrConnectingLinesNames.push(lineVerticalLeft);
arrConnectingLinesNames.push(LineHorizontalLeft);
return arrConnectingLinesNames;
},
createTreeImg: function(ownerNameItem){
var treeImg = dojo.create("div", {
id: ownerNameItem.id,
className: "ganttImageTreeExpand"
});
dojo.attr(treeImg, "tabIndex", 0);
var currentItem = this.ownerTaskNodeMapping[ownerNameItem.id];
dojo.forEach(["onclick", "onkeydown"], function(e){
this.ganttChart._events.push(
dojo.connect(treeImg, e, this, function(evt){
var reachTarget = false, owner, ownerItem;
if(e == "onkeydown" && evt.keyCode != dojo.keys.ENTER){ return; }
//TODO: perhaps the following conditional can be collapsed? Duplicate code.
if(currentItem.isOpen){
dojo.removeClass(treeImg, "ganttImageTreeCollapse");
dojo.addClass(treeImg, "ganttImageTreeExpand");
currentItem.isOpen = false;
//collapse
for(owner in this.ownerTaskNodeMapping){
ownerItem = this.ownerTaskNodeMapping[owner];
if(reachTarget){
dojo.forEach(ownerItem[owner], function(tItem){
dojo.style(tItem, "top", dojo.style(tItem, "top") - currentItem.taskCount * 23 + "px");
});
dojo.forEach(ownerItem.tasks, function(tItems){
dojo.forEach(tItems, function(tItem){
var item = !tItem.v && !tItem.h ? [tItem] : [tItem.v, tItem.h];
dojo.forEach(item, function(t){
dojo.style(t, "top", dojo.style(t, "top") - currentItem.taskCount * 23 + "px");
});
});
});
}else{
if(owner == ownerNameItem.id){
reachTarget = true;
dojo.forEach(ownerItem.tasks, function(tItems, i){
dojo.forEach(tItems, function(tItem){
this.styleOwnerItem(tItem, ownerItem[owner][0], "none", 0);
}, this);
}, this);
}
}
}
}else{
dojo.removeClass(treeImg, "ganttImageTreeExpand");
dojo.addClass(treeImg, "ganttImageTreeCollapse");
currentItem.isOpen = true;
//expand
for(owner in this.ownerTaskNodeMapping){
ownerItem = this.ownerTaskNodeMapping[owner];
if(reachTarget){
dojo.forEach(ownerItem[owner], function(tItem){
dojo.style(tItem, "top", dojo.style(tItem, "top") + currentItem.taskCount * 23 + "px");
});
dojo.forEach(ownerItem.tasks, function(tItems){
dojo.forEach(tItems, function(tItem){
var item = !tItem.v && !tItem.h ? [tItem] : [tItem.v, tItem.h];
dojo.forEach(item, function(t){
dojo.style(t, "top", dojo.style(t, "top") + currentItem.taskCount * 23 + "px");
});
});
});
}else{
if(owner == ownerNameItem.id){
reachTarget = true;
dojo.forEach(ownerItem.tasks, function(tItems, i){
dojo.forEach(tItems, function(tItem){
this.styleOwnerItem(tItem, ownerItem[owner][0], "inline", (i + 1) * 23);
}, this);
}, this);
}
}
}
}
})
);
}, this);
dojo.addClass(treeImg, "ganttResourceTreeImage");
dojo.style(treeImg, {
left: (dojo.style(ownerNameItem, "left") - 12) + "px",
top: (dojo.style(ownerNameItem, "top") + 3) + "px"
});
return treeImg;
},
styleOwnerItem: function(tItem, owner, displayType, topOffset){
if(tItem.v || tItem.h){
dojo.style(tItem.v, {
height: Math.max(1, (tItem.v.cNode.offsetTop - tItem.v.pNode.offsetTop)) + "px",
top: (tItem.v.pNode.offsetTop + 5) + "px",
left: (tItem.v.pNode.offsetLeft - 9) + "px",
display: displayType
});
dojo.style(tItem.h, {
width: Math.max(1, (tItem.h.cNode.offsetLeft - tItem.h.pNode.offsetLeft + 4)) + "px",
top: (tItem.h.cNode.offsetTop + 5) + "px",
left: (tItem.h.pNode.offsetLeft - 9) + "px",
display: displayType
});
}else{
dojo.style(tItem, {
display: displayType,
top: parseInt(owner.style.top) + topOffset + "px"
});
}
},
checkWidthTaskNameItem: function(taskNameItem){
if(taskNameItem && taskNameItem.offsetWidth + taskNameItem.offsetLeft > this.ganttChart.maxWidthPanelNames){
var width = taskNameItem.offsetWidth + taskNameItem.offsetLeft - this.ganttChart.maxWidthPanelNames,
countChar = Math.round(width / (taskNameItem.offsetWidth / taskNameItem.firstChild.length)),
tName = taskNameItem.id.substring(0, taskNameItem.firstChild.length - countChar - 3);
taskNameItem.innerHTML = tName + "...";
}
},
createPanelOwners: function(){
var panelOwner = dojo.create("div", {
className: "ganttOwnerPanel"
});
dojo.style(panelOwner, {
height: (this.contentHeight - this.ganttChart.panelTimeHeight - this.ganttChart.scrollBarWidth) + "px"
});
return panelOwner;
},
createPanelNamesOwners: function(){
var panelNameOwner = dojo.create("div", {
innerHTML: " ",
className: "ganttResourcePanelNamesOwners"
});
dojo.style(panelNameOwner, {
height: (this.contentHeight - this.ganttChart.panelTimeHeight - this.ganttChart.scrollBarWidth) + "px",
width: this.ganttChart.maxWidthPanelNames + "px"
});
return panelNameOwner;
}
});
}); | PypiClean |
/BrickPi-0.1.3.tar.gz/BrickPi-0.1.3/README.txt | =======
BrickPi
=======
BrickPi is a package that provides access to the BrickPi Raspberry Pi extension board.
The BrickPi extension board is a microprocessor board that allows the Raspberry Pi to
communicate with LEGO Mindstorms motors and sensors. The package provides Python and
Scratch interfaces to the BrickPi.
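For orientation, the underlying Python interface follows the usual BrickPi
pattern of a setup call, a shared BrickPi structure and an update loop. The
sketch below is illustrative only; the port constants and function names are
assumed from the common BrickPi Python API::
from BrickPi import *
BrickPiSetup() # set up the serial link to the board
BrickPi.MotorEnable[PORT_A] = 1 # enable a motor on port MA
BrickPi.SensorType[PORT_1] = TYPE_SENSOR_ULTRASONIC_CONT
BrickPiSetupSensors() # send the sensor configuration to the board
BrickPi.MotorSpeed[PORT_A] = 200 # motor speed value
BrickPiUpdateValues() # exchange values with the board
print(BrickPi.Sensor[PORT_1]) # read the ultrasonic distance
The Scratch interface described below wraps this exchange in a background
thread, so Scratch users do not call these functions directly.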
Scratch interface
=================
The Scratch interface is via a BrickPiScratch class that inherits from GenericDevice,
where GenericDevice is a plugin base class in the RpiScratchIO package.
RpiScratchIO configuration file
-------------------------------
The Scratch interface uses scratchpy via RpiScratchIO. Sensors should be added by
declaring them in the configuration file::
[DeviceTypes]
LEGO = import BrickPi; from BrickPi.BrickPiScratch import BrickPiScratch; BrickPiScratch()
[DeviceConnections]
LEGO = UART0
[BrickPi]
S1 = ULTRASONIC_CONT
MA =
MB =
In this example, one ultrasonic sensor and two motors are attached to the BrickPi.
Motors can be added to the MC or MD ports by declaring them in the same manner. Sensors
can be added by assigning the senors names to the sensor ports (S1 to S4). The available
sensor names are::
TOUCH
ULTRASONIC_CONT
ULTRASONIC_SS
RCX_LIGHT
COLOR_FULL
COLOR_RED
COLOR_GREEN
COLOR_BLUE
COLOR_NONE
I2C
I2C_9V
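For example, a configuration with a touch sensor on S1 and a colour sensor on
S2 would contain (the port assignments here are hypothetical)::
[BrickPi]
S1 = TOUCH
S2 = COLOR_FULL
MA =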
When instantiated, the BrickPiScratch class starts a separate thread to update values
between the BrickPi and the Raspberry Pi at a rate of 10Hz. Values can then be read
from the Raspberry Pi on demand or within the data acquisition loop. To configure the
automatic readout to Scratch during the data acquisition loop, the readout period can be
stated in the configuration file::
LEGO = import BrickPi; from BrickPi.BrickPiScratch import BrickPiScratch; BrickPiScratch(5)
where this line should replace the constructor line in the previous example and the number
5 is the readout period. This means that the sensor or motor encoder values will be
updated in Scratch once for every five readout loops. Since the readout loop runs at
10Hz, this implies that the sensors in Scratch are updated at a rate of 2Hz. For a
simple Scratch program running on the Raspberry Pi, a 2Hz update rate is the maximum
that Scratch can process without a delay.
Sensors or motor encoders can be added to the automatic readout loop by using
the channel number (explained later) or "s" (for all sensors) or "m" (for all motor
encoders) or "all" (for both sensors and motor encoders). The period and sensors can also
be added from Scratch by using the config broadcast command (explained later). To prevent the
automatic update of sensors or motor encoders when Scratch starts, set the readout
period to 0::
LEGO = import BrickPi; from BrickPi.BrickPiScratch import BrickPiScratch; BrickPiScratch(0,"S")
where the active sensor channels have all been added in this case too.
Access from Scratch
-------------------
Start Scratch from the command line or the menu. Then enable the remote sensor
connections by right clicking on the *sensor value* text, which can be found under the
*Sensing* tool palette. A dialog box should appear to say that the remote sensor
connections have been enabled. At this point, Scratch becomes a server. Do not run
more than one Scratch window on the same machine, otherwise only the first one will be
accessible from the Python API. When Scratch has been started, type::
RpiScratchIO configFile.cfg
where *configFile.cfg* should be replaced with the name of the configuration file that
was created in the previous step. If the name of the configuration file is omitted,
then RpiScratchIO will try to use RpiScratchIO.cfg instead.
When RpiScratchIO starts, it loads the BrickPiScratch Python class. This updates
Scratch with several new sensors. Using the example configuration files given above,
the variables are::
LEGO:0
LEGO:1
LEGO:2
LEGO:3
LEGO:10
LEGO:11
LEGO:12
LEGO:13
LEGO:20
LEGO:21
LEGO:22
LEGO:23
where these correspond to the sensor ports S1-S4 (0-3), motor ports MA-MD (10-13) and
motor encoder ports MA-MD (20-23). The motor channels (10-13) contain the value that was
written to the motors. Values can be read into the sensor values on demand by sending a
Scratch broadcast message of the form::
LEGO:read:0
where 0 is the channel number (S1 in this case). The value will then appear in the
corresponding sensor approximately 0.2s later.
Values can be written to the motors by sending a Scratch broadcast request of the form::
LEGO:write:10,200
where 10 is the channel number (MA in this case) and 200 is the motor speed value.
Scratch can be used to enable the automatic updating of enabled sensor values by broadcasting::
LEGO:config:update,s
where the list of channels or wild card options (s for all sensors, m for all motor
encoders or a list of channels separated by spaces), should follow update. The rate of
the update can be set from Scratch by broadcasting::
LEGO:config:period,5
where 5 implies 2Hz and 10 implies 1Hz etc.. To disable the automatic readout, the
period should be set to 0. | PypiClean |
/OWSLib-0.29.2.tar.gz/OWSLib-0.29.2/owslib/owscontext/atom.py | from owslib.etree import etree, ParseError
from owslib import util
from owslib.namespaces import Namespaces
from owslib.util import nspath_eval, element_to_string
from owslib.util import log
from owslib.owscontext.common import is_empty, extract_p, \
try_int, try_float
# default variables
add_namespaces = {"georss": "http://www.georss.org/georss",
"owc": "http://www.opengis.net/owc/1.0",
"xml": "http://www.w3.org/XML/1998/namespace"}
def get_namespaces():
n = Namespaces()
ns = n.get_namespaces(["atom", "dc", "gml", "gml32", "xlink"])
ns.update(add_namespaces)
ns[None] = n.get_namespace("atom")
return ns
ns = get_namespaces()
def nspv(path):
"""
short-hand syntax seen in waterml2.py
:param path: xpath namespace aware
:return: xml element
"""
return nspath_eval(path, ns)
def ns_elem(ns_prefix, elem_name):
ns_uri = ns.get(ns_prefix)
if ns_uri is not None:
return """{%(ns_uri)s}%(elem_name)s""" % {"ns_uri": ns_uri,
"elem_name": elem_name}
def parse_owc_content(content_node):
mimetype = util.testXMLAttribute(content_node, 'type')
url = util.testXMLAttribute(content_node, 'href')
title = util.testXMLAttribute(content_node, 'title')
child_elem = None
if len(list(content_node)) > 0:
child_elem = element_to_string(
list(content_node)[0], False)
content_dict = {
"type": mimetype,
"url": url,
"content": str(child_elem),
"title": title
}
return content_dict
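# Illustrative result for <owc:content type="image/tiff" href="http://..."/>
# (the attribute values here are examples):
# {"type": "image/tiff", "url": "http://...", "content": None, "title": None}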
def parse_entry(entry_node):
"""
parse an atom entry into a feature/resource dict to build OwcResource from
:param entry_node: xml element root node of the atom:entry
:return: dictionary for OwcResource.from_dict()
"""
resource_base_dict = {
"type": "Feature",
"id": None,
"geometry": None,
"properties": {
'title': None,
'abstract': None,
'updated': None,
'date': None,
'authors': [],
'publisher': None,
'rights': None,
'categories': [],
"links": {
"alternates": [],
"previews": [],
"data": [],
"via": [],
},
'offerings': [],
'active': None,
'minscaledenominator': None,
'maxscaledenominator': None,
'folder': None
}
}
# <id>ftp://ftp.remotesensing.org/pub/geotiff/samples/gdal_eg/cea.txt</id>
val = entry_node.find(util.nspath_eval('atom:id', ns))
id = util.testXMLValue(val)
# log.debug("entry :id %s :: %s", id, val)
resource_base_dict.update({"id": id})
# <title>GeoTIFF Example</title>
val = entry_node.find(util.nspath_eval('atom:title', ns))
title = util.testXMLValue(val)
# log.debug("entry: title %s :: %s", id, val)
resource_base_dict['properties'].update({"title": title})
# <updated>2011-11-01T00:00:00Z</updated>
val = entry_node.find(util.nspath_eval('atom:updated', ns))
update_date = util.testXMLValue(val)
# log.debug("entry: updated %s :: %s", update_date, val)
resource_base_dict['properties'].update({"updated": update_date})
# <dc:publisher>
val = entry_node.find(util.nspath_eval('dc:publisher', ns))
publisher = util.testXMLValue(val)
# log.debug("entry: dc:publisher %s :: %s", publisher, val)
resource_base_dict['properties'].update({"publisher": publisher})
# <dc:rights>
val = entry_node.find(util.nspath_eval('dc:rights', ns))
rights = util.testXMLValue(val)
# log.debug("entry: rights %s :: %s", rights, val)
resource_base_dict['properties'].update({"rights": rights})
# <georss:where>
val = entry_node.find(util.nspath_eval('georss:where', ns))
if val is not None:
if len(list(val)) > 0:
# xmltxt = etree.tostring(
# list(val)[0], encoding='utf8', method='xml')
xmltxt = element_to_string(
list(val)[0], False)
# TODO here parse geometry??
# log.debug("entry: geometry %s :: %s", xmltxt, val)
resource_base_dict.update({"geometry": xmltxt.decode('utf-8')})
# <content type = "text" > aka subtitle, aka abstract
val = entry_node.find(util.nspath_eval('atom:content', ns))
subtitle = util.testXMLValue(val)
# log.debug("entry: subtitle %s :: %s", subtitle, val)
resource_base_dict['properties'].update({"abstract": subtitle})
# <author> ..
# <name>
# <email>
vals = entry_node.findall(util.nspath_eval('atom:author', ns))
authors = []
for val in vals:
val_name = val.find(util.nspath_eval('atom:name', ns))
val_email = val.find(util.nspath_eval('atom:email', ns))
val_uri = val.find(util.nspath_eval('atom:uri', ns))
name = util.testXMLValue(val_name)
email = util.testXMLValue(val_email)
uri = util.testXMLValue(val_uri)
author = {
"name": name,
"email": email,
"uri": uri
}
# log.debug("entry: author %s :: %s", author, vals)
if not is_empty(author):
authors.append(author)
resource_base_dict['properties'].update({"authors": authors})
# <link rel="enclosure" type="image/png"
# length="12345" title="..." href="http://o..."/>
# <link rel="icon" type="image/png" title="Preview f..."
# href="http://..."/>
# <link rel="via" type="application/vnd.ogc.wms_xml"
# title="Original .." href="...."/>
vals = entry_node.findall(util.nspath_eval('atom:link', ns))
links_alternates = []
links_previews = []
links_data = []
links_via = []
for val in vals:
rel = util.testXMLAttribute(val, 'rel')
href = util.testXMLAttribute(val, 'href')
mimetype = util.testXMLAttribute(val, 'type')
lang = util.testXMLAttribute(val, 'lang')
title = util.testXMLAttribute(val, 'title')
length = util.testXMLAttribute(val, 'length')
link = {
"href": href,
"type": mimetype,
"length": length,
"lang": lang,
"title": title,
"rel": rel
}
# log.debug("entry: link %s :: %s", link, vals)
if link.get("rel") == "alternate" and not is_empty(link):
links_alternates.append(link)
elif link.get("rel") == "icon" and not is_empty(link):
links_previews.append(link)
elif link.get("rel") == "enclosure" and not is_empty(link):
links_data.append(link)
elif link.get("rel") == "via" and not is_empty(link):
links_via.append(link)
else:
log.warning(
"unknown link type in Ows Resource entry section: %r", link)
resource_base_dict['properties']['links'].update(
{"alternates": links_alternates})
resource_base_dict['properties']['links'].update(
{"previews": links_previews})
resource_base_dict['properties']['links'].update({"data": links_data})
resource_base_dict['properties']['links'].update({"via": links_via})
# <owc:offering code="http://www.opengis.net/spec/owc-at...">
# <owc:content type="image/tiff" href=".."
# <owc:offering code="http://www.opengis.net/spec....l">
# <owc:content type="application/gml+xml">
# <owc:operation code="GetCapabilities" method="GET"
# type="applica..." href="..."
# <owc:request type="application/xml"> ..
# <owc:styleSet>
# <owc:name>raster</owc:name>
# <owc:title>Default Raster</owc:title>
# <owc:abstract>A sample style that draws a </owc:abstract>
# <owc:legendURL href="h...." type="image/png"/>
# </owc:styleSet>
offering_nodes = entry_node.findall(util.nspath_eval('owc:offering', ns))
offerings = []
for offering_node in offering_nodes:
offering_code = util.testXMLAttribute(offering_node, 'code')
operations = []
contents = []
styles = []
operation_nodes = offering_node.findall(
util.nspath_eval('owc:operation', ns))
for op_val in operation_nodes:
operations_code = util.testXMLAttribute(op_val, 'code')
http_method = util.testXMLAttribute(op_val, 'method')
mimetype = util.testXMLAttribute(op_val, 'type')
request_url = util.testXMLAttribute(op_val, 'href')
req_content_val = op_val.find(util.nspath_eval('owc:request', ns))
req_content = None
if req_content_val is not None:
req_content = parse_owc_content(req_content_val)
# TODO no example for result/response
op_dict = {
"code": operations_code,
"method": http_method,
"type": mimetype,
"href": request_url,
"request": None if is_empty(req_content) else req_content,
"result": None
}
# log.debug("entry: operation %s :: %s", op_dict, vals)
if not is_empty(op_dict):
operations.append(op_dict)
content_nodes = offering_node.findall(
util.nspath_eval('owc:content', ns))
for cont_val in content_nodes:
content_dict = parse_owc_content(cont_val)
# log.debug("entry: content_dict %s :: %s", content_dict, vals)
if not is_empty(content_dict):
contents.append(content_dict)
style_nodes = offering_node.findall(
util.nspath_eval('owc:styleSet', ns))
for style_val in style_nodes:
val_name = style_val.find(util.nspath_eval('owc:name', ns))
val_title = style_val.find(util.nspath_eval('owc:title', ns))
val_abstr = style_val.find(util.nspath_eval('owc:abstract', ns))
val_uri = style_val.find(util.nspath_eval('owc:legendURL', ns))
name = util.testXMLValue(val_name)
title = util.testXMLValue(val_title)
abstr = util.testXMLValue(val_abstr)
legend_url = util.testXMLAttribute(val_uri, 'href')
style_set = {
"name": name,
"title": title,
"abstract": abstr,
"default": None,
"legendURL": legend_url,
"content": None
}
# log.debug("entry: style_set %s :: %s", style_set, vals)
if not is_empty(style_set):
styles.append(style_set)
offering_dict = {
"code": offering_code,
"operations": operations,
"contents": contents,
"styles": styles
}
if offering_code is not None:
offerings.append(offering_dict)
resource_base_dict['properties'].update(
{"offerings": offerings})
# TODO no examples for active attribute
# <owc:minScaleDenominator>2500</owc:minScaleDenominator>
val = entry_node.find(util.nspath_eval('owc:minScaleDenominator', ns))
min_scale_denominator = util.testXMLValue(val)
# log.debug("entry: min-scale-... %s :: %s", min_scale_denominator, val)
resource_base_dict['properties'].update(
{"minscaledenominator": min_scale_denominator})
# <owc:maxScaleDenominator>25000</owc:maxScaleDenominator>
val = entry_node.find(util.nspath_eval('owc:maxScaleDenominator', ns))
max_scale_denominator = util.testXMLValue(val)
# log.debug("entry: max_scale_... %s :: %s", max_scale_denominator, val)
resource_base_dict['properties'].update(
{"maxscaledenominator": max_scale_denominator})
# TODO no examples for folder attribute
return resource_base_dict
def decode_atomxml(xml_string):
"""
parse atom XML into a dict for instantiating an OwcContext
:param xml_string: atom XML encoding of an OWS Context document
:return: OwcContext-ready dict
"""
context_base_dict = {
"type": "FeatureCollection",
"id": None,
"bbox": None,
"properties": {
"lang": None,
"links": {
"profiles": [],
"via": [],
},
'title': None,
'abstract': None,
'updated': None,
'authors': [],
'publisher': None,
'generator': None,
'display': None,
'rights': None,
'date': None,
'categories': [],
},
'features': []
}
feed_root = etree.fromstring(xml_string)
# feed_root = etree.parse(xml_bytes)
# feed_root xml lang use?
# # log.debug(feed_root)
# feed xml:lang=en
# lang = feed_root.get('{http://www.w3.org/XML/1998/namespace}lang')
lang = util.testXMLAttribute(
feed_root, '{http://www.w3.org/XML/1998/namespace}lang')
# log.debug("lang %s ", lang)
context_base_dict['properties'].update({"lang": lang})
# <id>
val = feed_root.find(util.nspath_eval('atom:id', ns))
id = util.testXMLValue(val)
# log.debug("id %s :: %s", id, val)
context_base_dict.update({"id": id})
# <link rel="profile"
# href="http://www.opengis.net/spec/owc-atom/1.0/req/core"
# title="compliant bla bla"
# < link rel = "via" type = "application/xml" href = "..." title = "..."
vals = feed_root.findall(util.nspath_eval('atom:link', ns))
links_profile = []
links_via = []
for val in vals:
rel = util.testXMLAttribute(val, 'rel')
href = util.testXMLAttribute(val, 'href')
mimetype = util.testXMLAttribute(val, 'type')
lang = util.testXMLAttribute(val, 'lang')
title = util.testXMLAttribute(val, 'title')
length = util.testXMLAttribute(val, 'length')
link = {
"href": href,
"type": mimetype,
"length": length,
"lang": lang,
"title": title,
"rel": rel
}
# log.debug("link %s :: %s", link, vals)
if link.get("rel") == "profile" and not is_empty(link):
links_profile.append(link)
elif link.get("rel") == "via" and not is_empty(link):
links_via.append(link)
else:
log.warning("unknown link type in Ows Context section: %r", link)
context_base_dict['properties']['links'].update(
{"profiles": links_profile})
context_base_dict['properties']['links'].update({"via": links_via})
# <title>
val = feed_root.find(util.nspath_eval('atom:title', ns))
title = util.testXMLValue(val)
# log.debug("title %s :: %s", title, val)
context_base_dict['properties'].update({"title": title})
# <subtitle type = "html"
val = feed_root.find(util.nspath_eval('atom:subtitle', ns))
subtitle = util.testXMLValue(val)
# log.debug("subtitle %s :: %s", subtitle, val)
context_base_dict['properties'].update({"abstract": subtitle})
# <author> ..
# <name>
# <email>
vals = feed_root.findall(util.nspath_eval('atom:author', ns))
authors = []
for val in vals:
val_name = val.find(util.nspath_eval('atom:name', ns))
val_email = val.find(util.nspath_eval('atom:email', ns))
val_uri = val.find(util.nspath_eval('atom:uri', ns))
name = util.testXMLValue(val_name)
email = util.testXMLValue(val_email)
uri = util.testXMLValue(val_uri)
author = {
"name": name,
"email": email,
"uri": uri
}
# log.debug("author %s :: %s", author, vals)
if not is_empty(author):
authors.append(author)
context_base_dict['properties'].update({"authors": authors})
# <georss:where>
val = feed_root.find(util.nspath_eval('georss:where', ns))
if val is not None:
if len(list(val)) > 0:
xmltxt = element_to_string(
list(val)[0], False)
# log.debug("geometry %s :: %s", xmltxt, val)
context_base_dict['properties'].update({"bbox": xmltxt.decode('utf-8')})
# <updated>2012-11-04T17:26:23Z</updated>
val = feed_root.find(util.nspath_eval('atom:updated', ns))
update_date = util.testXMLValue(val)
# log.debug("updated %s :: %s", update_date, val)
context_base_dict['properties'].update({"updated": update_date})
# <dc:date>2009-01-23T09:08:56.000Z/2009-01-23T09:14:08.000Z</dc:date>
val = feed_root.find(util.nspath_eval('dc:date', ns))
time_interval_of_interest = util.testXMLValue(val)
# log.debug("dc:date %s :: %s", time_interval_of_interest, val)
context_base_dict['properties'].update(
{"date": time_interval_of_interest})
# <rights>
val = feed_root.find(util.nspath_eval('atom:rights', ns))
rights = util.testXMLValue(val)
# log.debug("rights %s :: %s", rights, val)
context_base_dict['properties'].update({"rights": rights})
# <dc:publisher>
val = feed_root.find(util.nspath_eval('dc:publisher', ns))
publisher = util.testXMLValue(val)
# log.debug("dc:publisher %s :: %s", publisher, val)
context_base_dict['properties'].update({"publisher": publisher})
# <owc:display>
# <owc:pixelWidth>
val_display = feed_root.find(util.nspath_eval('owc:display', ns))
val_pixel_width = None if val_display is None \
else val_display.find(util.nspath_eval('owc:pixelWidth', ns))
val_pixel_height = None if val_display is None \
else val_display.find(util.nspath_eval('owc:pixelHeight', ns))
val_mm_per_pixel = None if val_display is None \
else val_display.find(util.nspath_eval('owc:mmPerPixel', ns))
pixel_width = util.testXMLValue(val_pixel_width)
pixel_height = util.testXMLValue(val_pixel_height)
mm_per_pixel = util.testXMLValue(val_mm_per_pixel)
owc_display = {
"pixelWidth": pixel_width,
"pixelHeight": pixel_height,
"mmPerPixel": mm_per_pixel
}
# log.debug("display %s :: %s", owc_display, val_display)
if not is_empty(owc_display):
context_base_dict['properties'].update({"display": owc_display})
# <generator uri="http://w.." version="1.0">MiraMon</generator>
val = feed_root.find(util.nspath_eval('atom:generator', ns))
name = util.testXMLValue(val)
version = util.testXMLAttribute(val, 'version')
uri = util.testXMLAttribute(val, 'uri')
owc_generator = {
"name": name,
"version": version,
"uri": uri
}
# log.debug("generator %s :: %s", owc_generator, val)
if not is_empty(owc_generator):
context_base_dict['properties'].update({"generator": owc_generator})
# <category term="maps" label="This file contains maps"/>
vals = feed_root.findall(util.nspath_eval('atom:category', ns))
categories = []
for val in vals:
term = util.testXMLAttribute(val, 'term')
scheme = util.testXMLAttribute(val, 'scheme')
label = util.testXMLAttribute(val, 'label')
category = {
"term": term,
"scheme": scheme,
"label": label
}
# log.debug("category %s :: %s", category, vals)
if not is_empty(category):
categories.append(category)
context_base_dict['properties'].update({"categories": categories})
# <entry> ...
entries = feed_root.findall(util.nspath_eval('atom:entry', ns))
resources = []
for entry in entries:
entry_dict = parse_entry(entry)
if entry_dict.get("id") is not None:
resources.append(entry_dict)
else:
log.warning("feature entry has no id, not allowed: skipping!")
context_base_dict.update({"features": resources})
return context_base_dict
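# Illustrative use (the file name is hypothetical):
# with open("owc_context.atom.xml", "rb") as f:
#     context_dict = decode_atomxml(f.read())
# context_dict["features"] then holds the resource dicts built by parse_entry().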
def encode_atomxml(obj_d):
"""
encode an OwcContext dict into the atom XML encoding; this works on the
dict form (rather than an OwcContext instance) because circular imports
prevent importing the class here
:param obj_d: the dict produced from an OwcContext
:return: b'atomxml'
"""
# try:
# xml_tree = axml_context(obj_d)
# tree = etree.ElementTree(xml_tree)
# return tree
# except TypeError as te:
# log.warning('problem encoding context to xml', te)
# raise te
# except AttributeError as ae:
# log.warning('problem encoding context to xml', ae)
# raise ae
# except ValueError as ve:
# log.warning('problem encoding context to xml', ve)
# raise ve
# except ParseError as pe:
# log.warning('problem encoding context to xml', pe)
# raise pe
xml_tree = axml_context(obj_d)
tree = etree.ElementTree(xml_tree)
return element_to_string(tree, encoding='utf-8', xml_declaration=False)
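# Illustrative round trip: encode_atomxml(decode_atomxml(xml_bytes)) should
# produce an equivalent feed, modulo element ordering and whitespace.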
def axml_context(d):
"""
encodes base OwcContext as dict to atom xml tree
:param d:
:return:
"""
xml = etree.Element("feed", nsmap=ns)
etree.SubElement(xml, "id").text = d['id']
spec_reference = [axml_link(do) for do in
extract_p('properties.links.profiles', d, [])]
[xml.append(el) for el in spec_reference if el is not None]
area_of_interest = extract_p('bbox', d, None)
if area_of_interest is not None:
try:
gml = etree.fromstring(area_of_interest)
georss = etree.SubElement(xml, ns_elem("georss", "where"))
georss.append(gml)
except Exception as ex:
log.warning('could not encode bbox into georss:where: %s', ex)
pass
context_metadata = [axml_link(do) for do in
extract_p('properties.links.via', d, [])]
[xml.append(el) for el in context_metadata if el is not None]
language = extract_p('properties.lang', d, None)
if language is not None:
xml.set(ns_elem("xml", "lang"), language)
title = extract_p('properties.title', d, None)
if title is not None:
etree.SubElement(xml, "title").text = title
# <subtitle type = "html"
subtitle = extract_p('properties.abstract', d, None)
if subtitle is not None:
etree.SubElement(xml, "subtitle").text = subtitle
update_date = extract_p('properties.updated', d, None)
if update_date is not None:
etree.SubElement(xml, "updated").text = update_date
authors = [axml_author(do) for do in extract_p('properties.authors', d, [])]
[xml.append(el) for el in authors if el is not None]
publisher = extract_p('properties.publisher', d, None)
if publisher is not None:
etree.SubElement(xml, ns_elem("dc", "publisher")).text = publisher
creator_application = axml_creator_app(extract_p('properties.generator', d, None))
if creator_application is not None and not is_empty(creator_application):
xml.append(creator_application)
creator_display = axml_display(extract_p('properties.display', d, None))
if creator_display is not None:
xml.append(creator_display)
rights = extract_p('properties.rights', d, None)
if rights is not None:
etree.SubElement(xml, "rights").text = rights
time_interval_of_interest = extract_p('properties.date', d, None)
if time_interval_of_interest is not None:
etree.SubElement(xml, ns_elem("dc", "date")).text = time_interval_of_interest
keywords = [axml_category(do) for do in
extract_p('properties.categories', d, [])]
[xml.append(el) for el in keywords if el is not None]
# here we generate single elements and attach them
resources = [axml_resource(do) for do in
extract_p('features', d, [])]
[xml.append(el) for el in resources if el is not None]
return xml
def axml_resource(d):
"""
encodes an OwcResource as dict into atom xml tree
:param d:
:return:
"""
entry = etree.Element("entry", nsmap=ns)
etree.SubElement(entry, "id").text = d['id']
geospatial_extent = extract_p('geometry', d, None)
if geospatial_extent is not None:
try:
gml = etree.fromstring(geospatial_extent)
georss = etree.SubElement(entry, ns_elem("georss", "where"))
georss.append(gml)
except Exception as ex:
log.warning('could not encode geometry into georss:where: %s', ex)
pass
title = d['properties']['title']
if title is not None:
etree.SubElement(entry, "title").text = title
subtitle = extract_p('properties.abstract', d, None)
# <content type = "text" >
if subtitle is not None:
etree.SubElement(entry, "content").text = subtitle
update_date = extract_p('properties.updated', d, None)
if update_date is not None:
etree.SubElement(entry, "updated").text = update_date
authors = [axml_author(do) for do in
extract_p('properties.authors', d, [])]
[entry.append(el) for el in authors if el is not None]
publisher = extract_p('properties.publisher', d, None)
if publisher is not None:
etree.SubElement(entry, ns_elem("dc", "publisher")).text = publisher
rights = extract_p('properties.rights', d, None)
if rights is not None:
etree.SubElement(entry, ns_elem("dc", "rights")).text = rights
temporal_extent = extract_p('properties.date', d, None)
if temporal_extent is not None:
etree.SubElement(entry, "date").text = temporal_extent
keywords = [axml_category(do) for do in
extract_p('properties.categories', d, [])]
[entry.append(el) for el in keywords if el is not None]
resource_metadata = [axml_link(do) for do in
extract_p('properties.links.via', d, [])]
[entry.append(el) for el in resource_metadata if el is not None]
content_description = [axml_content(do)
for do in extract_p('properties.links.alternates', d, [])]
[entry.append(el) for el in content_description if el is not None]
preview = [axml_link(do) for do in
extract_p('properties.links.previews', d, [])]
[entry.append(el) for el in preview if el is not None]
content_by_ref = [axml_link(do) for do in
extract_p('properties.links.data', d, [])]
[entry.append(el) for el in content_by_ref if el is not None]
offerings = [axml_offering(do) for do in
extract_p('properties.offerings', d, [])]
[entry.append(el) for el in offerings if el is not None]
# TODO no examples for active attribute
active = extract_p('properties.active', d, None)
if active is not None:
etree.SubElement(entry, "active").text = active
min_scale_denominator = try_float(extract_p(
'properties.minscaledenominator', d, None))
# <owc:minScaleDenominator>2500</owc:minScaleDenominator>
if min_scale_denominator is not None:
etree.SubElement(entry, ns_elem(
"owc", "minScaleDenominator")).text = str(min_scale_denominator)
max_scale_denominator = try_float(extract_p(
'properties.maxscaledenominator', d, None))
# <owc:maxScaleDenominator>25000</owc:maxScaleDenominator>
if max_scale_denominator is not None:
etree.SubElement(entry, ns_elem(
"owc", "maxScaleDenominator")).text = str(max_scale_denominator)
# TODO no examples for folder attribute
folder = extract_p('properties.folder', d, None)
if folder is not None:
etree.SubElement(entry, "folder").text = folder
# xml.append(entry)
return entry
def axml_creator_app(d):
# <generator uri="http://w.." version="1.0">MiraMon</generator>
if is_empty(d):
return None
else:
try:
creator_app = etree.Element("generator", nsmap=ns)
title = extract_p('title', d, None)
if title is not None:
creator_app.text = title
uri = extract_p('uri', d, None)
if uri is not None:
creator_app.set("uri", uri)
version = extract_p('version', d, None)
if version is not None:
creator_app.set("version", version)
return creator_app
except Exception as ex:
log.warning('could not encode creator_app: %s', ex)
return None
def axml_display(d):
# <owc:display>
# <owc:pixelWidth>
if is_empty(d):
return None
else:
try:
creator_display = etree.Element(ns_elem("owc", "display"), nsmap=ns)
pixel_width = try_int(extract_p('pixelWidth', d, None))
if pixel_width is not None:
etree.SubElement(creator_display, ns_elem(
"owc", "pixelWidth")).text = str(pixel_width)
pixel_height = try_int(extract_p('pixelHeight', d, None))
if pixel_height is not None:
etree.SubElement(creator_display, ns_elem(
"owc", "pixelHeight")).text = str(pixel_height)
mm_per_pixel = try_float(extract_p('mmPerPixel', d, None))
if mm_per_pixel is not None:
etree.SubElement(creator_display, ns_elem(
"owc", "mmPerPixel")).text = str(mm_per_pixel)
return creator_display
except Exception as ex:
log.warning('could not encode creator_display: %s', ex)
return None
def axml_link(d):
# < link rel = "via" type = "application/xml" href = "..." title = "..."
if is_empty(d):
return None
else:
try:
link = etree.Element("link", nsmap=ns)
href = extract_p('href', d, None)
if href is not None:
link.set("href", href)
rel = extract_p('rel', d, None)
if rel is not None:
link.set("rel", rel)
mimetype = extract_p('type', d, None)
if mimetype is not None:
link.set("type", mimetype)
lang = extract_p('lang', d, None)
if lang is not None:
link.set("lang", lang)
title = extract_p('title', d, None)
if title is not None:
link.set("title", title)
length = try_int(extract_p('length', d, None))
if length is not None:
link.set("length", str(length))
return link
except Exception as ex:
log.warning('could not encode link: %s', ex)
return None
def axml_category(d):
# <category term="maps" label="This file contains maps"/>
if is_empty(d):
return None
else:
try:
category = etree.Element("category", nsmap=ns)
term = extract_p('term', d, None)
if term is not None:
category.set("term", term)
scheme = extract_p('scheme', d, None)
if scheme is not None:
category.set("scheme", scheme)
label = extract_p('label', d, None)
if label is not None:
category.set("label", label)
return category
except Exception as ex:
log.warning('could not encode category: %s', ex)
return None
def axml_author(d):
# <author> ..
# <name>
# <email>
if is_empty(d):
return None
else:
try:
author = etree.Element("author", nsmap=ns)
name = extract_p('name', d, None)
if name is not None:
etree.SubElement(author, "name").text = name
email = extract_p('email', d, None)
if email is not None:
etree.SubElement(author, "email").text = email
uri = extract_p('uri', d, None)
if uri is not None:
etree.SubElement(author, "uri").text = uri
return author
except Exception as ex:
log.warning('could not encode author: %s', ex)
return None
def axml_offering(d):
# <owc:offering code="http://www.opengis.net/spec/owc-at...">
# <owc:offering code="http://www.opengis.net/spec....l">
# <owc:content type="application/gml+xml">
if is_empty(d):
return None
else:
try:
offering_code = extract_p('code', d, None)
offering = etree.Element(ns_elem("owc", "offering"), attrib={"code": offering_code}, nsmap=ns)
# use axml_operation here
operations = [axml_operation(do) for do in
extract_p('operations', d, [])]
[offering.append(el) for el in operations if el is not None]
# use axml_content here
contents = [axml_content(do) for do in
extract_p('contents', d, [])]
[offering.append(el) for el in contents if el is not None]
# use axml_styeset here
styles = [axml_styleset(do) for do in
extract_p('styles', d, [])]
[offering.append(el) for el in styles if el is not None]
return offering
except Exception as ex:
log.warning('could not encode offering: %s', ex)
return None
def axml_operation(d):
# <owc:operation code="GetCapabilities" method="GET"
# type="applica..." href="..."
# <owc:request type="application/xml"> ..
# etree.SubElement(entry, ns_elem("owc", "offering"), name="blah").text = "some value1"
if is_empty(d):
return None
else:
try:
operation = etree.Element(ns_elem("owc", "operation"), nsmap=ns)
operations_code = extract_p('code', d, None)
if operations_code is not None:
operation.set("code", operations_code)
http_method = extract_p('method', d, None)
if http_method is not None:
operation.set("method", http_method)
mimetype = extract_p('type', d, None)
if mimetype is not None:
operation.set("type", mimetype)
request_url = extract_p('href', d, None)
if request_url is not None:
operation.set("href", request_url)
# use axml_content here
request = extract_p('request', d, None)
request_enc = None if request is None else axml_content(request)
if request_enc is not None:
operation.append(request_enc)
# use axml_content here
result = extract_p('result', d, None)
result_enc = None if result is None else axml_content(result)
if result_enc is not None:
operation.append(result_enc)
return operation
except Exception as ex:
log.warning('could not encode operation: %s', ex)
return None
def axml_styleset(d):
# <owc:styleSet>
# <owc:name>raster</owc:name>
# <owc:title>Default Raster</owc:title>
# <owc:abstract>A sample style that draws a </owc:abstract>
# <owc:legendURL href="h...." type="image/png"/>
# </owc:styleSet>
if is_empty(d):
return None
else:
try:
styleset = etree.Element(ns_elem("owc", "styleSet"), nsmap=ns)
name = extract_p('name', d, None)
if name is not None:
etree.SubElement(styleset, ns_elem("owc", "name")).text = name
title = extract_p('title', d, None)
if title is not None:
etree.SubElement(styleset, ns_elem("owc", "title")).text = title
subtitle = extract_p('abstract', d, None)
if subtitle is not None:
etree.SubElement(styleset, ns_elem("owc", "abstract")).text = subtitle
is_default = extract_p('default', d, None)
# TODO no example for default setting on style set
if is_default is not None:
etree.SubElement(styleset, ns_elem("owc", "default")).text = is_default
legend_url = extract_p('legendURL', d, None)
if legend_url is not None:
etree.SubElement(styleset, ns_elem("owc", "legendURL")).text = legend_url
# TODO no example for content on style set
content = extract_p('content', d, None)
content_enc = None if content is None else axml_content(content)
if content_enc is not None:
styleset.append(content_enc)
return styleset
except Exception as ex:
log.warning('could not encode styleset: %s', ex)
return None
def axml_content(d):
"""
OwcContent dict to Atom XML
:param d:
:return:
"""
# <owc:content type="image/tiff" href=".."
if is_empty(d):
return None
else:
try:
content_elem = etree.Element(ns_elem("owc", "content"), nsmap=ns)
mimetype = extract_p('type', d, None)
if mimetype is not None:
content_elem.set("type", mimetype)
url = extract_p('url', d, None)
if url is not None:
content_elem.set("href", url)
title = extract_p('title', d, None)
if title is not None:
content_elem.set("title", title)
content = extract_p('content', d, None)
if content is not None:
content_elem.text = content
return content_elem
except Exception as ex:
log.warning('could not encode content: %s', ex)
return None | PypiClean |
/Nuitka_fixed-1.1.2-cp310-cp310-win_amd64.whl/nuitka/code_generation/ComparisonCodes.py | from nuitka.nodes.shapes.BuiltinTypeShapes import tshape_bool
from nuitka.nodes.shapes.StandardShapes import tshape_unknown
from nuitka.PythonOperators import (
comparison_inversions,
rich_comparison_arg_swaps,
)
from .c_types.CTypeBooleans import CTypeBool
from .c_types.CTypeNuitkaBooleans import CTypeNuitkaBoolEnum
from .c_types.CTypeNuitkaVoids import CTypeNuitkaVoidEnum
from .c_types.CTypePyObjectPointers import CTypePyObjectPtr
from .CodeHelpers import generateExpressionCode
from .CodeHelperSelection import selectCodeHelper
from .ComparisonHelperDefinitions import (
getNonSpecializedComparisonOperations,
getSpecializedComparisonOperations,
rich_comparison_codes,
rich_comparison_subset_codes,
)
from .ErrorCodes import (
getErrorExitBoolCode,
getErrorExitCode,
getReleaseCode,
getReleaseCodes,
)
from .ExpressionCTypeSelectionHelpers import decideExpressionCTypes
def _handleArgumentSwapAndInversion(
comparator, needs_argument_swap, left_c_type, right_c_type
):
needs_result_inversion = False
if needs_argument_swap:
comparator = rich_comparison_arg_swaps[comparator]
else:
# Same types, we can swap too, but this time to avoid the comparator variety.
if (
left_c_type is right_c_type
and comparator not in rich_comparison_subset_codes
):
needs_result_inversion = True
comparator = comparison_inversions[comparator]
return comparator, needs_result_inversion
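# Illustrative behavior, assuming conventional swap/inversion tables:
# "a > b" with swapped argument order uses the "<" helper as "b < a", while
# for identical C types "a > b" may instead be computed by inverting the
# result of "a <= b", reducing the variety of helpers that must exist.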
def getRichComparisonCode(
to_name, comparator, left, right, needs_check, source_ref, emit, context
):
# This is detail rich stuff, encoding the complexity of what helpers are
# available, and can be used as a fallback.
# pylint: disable=too-many-branches,too-many-locals,too-many-statements
# TODO: Move the value_name to a context generator, then this will be
# a bit less complex.
(
unknown_types,
needs_argument_swap,
left_shape,
right_shape,
left_c_type,
right_c_type,
) = decideExpressionCTypes(left=left, right=right, may_swap_arguments="always")
if unknown_types:
assert not needs_argument_swap
needs_result_inversion = False
else:
# Same types, we can swap too, but this time to avoid the comparator variety.
comparator, needs_result_inversion = _handleArgumentSwapAndInversion(
comparator, needs_argument_swap, left_c_type, right_c_type
)
# If a more specific C type than "PyObject *" was picked, we can use that to select the helper.
helper_type = target_type = to_name.getCType()
if needs_check:
# If an exception may occur, there are no "NVOID" helpers though; we
# can instead use the CTypeNuitkaBoolEnum, which converts easily to
# it.
if helper_type is CTypeNuitkaVoidEnum:
helper_type = CTypeNuitkaBoolEnum
# Need to represent the intermediate exception, so we cannot have that.
if helper_type is CTypeBool:
helper_type = CTypeNuitkaBoolEnum
report_missing = True
else:
# If no exception can occur, do not require a helper that can indicate
# one, but use the one that produces simpler code; this means we can
# avoid the CTypeNuitkaBoolEnum (NBOOL) helpers except for things that
# can really raise. Once we have expressions whose raising depends on
# the runtime value, this will get us into trouble, due to using a
# fallback.
helper_type = CTypeBool
report_missing = False
specialized_helpers_set = getSpecializedComparisonOperations()
non_specialized_helpers_set = getNonSpecializedComparisonOperations()
prefix = "RICH_COMPARE_" + rich_comparison_codes[comparator]
helper_type, helper_function = selectCodeHelper(
prefix=prefix,
specialized_helpers_set=specialized_helpers_set,
non_specialized_helpers_set=non_specialized_helpers_set,
result_type=helper_type,
left_shape=left_shape,
right_shape=right_shape,
left_c_type=left_c_type,
right_c_type=right_c_type,
argument_swap=needs_argument_swap,
report_missing=report_missing,
source_ref=source_ref,
)
# If we failed to find CTypeBool, that should be OK.
if helper_function is None and not report_missing:
helper_type, helper_function = selectCodeHelper(
prefix=prefix,
specialized_helpers_set=specialized_helpers_set,
non_specialized_helpers_set=non_specialized_helpers_set,
result_type=CTypeNuitkaBoolEnum,
left_shape=left_shape,
right_shape=right_shape,
left_c_type=left_c_type,
right_c_type=right_c_type,
argument_swap=needs_argument_swap,
report_missing=True,
source_ref=source_ref,
)
# print("PICKED", left, right, left_c_type, right_c_type, helper_function)
if helper_function is None:
# Give up and warn about it.
left_c_type = CTypePyObjectPtr
right_c_type = CTypePyObjectPtr
helper_type, helper_function = selectCodeHelper(
prefix=prefix,
specialized_helpers_set=specialized_helpers_set,
non_specialized_helpers_set=non_specialized_helpers_set,
result_type=helper_type,
left_shape=tshape_unknown,
right_shape=tshape_unknown,
left_c_type=left_c_type,
right_c_type=right_c_type,
argument_swap=needs_argument_swap,
report_missing=True,
source_ref=source_ref,
)
assert helper_function is not None, (to_name, left_shape, right_shape)
left_name = context.allocateTempName("cmp_expr_left", type_name=left_c_type.c_type)
right_name = context.allocateTempName(
"cmp_expr_right", type_name=right_c_type.c_type
)
generateExpressionCode(
to_name=left_name, expression=left, emit=emit, context=context
)
generateExpressionCode(
to_name=right_name, expression=right, emit=emit, context=context
)
if needs_argument_swap:
arg1_name = right_name
arg2_name = left_name
else:
arg1_name = left_name
arg2_name = right_name
# May need to convert return value.
if helper_type is not target_type:
value_name = context.allocateTempName(
to_name.code_name + "_" + helper_type.helper_code.lower(),
type_name=helper_type.c_type,
unique=to_name.code_name == "tmp_unused",
)
else:
value_name = to_name
emit(
"%s = %s(%s, %s);"
% (
value_name,
helper_function,
arg1_name,
arg2_name,
)
)
if value_name.getCType().hasErrorIndicator():
getErrorExitCode(
check_name=value_name,
release_names=(left_name, right_name),
needs_check=needs_check,
emit=emit,
context=context,
)
else:
# Otherwise we picked the wrong kind of helper.
assert not needs_check, (
to_name,
left_shape,
right_shape,
helper_function,
value_name.getCType(),
)
getReleaseCodes(
release_names=(left_name, right_name), emit=emit, context=context
)
# TODO: Depending on operation, we could not produce a reference, if result *must*
# be boolean, but then we would have some helpers that do it, and some that do not
# do it.
if helper_type is CTypePyObjectPtr:
context.addCleanupTempName(value_name)
if value_name is not to_name:
target_type.emitAssignConversionCode(
to_name=to_name,
value_name=value_name,
# TODO: Right now we don't do conversions here that could fail.
needs_check=False,
emit=emit,
context=context,
)
# When this is done on freshly assigned "Py_True" and "Py_False", the C
# compiler should be able to optimize it away by inlining "CHECK_IF_TRUE"
# branches on these two values.
if needs_result_inversion:
target_type.emitAssignInplaceNegatedValueCode(
to_name=to_name,
# We only get here if target_type doesn't cause issues.
needs_check=False,
emit=emit,
context=context,
)
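# Illustrative sketch (not from this file): for a "<" comparison on two
# plain objects, the selection above would typically resolve to a helper
# named along these lines, with the emitted C looking roughly like:
#
#     tmp_res = RICH_COMPARE_LT_OBJECT_OBJECT_OBJECT(tmp_left, tmp_right);
#
# The exact name depends on the result type and argument C types picked by
# selectCodeHelper; treat the helper name here as an assumption.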
def generateComparisonExpressionCode(to_name, expression, emit, context):
left = expression.subnode_left
right = expression.subnode_right
comparator = expression.getComparator()
type_name = "PyObject *"
if comparator in ("Is", "IsNot"):
if left.getTypeShape() is tshape_bool and right.getTypeShape() is tshape_bool:
type_name = "nuitka_bool"
left_name = context.allocateTempName("cmp_expr_left", type_name=type_name)
right_name = context.allocateTempName("cmp_expr_right", type_name=type_name)
generateExpressionCode(
to_name=left_name, expression=left, emit=emit, context=context
)
generateExpressionCode(
to_name=right_name, expression=right, emit=emit, context=context
)
if comparator in ("In", "NotIn"):
needs_check = right.mayRaiseExceptionIn(BaseException, expression.subnode_left)
res_name = context.getIntResName()
emit(
"%s = PySequence_Contains(%s, %s);"
% (res_name, right_name, left_name) # sequence goes first in the API.
)
getErrorExitBoolCode(
condition="%s == -1" % res_name,
release_names=(left_name, right_name),
needs_check=needs_check,
emit=emit,
context=context,
)
to_name.getCType().emitAssignmentCodeFromBoolCondition(
to_name=to_name,
condition="%s == %d" % (res_name, 1 if comparator == "In" else 0),
emit=emit,
)
elif comparator == "Is":
to_name.getCType().emitAssignmentCodeFromBoolCondition(
to_name=to_name, condition="%s == %s" % (left_name, right_name), emit=emit
)
getReleaseCodes(
release_names=(left_name, right_name), emit=emit, context=context
)
elif comparator == "IsNot":
to_name.getCType().emitAssignmentCodeFromBoolCondition(
to_name=to_name, condition="%s != %s" % (left_name, right_name), emit=emit
)
getReleaseCodes(
release_names=(left_name, right_name), emit=emit, context=context
)
elif comparator in ("exception_match", "exception_mismatch"):
needs_check = expression.mayRaiseExceptionComparison()
res_name = context.getIntResName()
emit("%s = EXCEPTION_MATCH_BOOL(%s, %s);" % (res_name, left_name, right_name))
getErrorExitBoolCode(
condition="%s == -1" % res_name,
release_names=(left_name, right_name),
needs_check=needs_check,
emit=emit,
context=context,
)
to_name.getCType().emitAssignmentCodeFromBoolCondition(
to_name=to_name,
condition="%s %s 0"
% (res_name, "!=" if comparator == "exception_match" else "=="),
emit=emit,
)
else:
assert False, comparator
def generateRichComparisonExpressionCode(to_name, expression, emit, context):
return getRichComparisonCode(
to_name=to_name,
comparator=expression.getComparator(),
left=expression.subnode_left,
right=expression.subnode_right,
needs_check=expression.mayRaiseExceptionComparison(),
source_ref=expression.source_ref,
emit=emit,
context=context,
)
def generateBuiltinIsinstanceCode(to_name, expression, emit, context):
inst_name = context.allocateTempName("isinstance_inst")
cls_name = context.allocateTempName("isinstance_cls")
generateExpressionCode(
to_name=inst_name,
expression=expression.subnode_instance,
emit=emit,
context=context,
)
generateExpressionCode(
to_name=cls_name,
expression=expression.subnode_classes,
emit=emit,
context=context,
)
context.setCurrentSourceCodeReference(expression.getCompatibleSourceReference())
res_name = context.getIntResName()
emit("%s = PyObject_IsInstance(%s, %s);" % (res_name, inst_name, cls_name))
getErrorExitBoolCode(
condition="%s == -1" % res_name,
release_names=(inst_name, cls_name),
emit=emit,
context=context,
)
to_name.getCType().emitAssignmentCodeFromBoolCondition(
to_name=to_name, condition="%s != 0" % res_name, emit=emit
)
def generateBuiltinIssubclassCode(to_name, expression, emit, context):
cls_name = context.allocateTempName("issubclass_cls")
classes_name = context.allocateTempName("issubclass_classes")
generateExpressionCode(
to_name=cls_name,
expression=expression.subnode_cls,
emit=emit,
context=context,
)
generateExpressionCode(
to_name=classes_name,
expression=expression.subnode_classes,
emit=emit,
context=context,
)
context.setCurrentSourceCodeReference(expression.getCompatibleSourceReference())
res_name = context.getIntResName()
emit("%s = PyObject_IsSubclass(%s, %s);" % (res_name, cls_name, classes_name))
getErrorExitBoolCode(
condition="%s == -1" % res_name,
release_names=(cls_name, classes_name),
emit=emit,
context=context,
)
to_name.getCType().emitAssignmentCodeFromBoolCondition(
to_name=to_name, condition="%s != 0" % res_name, emit=emit
)
def generateTypeCheckCode(to_name, expression, emit, context):
cls_name = context.allocateTempName("issubclass_cls")
generateExpressionCode(
to_name=cls_name,
expression=expression.subnode_cls,
emit=emit,
context=context,
)
context.setCurrentSourceCodeReference(expression.getCompatibleSourceReference())
res_name = context.getIntResName()
emit("%s = PyType_Check(%s);" % (res_name, cls_name))
getReleaseCode(
release_name=cls_name,
emit=emit,
context=context,
)
to_name.getCType().emitAssignmentCodeFromBoolCondition(
to_name=to_name, condition="%s != 0" % res_name, emit=emit
)
def generateMatchTypeCheckMappingCode(to_name, expression, emit, context):
cls_name = context.allocateTempName("mapping_check_cls")
generateExpressionCode(
to_name=cls_name,
expression=expression.subnode_value,
emit=emit,
context=context,
)
res_name = context.getIntResName()
emit("%s = Py_TYPE(%s)->tp_flags & Py_TPFLAGS_MAPPING;" % (res_name, cls_name))
getReleaseCode(
release_name=cls_name,
emit=emit,
context=context,
)
to_name.getCType().emitAssignmentCodeFromBoolCondition(
to_name=to_name, condition="%s" % res_name, emit=emit
)
def generateMatchTypeCheckSequenceCode(to_name, expression, emit, context):
cls_name = context.allocateTempName("sequence_check_cls")
generateExpressionCode(
to_name=cls_name,
expression=expression.subnode_value,
emit=emit,
context=context,
)
res_name = context.getIntResName()
emit("%s = Py_TYPE(%s)->tp_flags & Py_TPFLAGS_SEQUENCE;" % (res_name, cls_name))
getReleaseCode(
release_name=cls_name,
emit=emit,
context=context,
)
to_name.getCType().emitAssignmentCodeFromBoolCondition(
to_name=to_name, condition="%s" % res_name, emit=emit
)
# /Flask_Admin-1.6.1-py3-none-any.whl/flask_admin/form/widgets.py
from wtforms import widgets
from flask import current_app
from flask_admin.babel import gettext, ngettext
from flask_admin import helpers as h
__all__ = ['Select2Widget', 'DatePickerWidget', 'DateTimePickerWidget', 'RenderTemplateWidget', 'Select2TagsWidget', ]
def _is_bootstrap3():
view = h.get_current_view()
return view and view.admin.template_mode == 'bootstrap3'
class Select2Widget(widgets.Select):
"""
`Select2 <https://github.com/ivaynberg/select2>`_ styled select widget.
You must include select2.js, form-x.x.x.js and select2 stylesheet for it to
work.
"""
def __call__(self, field, **kwargs):
kwargs.setdefault('data-role', u'select2')
allow_blank = getattr(field, 'allow_blank', False)
if allow_blank and not self.multiple:
kwargs['data-allow-blank'] = u'1'
return super(Select2Widget, self).__call__(field, **kwargs)
class Select2TagsWidget(widgets.TextInput):
"""`Select2 <http://ivaynberg.github.com/select2/#tags>`_ styled text widget.
You must include select2.js, form-x.x.x.js and select2 stylesheet for it to work.
"""
def __call__(self, field, **kwargs):
kwargs.setdefault('data-role', u'select2-tags')
kwargs.setdefault('data-allow-duplicate-tags', "true" if getattr(field, 'allow_duplicates', False) else "false")
return super(Select2TagsWidget, self).__call__(field, **kwargs)
class DatePickerWidget(widgets.TextInput):
"""
Date picker widget.
You must include bootstrap-datepicker.js and form-x.x.x.js for styling to work.
"""
def __call__(self, field, **kwargs):
kwargs.setdefault('data-role', u'datepicker')
kwargs.setdefault('data-date-format', u'YYYY-MM-DD')
self.date_format = kwargs['data-date-format']
return super(DatePickerWidget, self).__call__(field, **kwargs)
class DateTimePickerWidget(widgets.TextInput):
"""
Datetime picker widget.
You must include bootstrap-datepicker.js and form-x.x.x.js for styling to work.
"""
def __call__(self, field, **kwargs):
kwargs.setdefault('data-role', u'datetimepicker')
kwargs.setdefault('data-date-format', u'YYYY-MM-DD HH:mm:ss')
return super(DateTimePickerWidget, self).__call__(field, **kwargs)
class TimePickerWidget(widgets.TextInput):
"""
Time picker widget.
You must include bootstrap-datepicker.js and form-x.x.x.js for styling to work.
"""
def __call__(self, field, **kwargs):
kwargs.setdefault('data-role', u'timepicker')
kwargs.setdefault('data-date-format', u'HH:mm:ss')
return super(TimePickerWidget, self).__call__(field, **kwargs)
class RenderTemplateWidget(object):
"""
WTForms widget that renders Jinja2 template
"""
def __init__(self, template):
"""
Constructor
:param template:
Template path
"""
self.template = template
def __call__(self, field, **kwargs):
kwargs.update({
'field': field,
'_gettext': gettext,
'_ngettext': ngettext,
'h': h,
})
template = current_app.jinja_env.get_template(self.template)
return template.render(kwargs)
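# Usage sketch (assumed, not part of this module): attach a widget to a
# standard WTForms field, e.g.
#
#     from wtforms import Form, SelectField
#
#     class MyForm(Form):
#         country = SelectField('Country', choices=[('dk', 'Denmark')],
#                               widget=Select2Widget())
#
# The widgets only set data-* attributes; the select2/datepicker JS assets
# must still be included on the page, as the docstrings above note.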
# /Nuitka_winsvc-1.7.10-cp310-cp310-win_amd64.whl/nuitka/build/inline_copy/tqdm/tqdm/std.py
from __future__ import absolute_import, division
import sys
from collections import OrderedDict, defaultdict
from contextlib import contextmanager
from datetime import datetime, timedelta
from numbers import Number
from time import time
from warnings import warn
from weakref import WeakSet
from ._monitor import TMonitor
from .utils import (
CallbackIOWrapper, Comparable, DisableOnWriteError, FormatReplace,
SimpleTextIOWrapper, _basestring, _is_ascii, _range, _screen_shape_wrapper,
_supports_unicode, _term_move_up, _unich, _unicode, disp_len, disp_trim)
__author__ = "https://github.com/tqdm/tqdm#contributions"
__all__ = ['tqdm', 'trange',
'TqdmTypeError', 'TqdmKeyError', 'TqdmWarning',
'TqdmExperimentalWarning', 'TqdmDeprecationWarning',
'TqdmMonitorWarning']
class TqdmTypeError(TypeError):
pass
class TqdmKeyError(KeyError):
pass
class TqdmWarning(Warning):
"""base class for all tqdm warnings.
Used for non-external-code-breaking errors, such as garbled printing.
"""
def __init__(self, msg, fp_write=None, *a, **k):
if fp_write is not None:
fp_write("\n" + self.__class__.__name__ + ": " + str(msg).rstrip() + '\n')
else:
super(TqdmWarning, self).__init__(msg, *a, **k)
class TqdmExperimentalWarning(TqdmWarning, FutureWarning):
"""beta feature, unstable API and behaviour"""
pass
class TqdmDeprecationWarning(TqdmWarning, DeprecationWarning):
# not suppressed if raised
pass
class TqdmMonitorWarning(TqdmWarning, RuntimeWarning):
"""tqdm monitor errors which do not affect external functionality"""
pass
def TRLock(*args, **kwargs):
"""threading RLock"""
try:
from threading import RLock
return RLock(*args, **kwargs)
except (ImportError, OSError): # pragma: no cover
pass
class TqdmDefaultWriteLock(object):
"""
Provide a default write lock for thread and multiprocessing safety.
Works only on platforms supporting `fork` (so Windows is excluded).
You must initialise a `tqdm` or `TqdmDefaultWriteLock` instance
before forking in order for the write lock to work.
On Windows, you need to supply the lock from the parent to the children as
an argument to joblib or the parallelism lib you use.
"""
# global thread lock so no setup required for multithreading.
# NB: Do not create multiprocessing lock as it sets the multiprocessing
# context, disallowing `spawn()`/`forkserver()`
th_lock = TRLock()
def __init__(self):
# Create global parallelism locks to avoid racing issues with parallel
# bars; works only if fork is available (Linux/macOS, but not Windows)
cls = type(self)
root_lock = cls.th_lock
if root_lock is not None:
root_lock.acquire()
cls.create_mp_lock()
self.locks = [lk for lk in [cls.mp_lock, cls.th_lock] if lk is not None]
if root_lock is not None:
root_lock.release()
def acquire(self, *a, **k):
for lock in self.locks:
lock.acquire(*a, **k)
def release(self):
for lock in self.locks[::-1]: # Release in inverse order of acquisition
lock.release()
def __enter__(self):
self.acquire()
def __exit__(self, *exc):
self.release()
@classmethod
def create_mp_lock(cls):
if not hasattr(cls, 'mp_lock'):
try:
from multiprocessing import RLock
cls.mp_lock = RLock()
except (ImportError, OSError): # pragma: no cover
cls.mp_lock = None
@classmethod
def create_th_lock(cls):
assert hasattr(cls, 'th_lock')
warn("create_th_lock not needed anymore", TqdmDeprecationWarning, stacklevel=2)
class Bar(object):
"""
`str.format`-able bar with format specifiers: `[width][type]`
- `width`
+ unspecified (default): use `self.default_len`
+ `int >= 0`: overrides `self.default_len`
+ `int < 0`: subtract from `self.default_len`
- `type`
+ `a`: ascii (`charset=self.ASCII` override)
+ `u`: unicode (`charset=self.UTF` override)
+ `b`: blank (`charset=" "` override)
"""
ASCII = " 123456789#"
UTF = u" " + u''.join(map(_unich, range(0x258F, 0x2587, -1)))
BLANK = " "
COLOUR_RESET = '\x1b[0m'
COLOUR_RGB = '\x1b[38;2;%d;%d;%dm'
COLOURS = {'BLACK': '\x1b[30m', 'RED': '\x1b[31m', 'GREEN': '\x1b[32m',
'YELLOW': '\x1b[33m', 'BLUE': '\x1b[34m', 'MAGENTA': '\x1b[35m',
'CYAN': '\x1b[36m', 'WHITE': '\x1b[37m'}
def __init__(self, frac, default_len=10, charset=UTF, colour=None):
if not 0 <= frac <= 1:
warn("clamping frac to range [0, 1]", TqdmWarning, stacklevel=2)
frac = max(0, min(1, frac))
assert default_len > 0
self.frac = frac
self.default_len = default_len
self.charset = charset
self.colour = colour
@property
def colour(self):
return self._colour
@colour.setter
def colour(self, value):
if not value:
self._colour = None
return
try:
if value.upper() in self.COLOURS:
self._colour = self.COLOURS[value.upper()]
elif value[0] == '#' and len(value) == 7:
self._colour = self.COLOUR_RGB % tuple(
int(i, 16) for i in (value[1:3], value[3:5], value[5:7]))
else:
raise KeyError
except (KeyError, AttributeError):
warn("Unknown colour (%s); valid choices: [hex (#00ff00), %s]" % (
value, ", ".join(self.COLOURS)),
TqdmWarning, stacklevel=2)
self._colour = None
def __format__(self, format_spec):
if format_spec:
_type = format_spec[-1].lower()
try:
charset = {'a': self.ASCII, 'u': self.UTF, 'b': self.BLANK}[_type]
except KeyError:
charset = self.charset
else:
format_spec = format_spec[:-1]
if format_spec:
N_BARS = int(format_spec)
if N_BARS < 0:
N_BARS += self.default_len
else:
N_BARS = self.default_len
else:
charset = self.charset
N_BARS = self.default_len
nsyms = len(charset) - 1
bar_length, frac_bar_length = divmod(int(self.frac * N_BARS * nsyms), nsyms)
res = charset[-1] * bar_length
if bar_length < N_BARS: # whitespace padding
res = res + charset[frac_bar_length] + \
charset[0] * (N_BARS - bar_length - 1)
return self.colour + res + self.COLOUR_RESET if self.colour else res
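# Format-spec sketch: "{:10a}".format(Bar(0.3)) renders a 10-cell ASCII
# bar, and "{:-6u}".format(Bar(0.3)) a unicode bar six cells narrower than
# default_len (illustrative; exact glyphs come from the charset constants).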
class EMA(object):
"""
Exponential moving average: smoothing to give progressively lower
weights to older values.
Parameters
----------
smoothing : float, optional
Smoothing factor in range [0, 1], [default: 0.3].
Increase to give more weight to recent values.
Ranges from 0 (yields old value) to 1 (yields new value).
"""
def __init__(self, smoothing=0.3):
self.alpha = smoothing
self.last = 0
self.calls = 0
def __call__(self, x=None):
"""
Parameters
----------
x : float
New value to include in EMA.
"""
beta = 1 - self.alpha
if x is not None:
self.last = self.alpha * x + beta * self.last
self.calls += 1
return self.last / (1 - beta ** self.calls) if self.calls else self.last
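# Worked example: with smoothing=0.5 the bias correction makes early calls
# behave sensibly:
#
#     ema = EMA(smoothing=0.5)
#     ema(1)  # -> 1.0 (bias-corrected, not 0.5)
#     ema(0)  # -> 0.25 / (1 - 0.25) == 0.333...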
class tqdm(Comparable):
"""
Decorate an iterable object, returning an iterator which acts exactly
like the original iterable, but prints a dynamically updating
progressbar every time a value is requested.
"""
monitor_interval = 10 # set to 0 to disable the thread
monitor = None
_instances = WeakSet()
@staticmethod
def format_sizeof(num, suffix='', divisor=1000):
"""
Formats a number (greater than unity) with SI Order of Magnitude
prefixes.
Parameters
----------
num : float
Number ( >= 1) to format.
suffix : str, optional
Post-postfix [default: ''].
divisor : float, optional
Divisor between prefixes [default: 1000].
Returns
-------
out : str
Number with Order of Magnitude SI unit postfix.
"""
for unit in ['', 'k', 'M', 'G', 'T', 'P', 'E', 'Z']:
if abs(num) < 999.5:
if abs(num) < 99.95:
if abs(num) < 9.995:
return '{0:1.2f}'.format(num) + unit + suffix
return '{0:2.1f}'.format(num) + unit + suffix
return '{0:3.0f}'.format(num) + unit + suffix
num /= divisor
return '{0:3.1f}Y'.format(num) + suffix
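# e.g.:
#     tqdm.format_sizeof(123456)              # -> '123k'
#     tqdm.format_sizeof(1303, divisor=1024)  # -> '1.27k'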
@staticmethod
def format_interval(t):
"""
Formats a number of seconds as a clock time, [H:]MM:SS
Parameters
----------
t : int
Number of seconds.
Returns
-------
out : str
[H:]MM:SS
"""
mins, s = divmod(int(t), 60)
h, m = divmod(mins, 60)
if h:
return '{0:d}:{1:02d}:{2:02d}'.format(h, m, s)
else:
return '{0:02d}:{1:02d}'.format(m, s)
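# e.g.:
#     tqdm.format_interval(3661)  # -> '1:01:01'
#     tqdm.format_interval(75)    # -> '01:15'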
@staticmethod
def format_num(n):
"""
Intelligent scientific notation (.3g).
Parameters
----------
n : int or float or Numeric
A Number.
Returns
-------
out : str
Formatted number.
"""
f = '{0:.3g}'.format(n).replace('+0', '+').replace('-0', '-')
n = str(n)
return f if len(f) < len(n) else n
@staticmethod
def status_printer(file):
"""
Manage the printing and in-place updating of a line of characters.
Note that if the string is longer than a line, then in-place
updating may not work (it will print a new line at each refresh).
"""
fp = file
fp_flush = getattr(fp, 'flush', lambda: None) # pragma: no cover
def fp_write(s):
fp.write(_unicode(s))
fp_flush()
last_len = [0]
def print_status(s):
len_s = disp_len(s)
fp_write('\r' + s + (' ' * max(last_len[0] - len_s, 0)))
last_len[0] = len_s
return print_status
@staticmethod
def format_meter(n, total, elapsed, ncols=None, prefix='', ascii=False, unit='it',
unit_scale=False, rate=None, bar_format=None, postfix=None,
unit_divisor=1000, initial=0, colour=None, **extra_kwargs):
"""
Return a string-based progress bar given some parameters
Parameters
----------
n : int or float
Number of finished iterations.
total : int or float
The expected total number of iterations. If meaningless (None),
only basic progress statistics are displayed (no ETA).
elapsed : float
Number of seconds passed since start.
ncols : int, optional
The width of the entire output message. If specified,
dynamically resizes `{bar}` to stay within this bound
[default: None]. If `0`, will not print any bar (only stats).
The fallback is `{bar:10}`.
prefix : str, optional
Prefix message (included in total width) [default: ''].
Use as {desc} in bar_format string.
ascii : bool, optional or str, optional
If not set, use unicode (smooth blocks) to fill the meter
[default: False]. The fallback is to use ASCII characters
" 123456789#".
unit : str, optional
The iteration unit [default: 'it'].
unit_scale : bool or int or float, optional
If 1 or True, the number of iterations will be printed with an
appropriate SI metric prefix (k = 10^3, M = 10^6, etc.)
[default: False]. If any other non-zero number, will scale
`total` and `n`.
rate : float, optional
Manual override for iteration rate.
If [default: None], uses n/elapsed.
bar_format : str, optional
Specify a custom bar string formatting. May impact performance.
[default: '{l_bar}{bar}{r_bar}'], where
l_bar='{desc}: {percentage:3.0f}%|' and
r_bar='| {n_fmt}/{total_fmt} [{elapsed}<{remaining}, '
'{rate_fmt}{postfix}]'
Possible vars: l_bar, bar, r_bar, n, n_fmt, total, total_fmt,
percentage, elapsed, elapsed_s, ncols, nrows, desc, unit,
rate, rate_fmt, rate_noinv, rate_noinv_fmt,
rate_inv, rate_inv_fmt, postfix, unit_divisor,
remaining, remaining_s, eta.
Note that a trailing ": " is automatically removed after {desc}
if the latter is empty.
postfix : *, optional
Similar to `prefix`, but placed at the end
(e.g. for additional stats).
Note: postfix is usually a string (not a dict) for this method,
and will if possible be set to postfix = ', ' + postfix.
However other types are supported (#382).
unit_divisor : float, optional
[default: 1000], ignored unless `unit_scale` is True.
initial : int or float, optional
The initial counter value [default: 0].
colour : str, optional
Bar colour (e.g. 'green', '#00ff00').
Returns
-------
out : Formatted meter and stats, ready to display.
"""
# sanity check: total
if total and n >= (total + 0.5): # allow float imprecision (#849)
total = None
# apply custom scale if necessary
if unit_scale and unit_scale not in (True, 1):
if total:
total *= unit_scale
n *= unit_scale
if rate:
rate *= unit_scale # by default rate = self.avg_dn / self.avg_dt
unit_scale = False
elapsed_str = tqdm.format_interval(elapsed)
# if unspecified, attempt to use rate = average speed
# (we allow manual override since predicting time is an arcane art)
if rate is None and elapsed:
rate = (n - initial) / elapsed
inv_rate = 1 / rate if rate else None
format_sizeof = tqdm.format_sizeof
rate_noinv_fmt = ((format_sizeof(rate) if unit_scale else
'{0:5.2f}'.format(rate)) if rate else '?') + unit + '/s'
rate_inv_fmt = (
(format_sizeof(inv_rate) if unit_scale else '{0:5.2f}'.format(inv_rate))
if inv_rate else '?') + 's/' + unit
rate_fmt = rate_inv_fmt if inv_rate and inv_rate > 1 else rate_noinv_fmt
if unit_scale:
n_fmt = format_sizeof(n, divisor=unit_divisor)
total_fmt = format_sizeof(total, divisor=unit_divisor) \
if total is not None else '?'
else:
n_fmt = str(n)
total_fmt = str(total) if total is not None else '?'
try:
postfix = ', ' + postfix if postfix else ''
except TypeError:
pass
remaining = (total - n) / rate if rate and total else 0
remaining_str = tqdm.format_interval(remaining) if rate else '?'
try:
eta_dt = datetime.now() + timedelta(seconds=remaining) \
if rate and total else datetime.utcfromtimestamp(0)
except OverflowError:
eta_dt = datetime.max
# format the stats displayed to the left and right sides of the bar
if prefix:
# old prefix setup work around
bool_prefix_colon_already = (prefix[-2:] == ": ")
l_bar = prefix if bool_prefix_colon_already else prefix + ": "
else:
l_bar = ''
r_bar = '| {0}/{1} [{2}<{3}, {4}{5}]'.format(
n_fmt, total_fmt, elapsed_str, remaining_str, rate_fmt, postfix)
# Custom bar formatting
# Populate a dict with all available progress indicators
format_dict = dict(
# slight extension of self.format_dict
n=n, n_fmt=n_fmt, total=total, total_fmt=total_fmt,
elapsed=elapsed_str, elapsed_s=elapsed,
ncols=ncols, desc=prefix or '', unit=unit,
rate=inv_rate if inv_rate and inv_rate > 1 else rate,
rate_fmt=rate_fmt, rate_noinv=rate,
rate_noinv_fmt=rate_noinv_fmt, rate_inv=inv_rate,
rate_inv_fmt=rate_inv_fmt,
postfix=postfix, unit_divisor=unit_divisor,
colour=colour,
# plus more useful definitions
remaining=remaining_str, remaining_s=remaining,
l_bar=l_bar, r_bar=r_bar, eta=eta_dt,
**extra_kwargs)
# total is known: we can predict some stats
if total:
# fractional and percentage progress
frac = n / total
percentage = frac * 100
l_bar += '{0:3.0f}%|'.format(percentage)
if ncols == 0:
return l_bar[:-1] + r_bar[1:]
format_dict.update(l_bar=l_bar)
if bar_format:
format_dict.update(percentage=percentage)
# auto-remove colon for empty `desc`
if not prefix:
bar_format = bar_format.replace("{desc}: ", '')
else:
bar_format = "{l_bar}{bar}{r_bar}"
full_bar = FormatReplace()
try:
nobar = bar_format.format(bar=full_bar, **format_dict)
except UnicodeEncodeError:
bar_format = _unicode(bar_format)
nobar = bar_format.format(bar=full_bar, **format_dict)
if not full_bar.format_called:
# no {bar}, we can just format and return
return nobar
# Formatting progress bar space available for bar's display
full_bar = Bar(frac,
max(1, ncols - disp_len(nobar)) if ncols else 10,
charset=Bar.ASCII if ascii is True else ascii or Bar.UTF,
colour=colour)
if not _is_ascii(full_bar.charset) and _is_ascii(bar_format):
bar_format = _unicode(bar_format)
res = bar_format.format(bar=full_bar, **format_dict)
return disp_trim(res, ncols) if ncols else res
elif bar_format:
# user-specified bar_format but no total
l_bar += '|'
format_dict.update(l_bar=l_bar, percentage=0)
full_bar = FormatReplace()
nobar = bar_format.format(bar=full_bar, **format_dict)
if not full_bar.format_called:
return nobar
full_bar = Bar(0,
max(1, ncols - disp_len(nobar)) if ncols else 10,
charset=Bar.BLANK, colour=colour)
res = bar_format.format(bar=full_bar, **format_dict)
return disp_trim(res, ncols) if ncols else res
else:
# no total: no progressbar, ETA, just progress stats
return ((prefix + ": ") if prefix else '') + \
'{0}{1} [{2}, {3}{4}]'.format(
n_fmt, unit, elapsed_str, rate_fmt, postfix)
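# Usage sketch: format_meter is effectively a pure function of its
# arguments, so it can be exercised directly, e.g.
#
#     tqdm.format_meter(n=50, total=100, elapsed=2, ncols=60)
#
# yields something like ' 50%|#####...| 50/100 [00:02<00:02, 25.00it/s]'
# (the exact bar glyphs depend on unicode support and the ncols budget).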
def __new__(cls, *_, **__):
instance = object.__new__(cls)
with cls.get_lock(): # also constructs lock if non-existent
cls._instances.add(instance)
# create monitoring thread
if cls.monitor_interval and (cls.monitor is None
or not cls.monitor.report()):
try:
cls.monitor = TMonitor(cls, cls.monitor_interval)
except Exception as e: # pragma: nocover
warn("tqdm:disabling monitor support"
" (monitor_interval = 0) due to:\n" + str(e),
TqdmMonitorWarning, stacklevel=2)
cls.monitor_interval = 0
return instance
@classmethod
def _get_free_pos(cls, instance=None):
"""Skips specified instance."""
positions = {abs(inst.pos) for inst in cls._instances
if inst is not instance and hasattr(inst, "pos")}
return min(set(range(len(positions) + 1)).difference(positions))
@classmethod
def _decr_instances(cls, instance):
"""
Remove from list and reposition another unfixed bar
to fill the new gap.
This means that by default (where all nested bars are unfixed),
order is not maintained but screen flicker/blank space is minimised.
(tqdm<=4.44.1 moved ALL subsequent unfixed bars up.)
"""
with cls._lock:
try:
cls._instances.remove(instance)
except KeyError:
# if not instance.gui: # pragma: no cover
# raise
pass # py2: maybe magically removed already
# else:
if not instance.gui:
last = (instance.nrows or 20) - 1
# find unfixed (`pos >= 0`) overflow (`pos >= nrows - 1`)
instances = list(filter(
lambda i: hasattr(i, "pos") and last <= i.pos,
cls._instances))
# set first found to current `pos`
if instances:
inst = min(instances, key=lambda i: i.pos)
inst.clear(nolock=True)
inst.pos = abs(instance.pos)
@classmethod
def write(cls, s, file=None, end="\n", nolock=False):
"""Print a message via tqdm (without overlap with bars)."""
fp = file if file is not None else sys.stdout
with cls.external_write_mode(file=file, nolock=nolock):
# Write the message
fp.write(s)
fp.write(end)
@classmethod
@contextmanager
def external_write_mode(cls, file=None, nolock=False):
"""
Disable tqdm within context and refresh tqdm when exits.
Useful when writing to standard output stream
"""
fp = file if file is not None else sys.stdout
try:
if not nolock:
cls.get_lock().acquire()
# Clear all bars
inst_cleared = []
for inst in getattr(cls, '_instances', []):
# Clear instance if in the target output file
# or if write output + tqdm output are both either
# sys.stdout or sys.stderr (because both are mixed in terminal)
if hasattr(inst, "start_t") and (inst.fp == fp or all(
f in (sys.stdout, sys.stderr) for f in (fp, inst.fp))):
inst.clear(nolock=True)
inst_cleared.append(inst)
yield
# Force refresh display of bars we cleared
for inst in inst_cleared:
inst.refresh(nolock=True)
finally:
if not nolock:
cls._lock.release()
@classmethod
def set_lock(cls, lock):
"""Set the global lock."""
cls._lock = lock
@classmethod
def get_lock(cls):
"""Get the global lock. Construct it if it does not exist."""
if not hasattr(cls, '_lock'):
cls._lock = TqdmDefaultWriteLock()
return cls._lock
@classmethod
def pandas(cls, **tqdm_kwargs):
"""
Registers the current `tqdm` class with
pandas.core.
( frame.DataFrame
| series.Series
| groupby.(generic.)DataFrameGroupBy
| groupby.(generic.)SeriesGroupBy
).progress_apply
A new instance will be created every time `progress_apply` is called,
and each instance will automatically `close()` upon completion.
Parameters
----------
tqdm_kwargs : arguments for the tqdm instance
Examples
--------
>>> import pandas as pd
>>> import numpy as np
>>> from tqdm import tqdm
>>> from tqdm.gui import tqdm as tqdm_gui
>>>
>>> df = pd.DataFrame(np.random.randint(0, 100, (100000, 6)))
>>> tqdm.pandas(ncols=50) # can use tqdm_gui, optional kwargs, etc
>>> # Now you can use `progress_apply` instead of `apply`
>>> df.groupby(0).progress_apply(lambda x: x**2)
References
----------
<https://stackoverflow.com/questions/18603270/\
progress-indicator-during-pandas-operations-python>
"""
from warnings import catch_warnings, simplefilter
from pandas.core.frame import DataFrame
from pandas.core.series import Series
try:
with catch_warnings():
simplefilter("ignore", category=FutureWarning)
from pandas import Panel
except ImportError: # pandas>=1.2.0
Panel = None
Rolling, Expanding = None, None
try: # pandas>=1.0.0
from pandas.core.window.rolling import _Rolling_and_Expanding
except ImportError:
try: # pandas>=0.18.0
from pandas.core.window import _Rolling_and_Expanding
except ImportError: # pandas>=1.2.0
try: # pandas>=1.2.0
from pandas.core.window.expanding import Expanding
from pandas.core.window.rolling import Rolling
_Rolling_and_Expanding = Rolling, Expanding
except ImportError: # pragma: no cover
_Rolling_and_Expanding = None
try: # pandas>=0.25.0
from pandas.core.groupby.generic import SeriesGroupBy # , NDFrameGroupBy
from pandas.core.groupby.generic import DataFrameGroupBy
except ImportError: # pragma: no cover
try: # pandas>=0.23.0
from pandas.core.groupby.groupby import DataFrameGroupBy, SeriesGroupBy
except ImportError:
from pandas.core.groupby import DataFrameGroupBy, SeriesGroupBy
try: # pandas>=0.23.0
from pandas.core.groupby.groupby import GroupBy
except ImportError: # pragma: no cover
from pandas.core.groupby import GroupBy
try: # pandas>=0.23.0
from pandas.core.groupby.groupby import PanelGroupBy
except ImportError:
try:
from pandas.core.groupby import PanelGroupBy
except ImportError: # pandas>=0.25.0
PanelGroupBy = None
tqdm_kwargs = tqdm_kwargs.copy()
deprecated_t = [tqdm_kwargs.pop('deprecated_t', None)]
def inner_generator(df_function='apply'):
def inner(df, func, *args, **kwargs):
"""
Parameters
----------
df : (DataFrame|Series)[GroupBy]
Data (may be grouped).
func : function
To be applied on the (grouped) data.
**kwargs : optional
Transmitted to `df.apply()`.
"""
# Precompute total iterations
total = tqdm_kwargs.pop("total", getattr(df, 'ngroups', None))
if total is None: # not grouped
if df_function == 'applymap':
total = df.size
elif isinstance(df, Series):
total = len(df)
elif _Rolling_and_Expanding is None or \
not isinstance(df, _Rolling_and_Expanding):
# DataFrame or Panel
axis = kwargs.get('axis', 0)
if axis == 'index':
axis = 0
elif axis == 'columns':
axis = 1
# when axis=0, total is shape[axis1]
total = df.size // df.shape[axis]
# Init bar
if deprecated_t[0] is not None:
t = deprecated_t[0]
deprecated_t[0] = None
else:
t = cls(total=total, **tqdm_kwargs)
if len(args) > 0:
# *args intentionally not supported (see #244, #299)
TqdmDeprecationWarning(
"Except func, normal arguments are intentionally" +
" not supported by" +
" `(DataFrame|Series|GroupBy).progress_apply`." +
" Use keyword arguments instead.",
fp_write=getattr(t.fp, 'write', sys.stderr.write))
try:
func = df._is_builtin_func(func)
except TypeError:
pass
# Define bar updating wrapper
def wrapper(*args, **kwargs):
# update tbar correctly
# it seems `pandas apply` calls `func` twice
# on the first column/row to decide whether it can
# take a fast or slow code path; so stop when t.total==t.n
t.update(n=1 if not t.total or t.n < t.total else 0)
return func(*args, **kwargs)
# Apply the provided function (in **kwargs)
# on the df using our wrapper (which provides bar updating)
try:
return getattr(df, df_function)(wrapper, **kwargs)
finally:
t.close()
return inner
# Monkeypatch pandas to provide easy methods
# Enable custom tqdm progress in pandas!
Series.progress_apply = inner_generator()
SeriesGroupBy.progress_apply = inner_generator()
Series.progress_map = inner_generator('map')
SeriesGroupBy.progress_map = inner_generator('map')
DataFrame.progress_apply = inner_generator()
DataFrameGroupBy.progress_apply = inner_generator()
DataFrame.progress_applymap = inner_generator('applymap')
if Panel is not None:
Panel.progress_apply = inner_generator()
if PanelGroupBy is not None:
PanelGroupBy.progress_apply = inner_generator()
GroupBy.progress_apply = inner_generator()
GroupBy.progress_aggregate = inner_generator('aggregate')
GroupBy.progress_transform = inner_generator('transform')
if Rolling is not None and Expanding is not None:
Rolling.progress_apply = inner_generator()
Expanding.progress_apply = inner_generator()
elif _Rolling_and_Expanding is not None:
_Rolling_and_Expanding.progress_apply = inner_generator()
def __init__(self, iterable=None, desc=None, total=None, leave=True, file=None,
ncols=None, mininterval=0.1, maxinterval=10.0, miniters=None,
ascii=None, disable=False, unit='it', unit_scale=False,
dynamic_ncols=False, smoothing=0.3, bar_format=None, initial=0,
position=None, postfix=None, unit_divisor=1000, write_bytes=None,
lock_args=None, nrows=None, colour=None, delay=0, gui=False,
**kwargs):
"""
Parameters
----------
iterable : iterable, optional
Iterable to decorate with a progressbar.
Leave blank to manually manage the updates.
desc : str, optional
Prefix for the progressbar.
total : int or float, optional
The number of expected iterations. If unspecified,
len(iterable) is used if possible. If float("inf") or as a last
resort, only basic progress statistics are displayed
(no ETA, no progressbar).
If `gui` is True and this parameter needs subsequent updating,
specify an initial arbitrary large positive number,
e.g. 9e9.
leave : bool, optional
If [default: True], keeps all traces of the progressbar
upon termination of iteration.
If `None`, will leave only if `position` is `0`.
file : `io.TextIOWrapper` or `io.StringIO`, optional
Specifies where to output the progress messages
(default: sys.stderr). Uses `file.write(str)` and `file.flush()`
methods. For encoding, see `write_bytes`.
ncols : int, optional
The width of the entire output message. If specified,
dynamically resizes the progressbar to stay within this bound.
If unspecified, attempts to use environment width. The
fallback is a meter width of 10 and no limit for the counter and
statistics. If 0, will not print any meter (only stats).
mininterval : float, optional
Minimum progress display update interval [default: 0.1] seconds.
maxinterval : float, optional
Maximum progress display update interval [default: 10] seconds.
Automatically adjusts `miniters` to correspond to `mininterval`
after long display update lag. Only works if `dynamic_miniters`
or monitor thread is enabled.
miniters : int or float, optional
Minimum progress display update interval, in iterations.
If 0 and `dynamic_miniters`, will automatically adjust to equal
`mininterval` (more CPU efficient, good for tight loops).
If > 0, will skip display of specified number of iterations.
Tweak this and `mininterval` to get very efficient loops.
If your progress is erratic with both fast and slow iterations
(network, skipping items, etc) you should set miniters=1.
ascii : bool or str, optional
If unspecified or False, use unicode (smooth blocks) to fill
the meter. The fallback is to use ASCII characters " 123456789#".
disable : bool, optional
Whether to disable the entire progressbar wrapper
[default: False]. If set to None, disable on non-TTY.
unit : str, optional
String that will be used to define the unit of each iteration
[default: it].
unit_scale : bool or int or float, optional
If 1 or True, the number of iterations will be reduced/scaled
automatically and a metric prefix following the
International System of Units standard will be added
(kilo, mega, etc.) [default: False]. If any other non-zero
number, will scale `total` and `n`.
dynamic_ncols : bool, optional
If set, constantly alters `ncols` and `nrows` to the
environment (allowing for window resizes) [default: False].
smoothing : float, optional
Exponential moving average smoothing factor for speed estimates
(ignored in GUI mode). Ranges from 0 (average speed) to 1
(current/instantaneous speed) [default: 0.3].
bar_format : str, optional
Specify a custom bar string formatting. May impact performance.
[default: '{l_bar}{bar}{r_bar}'], where
l_bar='{desc}: {percentage:3.0f}%|' and
r_bar='| {n_fmt}/{total_fmt} [{elapsed}<{remaining}, '
'{rate_fmt}{postfix}]'
Possible vars: l_bar, bar, r_bar, n, n_fmt, total, total_fmt,
percentage, elapsed, elapsed_s, ncols, nrows, desc, unit,
rate, rate_fmt, rate_noinv, rate_noinv_fmt,
rate_inv, rate_inv_fmt, postfix, unit_divisor,
remaining, remaining_s, eta.
Note that a trailing ": " is automatically removed after {desc}
if the latter is empty.
initial : int or float, optional
The initial counter value. Useful when restarting a progress
bar [default: 0]. If using float, consider specifying `{n:.3f}`
or similar in `bar_format`, or specifying `unit_scale`.
position : int, optional
Specify the line offset to print this bar (starting from 0)
Automatic if unspecified.
Useful to manage multiple bars at once (eg, from threads).
postfix : dict or *, optional
Specify additional stats to display at the end of the bar.
Calls `set_postfix(**postfix)` if possible (dict).
unit_divisor : float, optional
[default: 1000], ignored unless `unit_scale` is True.
write_bytes : bool, optional
If (default: None) and `file` is unspecified,
bytes will be written in Python 2. If `True` will also write
bytes. In all other cases will default to unicode.
lock_args : tuple, optional
Passed to `refresh` for intermediate output
(initialisation, iterating, and updating).
nrows : int, optional
The screen height. If specified, hides nested bars outside this
bound. If unspecified, attempts to use environment height.
The fallback is 20.
colour : str, optional
Bar colour (e.g. 'green', '#00ff00').
delay : float, optional
Don't display until [default: 0] seconds have elapsed.
gui : bool, optional
WARNING: internal parameter - do not use.
Use tqdm.gui.tqdm(...) instead. If set, will attempt to use
matplotlib animations for a graphical output [default: False].
Returns
-------
out : decorated iterator.
"""
if write_bytes is None:
write_bytes = file is None and sys.version_info < (3,)
if file is None:
file = sys.stderr
if write_bytes:
# Despite coercing unicode into bytes, py2 sys.std* streams
# should have bytes written to them.
file = SimpleTextIOWrapper(
file, encoding=getattr(file, 'encoding', None) or 'utf-8')
file = DisableOnWriteError(file, tqdm_instance=self)
if disable is None and hasattr(file, "isatty") and not file.isatty():
disable = True
if total is None and iterable is not None:
try:
total = len(iterable)
except (TypeError, AttributeError):
total = None
if total == float("inf"):
# Infinite iterations, behave same as unknown
total = None
if disable:
self.iterable = iterable
self.disable = disable
with self._lock:
self.pos = self._get_free_pos(self)
self._instances.remove(self)
self.n = initial
self.total = total
self.leave = leave
return
if kwargs:
self.disable = True
with self._lock:
self.pos = self._get_free_pos(self)
self._instances.remove(self)
raise (
TqdmDeprecationWarning(
"`nested` is deprecated and automated.\n"
"Use `position` instead for manual control.\n",
fp_write=getattr(file, 'write', sys.stderr.write))
if "nested" in kwargs else
TqdmKeyError("Unknown argument(s): " + str(kwargs)))
# Preprocess the arguments
if ((ncols is None or nrows is None) and
(file in (sys.stderr, sys.stdout))) or \
dynamic_ncols: # pragma: no cover
if dynamic_ncols:
dynamic_ncols = _screen_shape_wrapper()
if dynamic_ncols:
ncols, nrows = dynamic_ncols(file)
else:
_dynamic_ncols = _screen_shape_wrapper()
if _dynamic_ncols:
_ncols, _nrows = _dynamic_ncols(file)
if ncols is None:
ncols = _ncols
if nrows is None:
nrows = _nrows
if miniters is None:
miniters = 0
dynamic_miniters = True
else:
dynamic_miniters = False
if mininterval is None:
mininterval = 0
if maxinterval is None:
maxinterval = 0
if ascii is None:
ascii = not _supports_unicode(file)
if bar_format and not ((ascii is True) or _is_ascii(ascii)):
# Convert bar format into unicode since terminal uses unicode
bar_format = _unicode(bar_format)
if smoothing is None:
smoothing = 0
# Store the arguments
self.iterable = iterable
self.desc = desc or ''
self.total = total
self.leave = leave
self.fp = file
self.ncols = ncols
self.nrows = nrows
self.mininterval = mininterval
self.maxinterval = maxinterval
self.miniters = miniters
self.dynamic_miniters = dynamic_miniters
self.ascii = ascii
self.disable = disable
self.unit = unit
self.unit_scale = unit_scale
self.unit_divisor = unit_divisor
self.initial = initial
self.lock_args = lock_args
self.delay = delay
self.gui = gui
self.dynamic_ncols = dynamic_ncols
self.smoothing = smoothing
self._ema_dn = EMA(smoothing)
self._ema_dt = EMA(smoothing)
self._ema_miniters = EMA(smoothing)
self.bar_format = bar_format
self.postfix = None
self.colour = colour
self._time = time
if postfix:
try:
self.set_postfix(refresh=False, **postfix)
except TypeError:
self.postfix = postfix
# Init the iterations counters
self.last_print_n = initial
self.n = initial
# if nested, at initial sp() call we replace '\r' by '\n' to
# not overwrite the outer progress bar
with self._lock:
if position is None:
self.pos = self._get_free_pos(self)
else: # mark fixed positions as negative
self.pos = -position
if not gui:
# Initialize the screen printer
self.sp = self.status_printer(self.fp)
if delay <= 0:
self.refresh(lock_args=self.lock_args)
# Init the time counter
self.last_print_t = self._time()
# NB: Avoid race conditions by setting start_t at the very end of init
self.start_t = self.last_print_t
def __bool__(self):
if self.total is not None:
return self.total > 0
if self.iterable is None:
raise TypeError('bool() undefined when iterable == total == None')
return bool(self.iterable)
def __nonzero__(self):
return self.__bool__()
def __len__(self):
return self.total if self.iterable is None else \
(self.iterable.shape[0] if hasattr(self.iterable, "shape")
else len(self.iterable) if hasattr(self.iterable, "__len__")
else self.iterable.__length_hint__()
if hasattr(self.iterable, "__length_hint__")
else getattr(self, "total", None))
def __enter__(self):
return self
def __exit__(self, exc_type, exc_value, traceback):
try:
self.close()
except AttributeError:
# maybe eager thread cleanup upon external error
if (exc_type, exc_value, traceback) == (None, None, None):
raise
warn("AttributeError ignored", TqdmWarning, stacklevel=2)
def __del__(self):
self.close()
def __str__(self):
return self.format_meter(**self.format_dict)
@property
def _comparable(self):
return abs(getattr(self, "pos", 1 << 31))
def __hash__(self):
return id(self)
def __iter__(self):
"""Backward-compatibility to use: for x in tqdm(iterable)"""
# Inlining instance variables as locals (speed optimisation)
iterable = self.iterable
# If the bar is disabled, then just walk the iterable
# (note: keep this check outside the loop for performance)
if self.disable:
for obj in iterable:
yield obj
return
mininterval = self.mininterval
last_print_t = self.last_print_t
last_print_n = self.last_print_n
min_start_t = self.start_t + self.delay
n = self.n
time = self._time
try:
for obj in iterable:
yield obj
# Update and possibly print the progressbar.
# Note: does not call self.update(1) for speed optimisation.
n += 1
if n - last_print_n >= self.miniters:
cur_t = time()
dt = cur_t - last_print_t
if dt >= mininterval and cur_t >= min_start_t:
self.update(n - last_print_n)
last_print_n = self.last_print_n
last_print_t = self.last_print_t
finally:
self.n = n
self.close()
def update(self, n=1):
"""
Manually update the progress bar, useful for streams
such as reading files.
E.g.:
>>> t = tqdm(total=filesize) # Initialise
>>> for current_buffer in stream:
... ...
... t.update(len(current_buffer))
>>> t.close()
The last line is highly recommended, but possibly not necessary if
`t.update()` will be called in such a way that `filesize` will be
exactly reached and printed.
Parameters
----------
n : int or float, optional
Increment to add to the internal counter of iterations
[default: 1]. If using float, consider specifying `{n:.3f}`
or similar in `bar_format`, or specifying `unit_scale`.
Returns
-------
out : bool or None
True if a `display()` was triggered.
"""
if self.disable:
return
if n < 0:
self.last_print_n += n # for auto-refresh logic to work
self.n += n
# check counter first to reduce calls to time()
if self.n - self.last_print_n >= self.miniters:
cur_t = self._time()
dt = cur_t - self.last_print_t
if dt >= self.mininterval and cur_t >= self.start_t + self.delay:
cur_t = self._time()
dn = self.n - self.last_print_n # >= n
if self.smoothing and dt and dn:
# EMA (not just overall average)
self._ema_dn(dn)
self._ema_dt(dt)
self.refresh(lock_args=self.lock_args)
if self.dynamic_miniters:
# If no `miniters` was specified, adjust automatically to the
# maximum iteration rate seen so far between two prints.
# e.g.: After running `tqdm.update(5)`, subsequent
# calls to `tqdm.update()` will only cause an update after
# at least 5 more iterations.
if self.maxinterval and dt >= self.maxinterval:
self.miniters = dn * (self.mininterval or self.maxinterval) / dt
elif self.smoothing:
# EMA miniters update
self.miniters = self._ema_miniters(
dn * (self.mininterval / dt if self.mininterval and dt
else 1))
else:
# max iters between two prints
self.miniters = max(self.miniters, dn)
# Store old values for next call
self.last_print_n = self.n
self.last_print_t = cur_t
return True
def close(self):
"""Cleanup and (if leave=False) close the progressbar."""
if self.disable:
return
# Prevent multiple closures
self.disable = True
# decrement instance pos and remove from internal set
pos = abs(self.pos)
self._decr_instances(self)
if self.last_print_t < self.start_t + self.delay:
# haven't ever displayed; nothing to clear
return
# GUI mode
if getattr(self, 'sp', None) is None:
return
# annoyingly, _supports_unicode isn't good enough
def fp_write(s):
self.fp.write(_unicode(s))
try:
fp_write('')
except ValueError as e:
if 'closed' in str(e):
return
raise # pragma: no cover
leave = pos == 0 if self.leave is None else self.leave
with self._lock:
if leave:
# stats for overall rate (no weighted average)
self._ema_dt = lambda: None
self.display(pos=0)
fp_write('\n')
else:
# clear previous display
if self.display(msg='', pos=pos) and not pos:
fp_write('\r')
def clear(self, nolock=False):
"""Clear current bar display."""
if self.disable:
return
if not nolock:
self._lock.acquire()
pos = abs(self.pos)
if pos < (self.nrows or 20):
self.moveto(pos)
self.sp('')
self.fp.write('\r') # place cursor back at the beginning of line
self.moveto(-pos)
if not nolock:
self._lock.release()
def refresh(self, nolock=False, lock_args=None):
"""
Force refresh the display of this bar.
Parameters
----------
nolock : bool, optional
If `True`, does not lock.
If [default: `False`]: calls `acquire()` on internal lock.
lock_args : tuple, optional
Passed to internal lock's `acquire()`.
If specified, will only `display()` if `acquire()` returns `True`.
"""
if self.disable:
return
if not nolock:
if lock_args:
if not self._lock.acquire(*lock_args):
return False
else:
self._lock.acquire()
self.display()
if not nolock:
self._lock.release()
return True
def unpause(self):
"""Restart tqdm timer from last print time."""
if self.disable:
return
cur_t = self._time()
self.start_t += cur_t - self.last_print_t
self.last_print_t = cur_t
def reset(self, total=None):
"""
Resets to 0 iterations for repeated use.
Consider combining with `leave=True`.
Parameters
----------
total : int or float, optional. Total to use for the new bar.
"""
self.n = 0
if total is not None:
self.total = total
if self.disable:
return
self.last_print_n = 0
self.last_print_t = self.start_t = self._time()
self._ema_dn = EMA(self.smoothing)
self._ema_dt = EMA(self.smoothing)
self._ema_miniters = EMA(self.smoothing)
self.refresh()
def set_description(self, desc=None, refresh=True):
"""
Set/modify description of the progress bar.
Parameters
----------
desc : str, optional
refresh : bool, optional
Forces refresh [default: True].
"""
self.desc = desc + ': ' if desc else ''
if refresh:
self.refresh()
def set_description_str(self, desc=None, refresh=True):
"""Set/modify description without ': ' appended."""
self.desc = desc or ''
if refresh:
self.refresh()
def set_postfix(self, ordered_dict=None, refresh=True, **kwargs):
"""
Set/modify postfix (additional stats)
with automatic formatting based on datatype.
Parameters
----------
ordered_dict : dict or OrderedDict, optional
refresh : bool, optional
Forces refresh [default: True].
kwargs : dict, optional
"""
# Sort in alphabetical order to be more deterministic
postfix = OrderedDict([] if ordered_dict is None else ordered_dict)
for key in sorted(kwargs.keys()):
postfix[key] = kwargs[key]
# Preprocess stats according to datatype
for key in postfix.keys():
# Number: limit the length of the string
if isinstance(postfix[key], Number):
postfix[key] = self.format_num(postfix[key])
# Else for any other type, try to get the string conversion
elif not isinstance(postfix[key], _basestring):
postfix[key] = str(postfix[key])
# Else if it's a string, don't need to preprocess anything
# Stitch together to get the final postfix
self.postfix = ', '.join(key + '=' + postfix[key].strip()
for key in postfix.keys())
if refresh:
self.refresh()
def set_postfix_str(self, s='', refresh=True):
"""
Postfix without dictionary expansion, similar to prefix handling.
"""
self.postfix = str(s)
if refresh:
self.refresh()
def moveto(self, n):
# TODO: private method
self.fp.write(_unicode('\n' * n + _term_move_up() * -n))
self.fp.flush()
@property
def format_dict(self):
"""Public API for read-only member access."""
if self.disable and not hasattr(self, 'unit'):
return defaultdict(lambda: None, {
'n': self.n, 'total': self.total, 'elapsed': 0, 'unit': 'it'})
if self.dynamic_ncols:
self.ncols, self.nrows = self.dynamic_ncols(self.fp)
return {
'n': self.n, 'total': self.total,
'elapsed': self._time() - self.start_t if hasattr(self, 'start_t') else 0,
'ncols': self.ncols, 'nrows': self.nrows, 'prefix': self.desc,
'ascii': self.ascii, 'unit': self.unit, 'unit_scale': self.unit_scale,
'rate': self._ema_dn() / self._ema_dt() if self._ema_dt() else None,
'bar_format': self.bar_format, 'postfix': self.postfix,
'unit_divisor': self.unit_divisor, 'initial': self.initial,
'colour': self.colour}
def display(self, msg=None, pos=None):
"""
Use `self.sp` to display `msg` in the specified `pos`.
Consider overloading this function when inheriting to use e.g.:
`self.some_frontend(**self.format_dict)` instead of `self.sp`.
Parameters
----------
msg : str, optional. What to display (default: `repr(self)`).
pos : int, optional. Position to `moveto`
(default: `abs(self.pos)`).
"""
if pos is None:
pos = abs(self.pos)
nrows = self.nrows or 20
if pos >= nrows - 1:
if pos >= nrows:
return False
if msg or msg is None: # override at `nrows - 1`
msg = " ... (more hidden) ..."
if not hasattr(self, "sp"):
raise TqdmDeprecationWarning(
"Please use `tqdm.gui.tqdm(...)`"
" instead of `tqdm(..., gui=True)`\n",
fp_write=getattr(self.fp, 'write', sys.stderr.write))
if pos:
self.moveto(pos)
self.sp(self.__str__() if msg is None else msg)
if pos:
self.moveto(-pos)
return True
@classmethod
@contextmanager
def wrapattr(cls, stream, method, total=None, bytes=True, **tqdm_kwargs):
"""
stream : file-like object.
method : str, "read" or "write". The result of `read()` and
the first argument of `write()` should have a `len()`.
>>> with tqdm.wrapattr(file_obj, "read", total=file_obj.size) as fobj:
... while True:
... chunk = fobj.read(chunk_size)
... if not chunk:
... break
"""
with cls(total=total, **tqdm_kwargs) as t:
if bytes:
t.unit = "B"
t.unit_scale = True
t.unit_divisor = 1024
yield CallbackIOWrapper(t.update, stream, method)
def trange(*args, **kwargs):
"""
A shortcut for tqdm(xrange(*args), **kwargs).
On Python3+ range is used instead of xrange.
"""
return tqdm(_range(*args), **kwargs)
# /dipex-4.54.5.tar.gz/dipex-4.54.5/integrations/aarhus/initial.py
import logging
from dataclasses import dataclass
from typing import Any
from typing import Dict
from typing import List
import config
import uuids
from initial_classes import Class
from initial_classes import CLASSES
from mox_helpers import payloads as mox_payloads
from mox_helpers.mox_helper import create_mox_helper
from mox_helpers.mox_helper import ElementNotFound
from os2mo_data_import import ImportHelper # type: ignore
from os2mo_data_import.mox_data_types import Itsystem
logger = logging.getLogger(__name__)
@dataclass
class ClassImportResult:
source: Class
lora_payload: Dict[str, Any]
async def perform_initial_setup():
"""
Perform all initial bootstrapping of OS2mo.
Imports an organisation if missing, and adds all base facets.
Imports all predetermined classes and IT systems.
"""
settings = config.get_config()
mox_helper = await create_mox_helper(settings.mox_base)
try:
await mox_helper.read_element_organisation_organisation(bvn="%")
except ElementNotFound:
logger.info("No org found in LoRa. Performing initial setup.")
importer = ImportHelper(
create_defaults=True,
mox_base=settings.mox_base,
mora_base=settings.mora_base,
seperate_names=True,
)
importer.add_organisation(
identifier="Århus Kommune",
user_key="Århus Kommune",
municipality_code=751,
uuid=uuids.ORG_UUID,
)
# Perform initial import of org and facets
importer.import_all()
await import_remaining_classes()
await import_it()
async def import_remaining_classes():
"""
Import a set of predetermined classes. All the classes have predefined
UUIDs, which makes this function idempotent.
"""
settings = config.get_config()
mox_helper = await create_mox_helper(settings.mox_base)
result: List[ClassImportResult] = []
for cls in CLASSES:
facet_uuid = await mox_helper.read_element_klassifikation_facet(bvn=cls.facet)
lora_payload = mox_payloads.lora_klasse(
bvn=cls.bvn,
title=cls.titel,
facet_uuid=str(facet_uuid),
org_uuid=str(uuids.ORG_UUID),
scope=cls.scope,
)
await mox_helper.insert_klassifikation_klasse(lora_payload, str(cls.uuid))
result.append(ClassImportResult(cls, lora_payload))
return result
async def import_it():
"""
Import predetermined IT systems. The UUID(s) are predefined which makes this
function idempotent.
"""
settings = config.get_config()
if settings.azid_it_system_uuid == uuids.AZID_SYSTEM:
mox_helper = await create_mox_helper(settings.mox_base)
it_system = Itsystem(system_name="AZ", user_key="AZ")
it_system.organisation_uuid = str(uuids.ORG_UUID)
uuid = uuids.AZID_SYSTEM
json = it_system.build()
await mox_helper.insert_organisation_itsystem(json, str(uuid))
else:
logger.info(
"Settings specify a non-default AZID IT system UUID, not creating "
"default AZ IT system"
)
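# Usage sketch (assumed entry point, not part of this module): the
# coroutines above are idempotent, so a bootstrap step can simply run
#
#     import asyncio
#     asyncio.run(perform_initial_setup())
#
# repeatedly without duplicating the organisation, classes or IT systems.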
# /FrAG-1.1.0.tar.gz/FrAG-1.1.0/frag_pele/AdaptivePELE_repo/AdaptivePELE/spawning/densitycalculator.py
from __future__ import absolute_import, division, print_function, unicode_literals
from AdaptivePELE.constants import blockNames
from abc import abstractmethod
from AdaptivePELE.spawning import densitycalculatortypes
import sys
def continousDensity(contacts):
if contacts > 1.0:
return 8.0
else:
return 64.0/(-4*contacts+6)**3
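# Worked values: continousDensity(0.5) == 64 / (-4*0.5 + 6)**3 == 1.0, and
# continousDensity(1.0) == 64 / 2**3 == 8.0, matching the constant branch,
# so the function is continuous at contacts == 1.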
class DensityCalculatorBuilder():
def build(self, spawningBlock):
"""
Build the DensityCalculator object according to the parameters selected
:param spawningBlock: Spawning block of the control file
:type spawningBlock: dict
:returns: :py:class:`.DensityCalculator` -- DensityCalculator object selected
"""
try:
densityBlock = spawningBlock[blockNames.SpawningParams.density]
except KeyError:
print("Using null density calculator (no preference for any cluster)")
return NullDensityCalculator()
try:
density_type = densityBlock[blockNames.DensityCalculator.type]
except KeyError:
sys.exit("Density calculator must have a type")
if density_type == blockNames.DensityCalculator.null or density_type == blockNames.DensityCalculator.constant:
print("Using constant density")
return NullDensityCalculator()
elif density_type == blockNames.DensityCalculator.heaviside:
try:
paramsBlock = densityBlock[blockNames.DensityCalculator.params]
values = paramsBlock[blockNames.DensityCalculatorParams.values]
conditions = paramsBlock[blockNames.DensityCalculatorParams.conditions]
return DensityCalculatorHeaviside(conditions, values)
except KeyError:
print("Using default parameters for Heaviside density calculator")
return DensityCalculatorHeaviside()
elif density_type == blockNames.DensityCalculator.continuous:
print("Using continuous density calculator")
return ContinuousDensityCalculator()
elif density_type == blockNames.DensityCalculator.exitContinuous:
print("Using inverse continuous density calculator")
return ExitContinousDensityCalculator()
else:
sys.exit("Unknown density calculator type! Choices are: " + str(densitycalculatortypes.DENSITY_CALCULATOR_TYPE_TO_STRING_DICTIONARY.values()))
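# An illustrative sketch of how the builder above might be driven. The literal
# dictionary keys ("density", "type", "continuous") stand in for the blockNames
# constants and are assumptions about the control-file schema.
#
#   spawningBlock = {"density": {"type": "continuous"}}
#   calculator = DensityCalculatorBuilder().build(spawningBlock)
#   calculator.calculate(0.8, contactThreshold=None)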
class DensityCalculator():
def __init__(self):
self.type = "BaseClass"
@abstractmethod
def calculate(self, contacts, contactThreshold):
pass
class DensityCalculatorHeaviside(DensityCalculator):
# Mostly duplicated code with threshold calculator
def __init__(self, conditions=None, values=None):
DensityCalculator.__init__(self)
self.type = densitycalculatortypes.DENSITY_CALCULATOR_TYPES.heaviside
if conditions is None:
conditions = []
if values is None:
values = [1.]
if len(values) != len(conditions) and len(values) != len(conditions) + 1:
            raise ValueError('The number of values must be equal to, or one more than, the number of conditions')
self.conditions = conditions
self.values = values
def calculate(self, contacts, contactThreshold):
"""
        Calculate the density value according to the contact ratio
:param contacts: Contacts ratio
:type contacts: float
:param contactThreshold: Deprecated parameter
:type contactThreshold: float
:returns: float -- Density value for the value of the contacts ratio
"""
for i in range(len(self.conditions)):
# change, so that whole condition is in array
if contacts > self.conditions[i]:
# ensure that the value is a float
return float(self.values[i])
# the way it's built, it makes more sense to return this value, but,
# should check that len(value) = len(conditions) + 1 in order to
# return the "else" value
return self.values[-1]
class NullDensityCalculator(DensityCalculator):
def __init__(self):
DensityCalculator.__init__(self)
self.type = densitycalculatortypes.DENSITY_CALCULATOR_TYPES.null
def calculate(self, contacts, contactThreshold):
"""
        Calculate the density value according to the contact ratio; in this
        case it is always one
:param contacts: Contacts ratio
:type contacts: float
:param contactThreshold: Deprecated parameter
:type contactThreshold: float
:returns: float -- Density value for the value of the contacts ratio
"""
return 1.
class ContinuousDensityCalculator(DensityCalculator):
def __init__(self):
DensityCalculator.__init__(self)
self.type = densitycalculatortypes.DENSITY_CALCULATOR_TYPES.continuous
def calculate(self, contacts, contactThreshold):
"""
        Calculate the density value according to the contact ratio
:param contacts: Contacts ratio
:type contacts: float
:param contactThreshold: Deprecated parameter
:type contactThreshold: float
:returns: float -- Density value for the value of the contacts ratio
"""
return continousDensity(contacts)
class ExitContinousDensityCalculator(DensityCalculator):
def __init__(self):
DensityCalculator.__init__(self)
self.type = densitycalculatortypes.DENSITY_CALCULATOR_TYPES.exitContinous
def calculate(self, contacts, contactThreshold):
# alternative possibility
# if contacts > 1.0:
# return 1/4.0
# elif contacts < 0.24:
# return 4.0
# else:
# return (-4*contacts+6)**3/32.0
        return 1.0/continousDensity(contacts)
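# An illustrative sketch (not part of the original module) exercising the
# calculators directly, bypassing the control-file builder.
if __name__ == "__main__":
    heaviside = DensityCalculatorHeaviside(conditions=[1.0, 0.5],
                                           values=[8.0, 2.0, 1.0])
    print(heaviside.calculate(0.75, contactThreshold=None))   # -> 2.0
    continuous = ContinuousDensityCalculator()
    print(continuous.calculate(1.2, contactThreshold=None))   # -> 8.0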
/Argonaut-0.3.4.tar.gz/Argonaut-0.3.4/argonaut/public/ckeditor/_source/lang/nl.js
/*
Copyright (c) 2003-2010, CKSource - Frederico Knabben. All rights reserved.
For licensing, see LICENSE.html or http://ckeditor.com/license
*/
/**
* @fileOverview Defines the {@link CKEDITOR.lang} object, for the
* Dutch language.
*/
/**#@+
@type String
@example
*/
/**
 * Contains the dictionary of language entries.
* @namespace
*/
CKEDITOR.lang['nl'] =
{
/**
* The language reading direction. Possible values are "rtl" for
* Right-To-Left languages (like Arabic) and "ltr" for Left-To-Right
* languages (like English).
* @default 'ltr'
*/
dir : 'ltr',
/*
* Screenreader titles. Please note that screenreaders are not always capable
* of reading non-English words. So be careful while translating it.
*/
editorTitle : 'Tekstverwerker, %1, druk op ALT 0 voor hulp.',
// ARIA descriptions.
toolbar : 'Werkbalk',
editor : 'Tekstverwerker',
// Toolbar buttons without dialogs.
source : 'Code',
newPage : 'Nieuwe pagina',
save : 'Opslaan',
preview : 'Voorbeeld',
cut : 'Knippen',
copy : 'Kopiëren',
paste : 'Plakken',
print : 'Printen',
underline : 'Onderstreept',
bold : 'Vet',
italic : 'Schuingedrukt',
selectAll : 'Alles selecteren',
removeFormat : 'Opmaak verwijderen',
strike : 'Doorhalen',
subscript : 'Subscript',
superscript : 'Superscript',
horizontalrule : 'Horizontale lijn invoegen',
pagebreak : 'Pagina-einde invoegen',
unlink : 'Link verwijderen',
undo : 'Ongedaan maken',
redo : 'Opnieuw uitvoeren',
// Common messages and labels.
common :
{
browseServer : 'Bladeren op server',
url : 'URL',
protocol : 'Protocol',
upload : 'Upload',
uploadSubmit : 'Naar server verzenden',
image : 'Afbeelding',
flash : 'Flash',
form : 'Formulier',
checkbox : 'Aanvinkvakje',
radio : 'Selectievakje',
textField : 'Tekstveld',
textarea : 'Tekstvak',
hiddenField : 'Verborgen veld',
button : 'Knop',
select : 'Selectieveld',
imageButton : 'Afbeeldingsknop',
notSet : '<niet ingevuld>',
id : 'Kenmerk',
name : 'Naam',
langDir : 'Schrijfrichting',
langDirLtr : 'Links naar rechts (LTR)',
langDirRtl : 'Rechts naar links (RTL)',
langCode : 'Taalcode',
longDescr : 'Lange URL-omschrijving',
cssClass : 'Stylesheet-klassen',
advisoryTitle : 'Aanbevolen titel',
cssStyle : 'Stijl',
ok : 'OK',
cancel : 'Annuleren',
close : 'Sluiten',
preview : 'Voorbeeld',
generalTab : 'Algemeen',
advancedTab : 'Geavanceerd',
validateNumberFailed : 'Deze waarde is geen geldig getal.',
confirmNewPage : 'Alle aangebrachte wijzigingen gaan verloren. Weet u zeker dat u een nieuwe pagina wilt openen?',
confirmCancel : 'Enkele opties zijn gewijzigd. Weet u zeker dat u dit dialoogvenster wilt sluiten?',
options : 'Opties',
target : 'Doel',
targetNew : 'Nieuw venster (_blank)',
targetTop : 'Hele venster (_top)',
targetSelf : 'Zelfde venster (_self)',
targetParent : 'Origineel venster (_parent)',
langDirLTR : 'Left to Right (LTR)', // MISSING
langDirRTL : 'Right to Left (RTL)', // MISSING
styles : 'Style', // MISSING
cssClasses : 'Stylesheet Classes', // MISSING
// Put the voice-only part of the label in the span.
unavailable : '%1<span class="cke_accessibility">, niet beschikbaar</span>'
},
contextmenu :
{
options : 'Context menu opties'
},
// Special char dialog.
specialChar :
{
toolbar : 'Speciaal teken invoegen',
title : 'Selecteer speciaal teken',
options : 'Speciale tekens opties'
},
// Link dialog.
link :
{
toolbar : 'Link invoegen/wijzigen',
other : '<ander>',
menu : 'Link wijzigen',
title : 'Link',
info : 'Linkomschrijving',
target : 'Doel',
upload : 'Upload',
advanced : 'Geavanceerd',
type : 'Linktype',
toUrl : 'URL',
toAnchor : 'Interne link in pagina',
toEmail : 'E-mail',
targetFrame : '<frame>',
targetPopup : '<popup window>',
targetFrameName : 'Naam doelframe',
targetPopupName : 'Naam popupvenster',
popupFeatures : 'Instellingen popupvenster',
popupResizable : 'Herschaalbaar',
popupStatusBar : 'Statusbalk',
popupLocationBar: 'Locatiemenu',
popupToolbar : 'Menubalk',
popupMenuBar : 'Menubalk',
popupFullScreen : 'Volledig scherm (IE)',
popupScrollBars : 'Schuifbalken',
popupDependent : 'Afhankelijk (Netscape)',
popupWidth : 'Breedte',
popupLeft : 'Positie links',
popupHeight : 'Hoogte',
popupTop : 'Positie boven',
id : 'Id',
langDir : 'Schrijfrichting',
langDirLTR : 'Links naar rechts (LTR)',
langDirRTL : 'Rechts naar links (RTL)',
acccessKey : 'Toegangstoets',
name : 'Naam',
langCode : 'Schrijfrichting',
tabIndex : 'Tabvolgorde',
advisoryTitle : 'Aanbevolen titel',
advisoryContentType : 'Aanbevolen content-type',
cssClasses : 'Stylesheet-klassen',
charset : 'Karakterset van gelinkte bron',
styles : 'Stijl',
selectAnchor : 'Kies een interne link',
anchorName : 'Op naam interne link',
anchorId : 'Op kenmerk interne link',
emailAddress : 'E-mailadres',
emailSubject : 'Onderwerp bericht',
emailBody : 'Inhoud bericht',
noAnchors : '(Geen interne links in document gevonden)',
noUrl : 'Geef de link van de URL',
noEmail : 'Geef een e-mailadres'
},
// Anchor dialog
anchor :
{
toolbar : 'Interne link',
menu : 'Eigenschappen interne link',
title : 'Eigenschappen interne link',
name : 'Naam interne link',
errorName : 'Geef de naam van de interne link op'
},
// List style dialog
list:
{
numberedTitle : 'Numbered List Properties', // MISSING
bulletedTitle : 'Bulleted List Properties', // MISSING
type : 'Type', // MISSING
start : 'Start', // MISSING
validateStartNumber :'List start number must be a whole number.', // MISSING
circle : 'Circle', // MISSING
disc : 'Disc', // MISSING
square : 'Square', // MISSING
none : 'None', // MISSING
notset : '<not set>', // MISSING
armenian : 'Armenian numbering', // MISSING
georgian : 'Georgian numbering (an, ban, gan, etc.)', // MISSING
lowerRoman : 'Lower Roman (i, ii, iii, iv, v, etc.)', // MISSING
upperRoman : 'Upper Roman (I, II, III, IV, V, etc.)', // MISSING
lowerAlpha : 'Lower Alpha (a, b, c, d, e, etc.)', // MISSING
upperAlpha : 'Upper Alpha (A, B, C, D, E, etc.)', // MISSING
lowerGreek : 'Lower Greek (alpha, beta, gamma, etc.)', // MISSING
decimal : 'Decimal (1, 2, 3, etc.)', // MISSING
decimalLeadingZero : 'Decimal leading zero (01, 02, 03, etc.)' // MISSING
},
// Find And Replace Dialog
findAndReplace :
{
title : 'Zoeken en vervangen',
find : 'Zoeken',
replace : 'Vervangen',
findWhat : 'Zoeken naar:',
replaceWith : 'Vervangen met:',
notFoundMsg : 'De opgegeven tekst is niet gevonden.',
matchCase : 'Hoofdlettergevoelig',
matchWord : 'Hele woord moet voorkomen',
matchCyclic : 'Doorlopend zoeken',
replaceAll : 'Alles vervangen',
	replaceSuccessMsg : '%1 resultaten vervangen.'
},
// Table Dialog
table :
{
toolbar : 'Tabel',
title : 'Eigenschappen tabel',
menu : 'Eigenschappen tabel',
deleteTable : 'Tabel verwijderen',
rows : 'Rijen',
columns : 'Kolommen',
border : 'Breedte rand',
align : 'Uitlijning',
alignLeft : 'Links',
alignCenter : 'Centreren',
alignRight : 'Rechts',
width : 'Breedte',
widthPx : 'pixels',
widthPc : 'procent',
widthUnit : 'eenheid breedte',
height : 'Hoogte',
cellSpace : 'Afstand tussen cellen',
cellPad : 'Ruimte in de cel',
caption : 'Naam',
summary : 'Samenvatting',
headers : 'Koppen',
headersNone : 'Geen',
headersColumn : 'Eerste kolom',
headersRow : 'Eerste rij',
headersBoth : 'Beide',
invalidRows : 'Het aantal rijen moet een getal zijn groter dan 0.',
invalidCols : 'Het aantal kolommen moet een getal zijn groter dan 0.',
invalidBorder : 'De rand breedte moet een getal zijn.',
invalidWidth : 'De tabel breedte moet een getal zijn.',
invalidHeight : 'De tabel hoogte moet een getal zijn.',
invalidCellSpacing : 'Afstand tussen cellen moet een getal zijn.',
invalidCellPadding : 'Ruimte in de cel moet een getal zijn.',
cell :
{
menu : 'Cel',
insertBefore : 'Voeg cel in voor',
insertAfter : 'Voeg cel in achter',
deleteCell : 'Cellen verwijderen',
merge : 'Cellen samenvoegen',
mergeRight : 'Voeg samen naar rechts',
mergeDown : 'Voeg samen naar beneden',
splitHorizontal : 'Splits cellen horizontaal',
splitVertical : 'Splits cellen verticaal',
title : 'Cel eigenschappen',
cellType : 'Cel type',
rowSpan : 'Rijen samenvoegen',
colSpan : 'Kolommen samenvoegen',
wordWrap : 'Automatische terugloop',
hAlign : 'Horizontale uitlijning',
vAlign : 'Verticale uitlijning',
alignTop : 'Boven',
alignMiddle : 'Midden',
alignBottom : 'Onder',
alignBaseline : 'Basislijn',
bgColor : 'Achtergrondkleur',
borderColor : 'Kleur rand',
data : 'Inhoud',
header : 'Kop',
yes : 'Ja',
no : 'Nee',
invalidWidth : 'De celbreedte moet een getal zijn.',
invalidHeight : 'De celhoogte moet een getal zijn.',
invalidRowSpan : 'Rijen samenvoegen moet een heel getal zijn.',
invalidColSpan : 'Kolommen samenvoegen moet een heel getal zijn.',
chooseColor : 'Kies'
},
row :
{
menu : 'Rij',
insertBefore : 'Voeg rij in voor',
insertAfter : 'Voeg rij in achter',
deleteRow : 'Rijen verwijderen'
},
column :
{
menu : 'Kolom',
insertBefore : 'Voeg kolom in voor',
insertAfter : 'Voeg kolom in achter',
deleteColumn : 'Kolommen verwijderen'
}
},
// Button Dialog.
button :
{
title : 'Eigenschappen knop',
text : 'Tekst (waarde)',
type : 'Soort',
typeBtn : 'Knop',
typeSbm : 'Versturen',
typeRst : 'Leegmaken'
},
// Checkbox and Radio Button Dialogs.
checkboxAndRadio :
{
checkboxTitle : 'Eigenschappen aanvinkvakje',
radioTitle : 'Eigenschappen selectievakje',
value : 'Waarde',
selected : 'Geselecteerd'
},
// Form Dialog.
form :
{
title : 'Eigenschappen formulier',
menu : 'Eigenschappen formulier',
action : 'Actie',
method : 'Methode',
encoding : 'Codering'
},
// Select Field Dialog.
select :
{
title : 'Eigenschappen selectieveld',
selectInfo : 'Informatie',
opAvail : 'Beschikbare opties',
value : 'Waarde',
size : 'Grootte',
lines : 'Regels',
chkMulti : 'Gecombineerde selecties toestaan',
opText : 'Tekst',
opValue : 'Waarde',
btnAdd : 'Toevoegen',
btnModify : 'Wijzigen',
btnUp : 'Omhoog',
btnDown : 'Omlaag',
btnSetValue : 'Als geselecteerde waarde instellen',
btnDelete : 'Verwijderen'
},
// Textarea Dialog.
textarea :
{
title : 'Eigenschappen tekstvak',
cols : 'Kolommen',
rows : 'Rijen'
},
// Text Field Dialog.
textfield :
{
title : 'Eigenschappen tekstveld',
name : 'Naam',
value : 'Waarde',
charWidth : 'Breedte (tekens)',
maxChars : 'Maximum aantal tekens',
type : 'Soort',
typeText : 'Tekst',
typePass : 'Wachtwoord'
},
// Hidden Field Dialog.
hidden :
{
title : 'Eigenschappen verborgen veld',
name : 'Naam',
value : 'Waarde'
},
// Image Dialog.
image :
{
title : 'Eigenschappen afbeelding',
titleButton : 'Eigenschappen afbeeldingsknop',
menu : 'Eigenschappen afbeelding',
infoTab : 'Informatie afbeelding',
btnUpload : 'Naar server verzenden',
upload : 'Upload',
alt : 'Alternatieve tekst',
width : 'Breedte',
height : 'Hoogte',
lockRatio : 'Afmetingen vergrendelen',
unlockRatio : 'Afmetingen ontgrendelen',
resetSize : 'Afmetingen resetten',
border : 'Rand',
hSpace : 'HSpace',
vSpace : 'VSpace',
align : 'Uitlijning',
alignLeft : 'Links',
alignRight : 'Rechts',
alertUrl : 'Geef de URL van de afbeelding',
linkTab : 'Link',
button2Img : 'Wilt u de geselecteerde afbeeldingsknop vervangen door een eenvoudige afbeelding?',
img2Button : 'Wilt u de geselecteerde afbeelding vervangen door een afbeeldingsknop?',
urlMissing : 'De URL naar de afbeelding ontbreekt.',
validateWidth : 'Breedte moet een heel nummer zijn.',
validateHeight : 'Hoogte moet een heel nummer zijn.',
validateBorder : 'Rand moet een heel nummer zijn.',
validateHSpace : 'HSpace moet een heel nummer zijn.',
validateVSpace : 'VSpace moet een heel nummer zijn.'
},
// Flash Dialog
flash :
{
properties : 'Eigenschappen Flash',
propertiesTab : 'Eigenschappen',
title : 'Eigenschappen Flash',
chkPlay : 'Automatisch afspelen',
chkLoop : 'Herhalen',
chkMenu : 'Flashmenu\'s inschakelen',
chkFull : 'Schermvullend toestaan',
scale : 'Schaal',
scaleAll : 'Alles tonen',
scaleNoBorder : 'Geen rand',
scaleFit : 'Precies passend',
access : 'Script toegang',
accessAlways : 'Altijd',
accessSameDomain: 'Zelfde domeinnaam',
accessNever : 'Nooit',
align : 'Uitlijning',
alignLeft : 'Links',
alignAbsBottom : 'Absoluut-onder',
alignAbsMiddle : 'Absoluut-midden',
alignBaseline : 'Basislijn',
alignBottom : 'Beneden',
alignMiddle : 'Midden',
alignRight : 'Rechts',
alignTextTop : 'Boven tekst',
alignTop : 'Boven',
quality : 'Kwaliteit',
qualityBest : 'Beste',
qualityHigh : 'Hoog',
qualityAutoHigh : 'Automatisch hoog',
qualityMedium : 'Gemiddeld',
qualityAutoLow : 'Automatisch laag',
qualityLow : 'Laag',
windowModeWindow: 'Venster',
windowModeOpaque: 'Ondoorzichtig',
windowModeTransparent : 'Doorzichtig',
windowMode : 'Venster modus',
flashvars : 'Variabelen voor Flash',
bgcolor : 'Achtergrondkleur',
width : 'Breedte',
height : 'Hoogte',
hSpace : 'HSpace',
vSpace : 'VSpace',
validateSrc : 'Geef de link van de URL',
validateWidth : 'De breedte moet een getal zijn.',
validateHeight : 'De hoogte moet een getal zijn.',
validateHSpace : 'De HSpace moet een getal zijn.',
validateVSpace : 'De VSpace moet een getal zijn.'
},
// Speller Pages Dialog
spellCheck :
{
toolbar : 'Spellingscontrole',
title : 'Spellingscontrole',
notAvailable : 'Excuses, deze dienst is momenteel niet beschikbaar.',
	errorLoading : 'Er is een fout opgetreden bij het laden van de dienst: %s.',
notInDic : 'Niet in het woordenboek',
changeTo : 'Wijzig in',
btnIgnore : 'Negeren',
btnIgnoreAll : 'Alles negeren',
btnReplace : 'Vervangen',
btnReplaceAll : 'Alles vervangen',
btnUndo : 'Ongedaan maken',
noSuggestions : '-Geen suggesties-',
progress : 'Bezig met spellingscontrole...',
noMispell : 'Klaar met spellingscontrole: geen fouten gevonden',
noChanges : 'Klaar met spellingscontrole: geen woorden aangepast',
oneChange : 'Klaar met spellingscontrole: één woord aangepast',
manyChanges : 'Klaar met spellingscontrole: %1 woorden aangepast',
	ieSpellDownload : 'De spellingscontrole is niet geïnstalleerd. Wilt u deze nu downloaden?'
},
smiley :
{
toolbar : 'Smiley',
title : 'Smiley invoegen',
options : 'Smiley opties'
},
elementsPath :
{
eleLabel : 'Elementenpad',
eleTitle : '%1 element'
},
numberedlist : 'Genummerde lijst',
bulletedlist : 'Opsomming',
indent : 'Inspringen vergroten',
outdent : 'Inspringen verkleinen',
justify :
{
left : 'Links uitlijnen',
center : 'Centreren',
right : 'Rechts uitlijnen',
block : 'Uitvullen'
},
blockquote : 'Citaatblok',
clipboard :
{
title : 'Plakken',
cutError : 'De beveiligingsinstelling van de browser verhinderen het automatisch knippen. Gebruik de sneltoets Ctrl/Cmd+X van het toetsenbord.',
copyError : 'De beveiligingsinstelling van de browser verhinderen het automatisch kopiëren. Gebruik de sneltoets Ctrl/Cmd+C van het toetsenbord.',
pasteMsg : 'Plak de tekst in het volgende vak gebruik makend van uw toetsenbord (<strong>Ctrl/Cmd+V</strong>) en klik op <strong>OK</strong>.',
securityMsg : 'Door de beveiligingsinstellingen van uw browser is het niet mogelijk om direct vanuit het klembord in de editor te plakken. Middels opnieuw plakken in dit venster kunt u de tekst alsnog plakken in de editor.',
pasteArea : 'Plakgebied'
},
pastefromword :
{
confirmCleanup : 'De tekst die u plakte lijkt gekopieerd te zijn vanuit Word. Wilt u de tekst opschonen voordat deze geplakt wordt?',
toolbar : 'Plakken als Word-gegevens',
title : 'Plakken als Word-gegevens',
error : 'Het was niet mogelijk om de geplakte tekst op te schonen door een interne fout'
},
pasteText :
{
button : 'Plakken als platte tekst',
title : 'Plakken als platte tekst'
},
templates :
{
button : 'Sjablonen',
	title : 'Inhoud sjablonen',
options : 'Template opties',
insertOption : 'Vervang de huidige inhoud',
selectPromptMsg : 'Selecteer het sjabloon dat in de editor geopend moet worden (de actuele inhoud gaat verloren):',
emptyListMsg : '(Geen sjablonen gedefinieerd)'
},
showBlocks : 'Toon blokken',
stylesCombo :
{
label : 'Stijl',
panelTitle : 'Opmaakstijlen',
panelTitle1 : 'Blok stijlen',
panelTitle2 : 'In-line stijlen',
panelTitle3 : 'Object stijlen'
},
format :
{
label : 'Opmaak',
panelTitle : 'Opmaak',
tag_p : 'Normaal',
tag_pre : 'Met opmaak',
tag_address : 'Adres',
tag_h1 : 'Kop 1',
tag_h2 : 'Kop 2',
tag_h3 : 'Kop 3',
tag_h4 : 'Kop 4',
tag_h5 : 'Kop 5',
tag_h6 : 'Kop 6',
tag_div : 'Normaal (DIV)'
},
div :
{
title : 'Div aanmaken',
toolbar : 'Div aanmaken',
cssClassInputLabel : 'Stylesheet klassen',
styleSelectLabel : 'Stijl',
IdInputLabel : 'Id',
languageCodeInputLabel : ' Taalcode',
inlineStyleInputLabel : 'Inline stijl',
advisoryTitleInputLabel : 'informatieve titel',
langDirLabel : 'Schrijfrichting',
langDirLTRLabel : 'Links naar rechts (LTR)',
langDirRTLLabel : 'Rechts naar links (RTL)',
edit : 'Div wijzigen',
remove : 'Div verwijderen'
},
font :
{
label : 'Lettertype',
voiceLabel : 'Lettertype',
panelTitle : 'Lettertype'
},
fontSize :
{
label : 'Lettergrootte',
voiceLabel : 'Lettergrootte',
panelTitle : 'Lettergrootte'
},
colorButton :
{
textColorTitle : 'Tekstkleur',
bgColorTitle : 'Achtergrondkleur',
panelTitle : 'Kleuren',
auto : 'Automatisch',
more : 'Meer kleuren...'
},
colors :
{
'000' : 'Zwart',
'800000' : 'Kastanjebruin',
'8B4513' : 'Chocoladebruin',
'2F4F4F' : 'Donkerleigrijs',
'008080' : 'Blauwgroen',
'000080' : 'Marine',
'4B0082' : 'Indigo',
'696969' : 'Donkergrijs',
'B22222' : 'Baksteen',
'A52A2A' : 'Bruin',
'DAA520' : 'Donkergeel',
'006400' : 'Donkergroen',
'40E0D0' : 'Turquoise',
'0000CD' : 'Middenblauw',
'800080' : 'Paars',
'808080' : 'Grijs',
'F00' : 'Rood',
'FF8C00' : 'Donkeroranje',
'FFD700' : 'Goud',
'008000' : 'Groen',
'0FF' : 'Cyaan',
'00F' : 'Blauw',
'EE82EE' : 'Violet',
'A9A9A9' : 'Donkergrijs',
'FFA07A' : 'Lichtzalm',
'FFA500' : 'Oranje',
'FFFF00' : 'Geel',
'00FF00' : 'Felgroen',
'AFEEEE' : 'Lichtturquoise',
'ADD8E6' : 'Lichtblauw',
'DDA0DD' : 'Pruim',
'D3D3D3' : 'Lichtgrijs',
'FFF0F5' : 'Linnen',
'FAEBD7' : 'Ivoor',
'FFFFE0' : 'Lichtgeel',
'F0FFF0' : 'Honingdauw',
'F0FFFF' : 'Azuur',
'F0F8FF' : 'Licht hemelsblauw',
'E6E6FA' : 'Lavendel',
'FFF' : 'Wit'
},
scayt :
{
title : 'Controleer de spelling tijdens het typen',
opera_title : 'Not supported by Opera', // MISSING
enable : 'SCAYT inschakelen',
disable : 'SCAYT uitschakelen',
about : 'Over SCAYT',
toggle : 'SCAYT in/uitschakelen',
options : 'Opties',
langs : 'Talen',
moreSuggestions : 'Meer suggesties',
ignore : 'Negeren',
ignoreAll : 'Alles negeren',
addWord : 'Woord toevoegen',
emptyDic : 'De naam van het woordenboek mag niet leeg zijn.',
optionsTab : 'Opties',
allCaps : 'Ignore All-Caps Words', // MISSING
ignoreDomainNames : 'Ignore Domain Names', // MISSING
mixedCase : 'Ignore Words with Mixed Case', // MISSING
mixedWithDigits : 'Ignore Words with Numbers', // MISSING
languagesTab : 'Talen',
dictionariesTab : 'Woordenboeken',
dic_field_name : 'Dictionary name', // MISSING
dic_create : 'Create', // MISSING
dic_restore : 'Restore', // MISSING
dic_delete : 'Delete', // MISSING
dic_rename : 'Rename', // MISSING
dic_info : 'Initially the User Dictionary is stored in a Cookie. However, Cookies are limited in size. When the User Dictionary grows to a point where it cannot be stored in a Cookie, then the dictionary may be stored on our server. To store your personal dictionary on our server you should specify a name for your dictionary. If you already have a stored dictionary, please type it\'s name and click the Restore button.', // MISSING
aboutTab : 'Over'
},
about :
{
title : 'Over CKEditor',
dlgTitle : 'Over CKEditor',
moreInfo : 'Voor licentie informatie, bezoek onze website:',
copy : 'Copyright © $1. Alle rechten voorbehouden.'
},
maximize : 'Maximaliseren',
minimize : 'Minimaliseren',
fakeobjects :
{
anchor : 'Anker',
flash : 'Flash animatie',
div : 'Pagina einde',
unknown : 'Onbekend object'
},
resize : 'Sleep om te herschalen',
colordialog :
{
title : 'Selecteer kleur',
options : 'Color Options', // MISSING
highlight : 'Actief',
selected : 'Geselecteerd',
clear : 'Wissen'
},
toolbarCollapse : 'Werkbalk inklappen',
toolbarExpand : 'Werkbalk uitklappen',
bidi :
{
ltr : 'Text direction from left to right', // MISSING
rtl : 'Text direction from right to left' // MISSING
}
};
/CtrlF_TF-1.0b1.tar.gz/CtrlF_TF-1.0b1/src/ctrlf_tf/cli_prgm.py
import argparse
import sys
import pandas as pd
import ctrlf_tf as cftf
MAIN_DESCRIPTION = ("CtrlF-TF: TF Binding Site Search via Aligned Sequences.")
def _config_compile_parser(parser):
"""Configure the arguments for the align subprogram."""
required = parser.add_argument_group("required arguments")
optimization = parser.add_argument_group("optimization arguments")
required.add_argument("-a",
"--align_model",
required=True,
type=str,
help="Alignment model")
required.add_argument("-k",
"--kmer_file",
required=True,
type=str,
help="Kmer file in a")
parser.add_argument("-o",
"--output",
type=str,
default=None,
help="Output file, stdout by default")
parser.add_argument("-p",
"--palindrome",
action="store_true",
help="Boolean flag if the model is palindromic")
parser.add_argument("-m",
"--meme",
action="store_true",
help="Boolean flag if the model is in MEME format")
parser.add_argument("-g",
"--gap_limit",
type=int,
default=0,
help="""Filters the kmer dataframe for kmers with a
max count of gaps. Must be 0 or a positive
integer (default is 0).""")
parser.add_argument("-t",
"--threshold",
type=float,
help=("Threshold score for kmers to align, if no "
"-tc argument provided, uses 3rd column."))
parser.add_argument("-tc",
"--threshold_column",
type=str,
help=("Column in the kmer dataframe to use for the "
"rank score (Default is 3rd column)."))
parser.add_argument("-r",
"--range",
nargs=2,
type=int,
default=(0, 0),
help="""Core range for PWM model (1-based), default is
the whole input""")
parser.add_argument("-cg",
"--core_gap",
nargs='*',
type=int,
default=None,
help="""Positions within the core range (1-based,
relative to core range '-r') that are not part of the
kmer description of a site. Must be given with the '-r'
argument""")
parser.add_argument("-rc",
"--range_consensus",
type=str,
default=None,
help="""Definition of -r and -cg using the alignment of a
consensus site instead. A '.' character in the
consensus string indicates a -cg position.""")
optimization.add_argument("-opt",
"--optimize",
action="store_true",
help="Boolean flag to perform optimization on classified sequences.")
optimization.add_argument("-c",
"--classify_file",
type=str,
help="Output file from 'ctrlf classify'")
optimization.add_argument("-fpr",
"--fpr_threshold",
type=float,
default=0.01,
help="FPR target for optimization on de bruijn data.")
optimization.add_argument("-orep",
"--output_report",
type=str,
default=None,
help="Output file location for Optimization Report (Default is no output)")
optimization.add_argument("-gthr",
"--gap_thresholds",
nargs='*',
default=[0.35, 0.40, 0.43],
type=float,
help="Rank score thresholds for optimizing gaps (Default is E-score based)")
return parser
def _config_call_parser(parser):
"""Configure the arguments for the call subprogram parser."""
required = parser.add_argument_group("required arguments")
required.add_argument("-c",
"--consensus_sites",
required=True,
type=str,
help="Compiled consensus sites file.")
required.add_argument("-f",
"--fasta_file",
required=True,
type=str,
help="Fasta file of DNA sequences")
parser.add_argument("-o",
"--output",
type=str,
default=None,
help="Output file, stdout by default")
parser.add_argument("-gc",
"--genomic_coordinates",
action="store_true",
help="Parse fasta input for genomic coordinates")
return parser
def _config_classify_parser(parser):
"""Configure the arguments for the classify subprogram parser."""
required = parser.add_argument_group("required arguments")
required.add_argument("-i",
"--input_file",
required=True,
type=str,
help="Input file for classification.")
parser.add_argument("-o",
"--output",
type=str,
default=None,
help="Output file, stdout by default.")
parser.add_argument("-m",
"--method",
type=str,
choices=["kde_z4", "z-score"],
default='kde_z4',
help="Classification method, default = kde_z4.")
parser.add_argument("-z",
"--z_scores",
nargs=2,
type=float,
default=(3, 4),
help="Z-scores to use if classifying by 'z-score'")
parser.add_argument("-sr",
"--sequence_range",
nargs=2,
type=int,
help="Sequence position range to use.")
parser.add_argument("-ln",
"--ln_transform",
action="store_true",
help="Natural log transform the values prior to classification.")
return parser
def _cli_parser():
"""Parse arguments for ctrlf_cli."""
# Define main parser
main_parser = argparse.ArgumentParser(description=MAIN_DESCRIPTION)
    main_parser.add_argument('-v', '--version', action="store_true", help="Return version.")
subparsers = main_parser.add_subparsers(dest="program",
help="Available subcommands:")
# Align program parser definition
align_parser = subparsers.add_parser("compile",
help="Align and compile k-mers into aligned sequences containing sites.")
align_parser = _config_compile_parser(align_parser)
# Call program parser definition
call_parser = subparsers.add_parser("sitecall",
help="Call sites using the aligned sequences containing sites.")
call_parser = _config_call_parser(call_parser)
# Classify program parser
classify_parser = subparsers.add_parser("classify",
help="Classify a table of values and sequences for optimization.")
classify_parser = _config_classify_parser(classify_parser)
return main_parser
def _args_to_align_parameters(args) -> cftf.AlignParameters:
"""Convert output from argument parser to AlignParameters."""
# Convert meme boolean flag to format choice
if args.meme:
file_format = "MEME"
else:
file_format = "Tabular"
# Convert range argument to AlignParameters format
if args.range:
start_parameter = args.range[0]
end_parameter = args.range[1]
result = cftf.AlignParameters(kmer_file=args.kmer_file,
pwm_file=args.align_model,
pwm_file_format=file_format,
core_start=start_parameter,
core_end=end_parameter,
core_gaps=args.core_gap,
range_consensus=args.range_consensus,
gap_limit=args.gap_limit,
threshold=args.threshold,
threshold_column=args.threshold_column,
palindrome=args.palindrome)
return result
def _align_parser_validation(parser, args) -> bool:
"""Validate argument inputs, raise parser.error if invalid."""
if args.gap_limit is not None and args.gap_limit < 0:
parser.error("'-g' given a negative integer, needs to be 0 or more")
if args.core_gap is not None and args.range is None:
parser.error("'-cg' was given without specifying '-r'")
if args.range_consensus and args.range != (0, 0):
parser.error("-r must be specified with either -r or -rc, not both.")
return True
def _compile_program(args):
parameters = _args_to_align_parameters(args)
if args.optimize:
gap_thresholds = {}
for idx, i in enumerate(args.gap_thresholds):
gap_thresholds[idx] = i
classified_seqs = cftf.ClassifiedSequences.load_from_file(args.classify_file)
opt_obj = cftf.Optimize(align_parameters=parameters,
classified_df=classified_seqs.dataframe,
fpr_threshold=args.fpr_threshold,
gap_thresholds=gap_thresholds)
parameters = opt_obj.optimal_parameters
if args.output_report:
opt_obj.save_to_file(args.output_report)
compiled_kmers = cftf.CompiledKmers.from_parameters(parameters)
compiled_kmers.save_compiled_sites(args.output)
def _call_program(args):
ctrlf_object = cftf.CtrlF.from_compiled_sites(args.consensus_sites)
if args.output:
output = args.output
else:
output = sys.stdout
ctrlf_object.call_sites_from_fasta(args.fasta_file,
args.genomic_coordinates,
output)
def _classify_program(args):
input_df = pd.read_csv(args.input_file,
sep='\t',
header=None)
if args.sequence_range:
string_start = args.sequence_range[0]
string_end = args.sequence_range[1]
else:
string_start = None
string_end = None
results = cftf.ClassifiedSequences.classify_from_dataframe(df=input_df,
method=args.method,
z_negative=args.z_scores[0],
z_positive=args.z_scores[1],
sequence_start=string_start,
sequence_end=string_end,
ln_transform=args.ln_transform)
if args.output:
results.save_to_file(args.output)
else:
results.save_to_stdout()
def main():
"""CtrlF-TF CLI logic."""
parser = _cli_parser()
arguments = parser.parse_args()
    # Dispatch based on the requested subprogram
if arguments.program == "compile":
_align_parser_validation(parser, arguments)
_compile_program(arguments)
elif arguments.program == "classify":
_classify_program(arguments)
elif arguments.version:
print(cftf.__version__)
elif arguments.program == "sitecall":
_call_program(arguments)
if __name__ == "__main__":
    main()
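# Illustrative command-line usage of the three subprograms wired up above; the
# file names are hypothetical. The `ctrlf` entry point name follows the
# "ctrlf classify" reference in the compile parser's help text.
#
#   ctrlf classify -i pbm_data.tsv -o classified.tsv -m kde_z4
#   ctrlf compile -a model.txt -k kmers.tsv -o sites.txt \
#       -opt -c classified.tsv -fpr 0.01
#   ctrlf sitecall -c sites.txt -f sequences.fasta -o calls.bed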
/DESaster-0.1.8.tar.gz/DESaster-0.1.8/desaster/entities.py
from desaster.structures import SingleFamilyResidential, Building
from desaster.hazus import setContentsDamageValueHAZUS
import names, warnings, sys
from simpy import Container
class Entity(object):
"""A base class for representing entities, such as households, businesses,
agencies, NGOs, etc.
Methods:
__init__(self, env, name, write_story = False)
story_to_text()
"""
def __init__(self, env, name = None, savings = 0, insurance = 0, credit = 0, write_story = False):
"""Initiate an Entity object
Keyword Arguments:
env -- Pointer to SimPy env environment.
name -- A string indicating the entities name.
savings -- Amount of entity savings in $
insurance -- Hazard-specific insurance coverage: coverage / residence.value
credit -- A FICO-like credit score
write_story -- Boolean indicating whether to track an entity's story.
Modified Attributes
self.recovery_funds -- initiated with value of self.savings
"""
self.env = env
# Entity attributes
        self.name = name # Name associated with occupant of the home
self.write_story = write_story # Boolean. Whether to track the entity's story.
self.insurance = insurance # Hazard-specific insurance coverage: coverage / residence.value
self.savings = savings # Amount of entity savings in $
self.credit = credit # A FICO-like credit score
# Entity outputs
self.story = [] # The story of events for each entity
self.claim_put = None # Time put request in for insurance settlement
self.claim_get = None # Time get insurance claim settled
self.claim_amount = 0.0 # Amount of insurance claim payout
self.fema_put = None # Time put request in for FEMA assistance
self.fema_get = None # Time get FEMA assistance
self.fema_amount = 0.0 # Amount of assistance provided by FEMA
self.sba_put = None # Time put request for loan
self.sba_get = None # Time get requested loan
self.sba_amount = 0.0 # Amount of loan received
        try:
            self.recovery_funds = Container(env, init=self.savings) # Total funds available to entity to recover; init must be > 0
        except Exception:
            self.recovery_funds = Container(env, init=1) # Fall back to the minimum allowed value (init must be > 0)
def story_to_text(self):
"""Join list of story strings into a single story string."""
return ''.join(self.story)
class Owner(Entity):
"""An class that inherits from the Entity() class to represent any entity
that owns property. An owner does not necessarily have a residence (e.g.,
landlord). For the most part this is class is to define subclasses with Owner()
attributes.
Methods:
__init__(self, env, name, attributes_df, building_stock, write_story = False)
"""
def __init__(self, env, name = None, savings = 0, insurance = 0, credit = 0, real_property = None, write_story = False):
"""Initiate several attributes related to an Owner entity.
        No universal methods have been defined for the Owner class yet. Methods
        are currently specified in subclasses of Owner.
Keyword Arguments:
env -- Pointer to SimPy env environment.
name -- A string indicating the entities name.
savings -- Amount of entity savings in $
insurance -- Hazard-specific insurance coverage: coverage / residence.value
credit -- A FICO-like credit score
real_property -- A building object, such as structures.SingleFamilyResidential()
write_story -- Boolean indicating whether to track an entity's story.
Inheritance:
Subclass of entities.Entity()
"""
Entity.__init__(self, env, name, savings, insurance, credit, write_story)
# Attributes
self.property = real_property # A building object from desaster.structures
# Owner env outputs
self.inspection_put = None # Time put request in for house inspection
self.inspection_get = None # Time get house inspection
self.assistance_payout = 0.0 # Amount of generic assistance provided (e.g., Red Cross)
self.repair_put = None # Time put request in for house repair
self.repair_get = None # Time get house repair completed
self.demolition_put = None # Time demolition requested
self.demolition_get = None # Time demolition occurs
self.permit_put = None # Time put request for building permit
self.permit_get = None # Time get requested building permit
self.assessment_put = None # Time put request for engineering assessment
self.assessment_get = None # Time put request for engineering assessment
self.gave_up_funding_search = None # Time entity gave up on some funding
# process; obviously can't keep track
# of multiple give ups
self.prior_properties = [] # A list to keep track of entity's previous properties
class Household(Entity):
"""Define a Household() class to represent a group of persons that reside
together in a single dwelling unit. A Household() object can not own property,
but does have a residence. For the most part this is class is to define
subclasses with Household() attributes. Also includes methods for writing
household stories.
Methods:
__init__(self, env, name = None, savings = 0, insurance = 0, credit = 0, write_story = False)
writeResides(self):
writeResides(self):
writeResides(self):
writeOccupy(self):
"""
def __init__(self, env, name = None, income = float('inf'), savings = float('inf'),
insurance = 1.0, credit = 850, residence = None, write_story = False):
"""Initiate a entities.Household() object.
Keyword Arguments:
env -- Pointer to SimPy env environment.
name -- A string indicating the entities name.
savings -- Amount of entity savings in $
insurance -- Hazard-specific insurance coverage: coverage / residence.value
credit -- A FICO-like credit score
residence -- A building object, such as structures.SingleFamilyResidential()
that serves as the entity's temporary or permanent residence.
        write_story -- Boolean indicating whether to track an entity's story.
Returns or Attribute Changes:
self.story -- If write_story == True, append entity story strings
"""
Entity.__init__(self, env, name, savings, insurance, credit, write_story)
# Attributes
self.residence = residence
self.income = income
# Entity outputs
self.home_put = None # Time started searching for a new home
self.home_get = None # Time found a new home
self.gave_up_home_search = None # Whether entity gave up search for home
self.occupy_put = None # The time when the entity put's in a request for a home.
# None if request never made.
self.occupy_get = None # The time when the entity receives a home.
# None if never received.
        self.prior_residences = [] # An empty list to record each residence that
# the entity vacates.
self.writeResides()
def writeResides(self):
if self.write_story:
self.story.append('{0} resides at {1}. '.format(
self.name, self.residence.address)
)
def writeStartSearch(self):
if self.write_story:
self.story.append(
'{0} started searching for a new {1} {2:,.0f} days after the event. '.format(
self.name.title(), self.prior_residences[-1].occupancy.lower(), self.home_put)
)
def writeGaveUp(self):
if self.write_story:
self.story.append(
'On day {0:,.0f}, after a {1:,.0f} day search, {2} gave up looking for a new home in the local area. '.format(
self.env.now, self.env.now - self.home_put, self.name.title())
)
def writeOccupy(self):
if self.write_story:
self.story.append(
"{0} occupied the {1} {2:.0f} days after the event. ".format(
self.name.title(), self.residence.occupancy.lower(), self.occupy_get)
)
class OwnerHousehold(Owner, Household):
"""The OwnerHousehold() class has attributes of both entities.Owner() and
entities.Household() classes. It can own property and has a residence, which
do not have to be the same. The OwnerHousehold() class includes methods to
look for a new home to purchase (property), as well as to occupy a residence
(not necessarily it's property). Also includes methods to write stories.
Methods:
replace_home(self, search_patience, building_stock)
occupy(self, duration, callbacks = None)
changeListing(self, listed):
writeInitiateOwnerHousehold(self):
writeHomeBuy(self):
"""
def __init__(self, env, name = None, income = float('inf'), savings = float('inf'),
insurance = 1.0, credit = 850, real_property = None, write_story = False):
"""Define entity inputs and outputs attributes.
Initiate entity's story list string.
Keyword Arguments:
env -- Pointer to SimPy env environment.
name -- A string indicating the entities name.
savings -- Amount of entity savings in $
insurance -- Hazard-specific insurance coverage: coverage / residence.value
credit -- A FICO-like credit score
real_property -- A building object, such as structures.SingleFamilyResidential()
residence -- A building object, such as structures.SingleFamilyResidential()
that serves as the entity's temporary or permanent residence.
write_story -- Boolean indicating whether to track an entity's story.
Returns or Attribute Changes:
self.story -- If write_story == True, append entity story strings
Inheritance:
entities.Household()
entities.Owner()
"""
Owner.__init__(self, env, name, savings, insurance, credit, real_property, write_story)
Household.__init__(self, env, name, income, savings, insurance, credit, self.property, write_story)
self.writeInitiateOwnerHousehold()
def replace_home(self, search_stock, duration, down_payment_pct = 0.10, housing_ratio = 0.3,
price_pct = 1.1, area_pct = 0.9, rooms_tol = 0,
search_patience = float('inf')):
"""A process (generator) representing entity search for permanent housing
based on housing preferences, available housing stock, and patience finding
a new home.
Keyword Arguments:
search_stock -- A SimPy FilterStore that contains one or more
residential building objects (e.g., structures.SingleFamilyResidential)
that represent homes owner is searching to purchase.
duration -- A distributions.ProbabilityDistribution object, KDE_Distribution object
or other type from desaster.distributions
down_payment_pct -- Percentage of home value required for a down payment
housing_ratio -- Maximum percentage of monthly income for acceptable monthly costs
price_pct -- Ratio of existing home value to maximum desirable new home value
area_pct -- Ratio of existing home area to maximum desirable new home area
        rooms_tol -- Number of fewer or additional bedrooms compared to existing home
area that is acceptable for new home
search_patience -- The search duration in which the entity is willing to wait
to find a new home. Does not include the process of
securing money.
Returns or Attribute Changes:
self.story -- Process outcomes appended to story.
self.home_put -- Record time home search starts
self.home_get -- Record time home search stops
self.residence -- Potentially assigned a new residence object.
self.property -- Potentially assigned a new property object.
        self.property.listed -- Potentially set prior property's listed state to True and new one's to False
self.prior_residences -- Potentially append a prior residence object.
self.prior_properties -- Potentially assigned a prior property object.
self.gave_up_home_search -- Set with env.now to indicate if and when
search patience runs out.
self.story -- If write_story == True, append entity story strings
"""
# Record when housing search starts
# Calculate the time that housing search patience ends
# If write_story, write search start time to entity's story
self.home_put = self.env.now
patience_end = self.home_put + search_patience
# Record current residence as prior residence, current property as
# prior property
self.prior_properties.append(self.property)
if self.residence:
self.prior_residences.append(self.residence)
# Write the story
self.writeStartSearch()
# Define timeout process representing entity's patience for finding home.
# Return 'Gave up' if timeout process completes.
home_search_patience = self.env.timeout(patience_end - self.env.now,
value='Gave up')
# Define a FilterStore.get process to find a new home to buy from the vacant
# for sale stock with similar attributes as *original* property.
new_home = search_stock.get(lambda findHome:
findHome.damage_state == 'None'
and findHome.occupancy.lower() == self.prior_properties[0].occupancy.lower()
and (findHome.bedrooms >= self.prior_properties[0].bedrooms + rooms_tol
or findHome.area >= self.prior_properties[0].area * area_pct)
and (findHome.value <= self.prior_properties[0].value * price_pct
or findHome.monthly_cost <= (self.income / 12.0) * housing_ratio)
and findHome.listed == True
)
# Yield both the patience timeout and the housing stock FilterStore get.
# Wait until one or the other process is completed.
# Assign the process that is completed first to the variable.
home_search_outcome = yield home_search_patience | new_home
# Exit the function if the patience timeout completes before a suitable
# home is found in the housing stock.
if home_search_outcome == {home_search_patience: 'Gave up'}:
del self.prior_properties[0] # Didn't replace home, so delete from prior
del self.prior_residences[0] # Didn't replace home, so delete from prior
self.gave_up_home_search = self.env.now
self.writeGaveUp()
return
# Define timeout process representing entity's *remaining* search patience.
# Return 'Gave up' if timeout process completes.
down_payment_patience = self.env.timeout(patience_end - self.env.now,
value='Gave up')
        # Withdraw the down payment (default 10%); wait for more funds if not yet available
down_payment = down_payment_pct * home_search_outcome[new_home].value
get_down_payment = self.recovery_funds.get(down_payment)
# Yield both the remaining patience timeout and down payment get.
down_payment_outcome = yield down_payment_patience | get_down_payment
# Exit the function if the patience timeout completes before a suitable
# home is found in the housing stock.
if down_payment_outcome == {down_payment_patience: 'Gave up'}:
            yield search_stock.put(home_search_outcome[new_home]) # Didn't buy it after all
del self.prior_properties[0] # Didn't replace home, so delete from prior
del self.prior_residences[0] # Didn't replace home, so delete from prior
self.gave_up_home_search = self.env.now
self.writeGaveUp()
return
        # If a new home is found before patience runs out, set the current
        # property's listed attribute to True -- put the home up for sale.
        # The get/put cycle inside changeListing tells SimPy the object's
        # state changed.
        if self.property:
            yield self.env.process(self.changeListing(listed = True))
        # Take the new home off the market. It was already removed from the
        # housing stock by the successful FilterStore.get above, so its
        # listed attribute can be set directly.
        home_search_outcome[new_home].listed = False
# Take a timeout equal to specified time to close home purchase
yield self.env.timeout(duration.rvs())
# Set the newly found home as the entity's property.
self.property = home_search_outcome[new_home]
# Make the entity's property also their residence
self.residence = self.property
# Record the time that the housing search ends.
self.home_get = self.env.now
# If write_story is True, then write results of successful home search to
# entity's story.
self.writeHomeBuy()
    def changeListing(self, listed):
        """Update the listed state of the entity's property. The get/put cycle
        through the FilterStore makes SimPy register the attribute change."""
        get_home = yield self.property.stock.get(lambda getHome:
                                                 getHome.__dict__ == self.property.__dict__
                                                 )
        get_home.listed = listed
        yield self.property.stock.put(get_home)
def occupy(self, duration, callbacks = None):
"""Define process for occupying a residence--e.g., amount of time it takes
to move into a new residence. Currently the method doesn't do much but
make story writing simpler.
Keyword Arguments:
duration -- A distributions.ProbabilityDistribution object that defines
the duration related to how long it takes the entity
to occupy a dwelling.
callbacks -- a generator function containing processes to start after the
completion of this process.
Returns or Attribute Changes:
self.story -- Summary of process outcome as string.
self.occupy_put -- Recording beginning of occupany duration.
self.occupy_get -- Record time of occupancy
"""
self.occupy_put = self.env.now
# Yield timeout equivalent to time required to move back into home.
yield self.env.timeout(duration.rvs())
# Record time got home
self.occupy_get = self.env.now
#If true, write process outcome to story
self.writeOccupy()
if callbacks is not None:
yield self.env.process(callbacks)
else:
pass
def writeInitiateOwnerHousehold(self):
if self.write_story:
# Set story with non-disaster attributes.
self.story.append(
'{0} owns and lives in a {1} room {2} at {3} worth ${4:,.0f}. '.format(self.name,
self.residence.bedrooms, self.residence.occupancy.lower(), self.residence.address,
self.residence.value)
)
def writeHomeBuy(self):
if self.write_story:
self.story.append(
'On day {0:,.0f}, {1} purchased a {2} at {3} with a value of ${4:,.0f}. '.format(
self.home_get, self.name.title(), self.property.occupancy.lower(), self.property.address,
self.property.value)
)
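# An illustrative sketch of driving the housing-search process above inside a
# SimPy simulation. `home`, `vacant_stock` (a FilterStore of for-sale
# residences), and `dist` (a desaster.distributions duration object) are
# hypothetical and would be built elsewhere.
#
#   household = OwnerHousehold(env, name='Smith', real_property=home)
#   env.process(household.replace_home(vacant_stock, dist, search_patience=365))
#   env.run()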
class RenterHousehold(Household):
"""The RenterHousehold() class has attributes of both entities.Entity() and
entities.Household() classes. The class does not have associated property, but
does have an associated landlord (entities.Landlord() object) that owns their
residence. So RenterHousehold() objects can have both residences and landlords
assigned and unassigned to represent, e.g., evictions.
Methods:
replace_home(self, search_patience, building_stock)
occupy(self, duration, callbacks = None)
changeListing(self, listed):
writeInitiateRenterHousehold(self):
writeHomeRent(self):
"""
def __init__(self, env, name = None, income = float('inf'), savings = float('inf'), insurance = 1.0, credit = 850,
residence = None, landlord = None, write_story = False):
"""Define entity inputs and outputs attributes.
Initiate entity's story list string.
Keyword Arguments:
env -- Pointer to SimPy env environment.
name -- A string indicating the entities name.
savings -- Amount of entity savings in $
insurance -- Hazard-specific insurance coverage: coverage / residence.value
credit -- A FICO-like credit score
residence -- A building object, such as structures.SingleFamilyResidential()
that serves as the entity's temporary or permanent residence.
landlord -- An Owner object that represent's the renter's landlord.
write_story -- Boolean indicating whether to track an entity's story.
Returns or Attribute Changes:
self.story -- If write_story == True, append entity story strings
self.landlord -- Record renter's landlord
Inheritance:
Subclass of entities.Household()
"""
# Attributes
self.landlord = landlord
# Initial method calls; This needs to go after landlord assignment.
Household.__init__(self, env, name, income, savings, insurance, credit, residence, write_story)
self.writeInitiateRenterHousehold()
def replace_home(self, search_stock, duration, move_in_ratio = 2.5, housing_ratio = 0.3,
area_pct = 0.9, rooms_tol = 0, notice_time = 20.0,
search_patience = float('inf')):
"""A process (generator) representing entity search for permanent housing
based on housing preferences, available housing stock, and patience finding
a new home.
Keyword Arguments:
search_stock -- A SimPy FilterStore that contains one or more
residential building objects (e.g., structures.SingleFamilyResidential)
that represent homes owner is searching to purchase.
duration -- A distributions.ProbabilityDistribution object, KDE_Distribution object
or other type from desaster.distributions
housing_ratio -- Maximum percentage of monthly income for acceptable monthly costs
move_in_ratio -- A float value that represents move in cost of a new residence
as a ratio of the residence's monthly cost (rent).
area_pct -- Ratio of existing home area to maximum desirable new home area
        rooms_tol -- Number of fewer or additional bedrooms compared to existing home
area that is acceptable for new home
notice_time -- A duration that represents the amount of time between identifying
a desirable listing and the availability of the new residence.
search_patience -- The search duration in which the entity is willing to wait
to find a new home. Does not include the process of
securing money.
Returns or Attribute Changes:
self.story -- Process outcomes appended to story.
self.home_put -- Record time home search starts
self.home_get -- Record time home search stops
self.residence -- Potentially assigned a new residence object.
        self.residence.listed -- Potentially set prior residence's listed state to True and new one's to False
self.prior_residences -- Potentially append a prior residence object.
self.gave_up_home_search -- Set with env.now to indicate if and when
search patience runs out.
self.story -- If write_story == True, append entity story strings
"""
# Record when housing search starts
# Calculate the time that housing search patience ends
# If write_story, write search start time to entity's story
self.home_put = self.env.now
patience_end = self.home_put + search_patience
# Put current residence as a prior residence
if self.residence:
self.prior_residences.append(self.residence)
# Define timeout process representing entity's *remaining* search patience.
# Return 'Gave up' if timeout process completes.
find_search_patience = self.env.timeout(patience_end - self.env.now,
value='Gave up')
self.writeStartSearch()
# Define a FilterStore.get process to find a new home to rent from the vacant
# for rent stock with similar attributes as original residence.
new_home = search_stock.get(lambda findHome:
findHome.damage_state == 'None'
and findHome.occupancy.lower() == self.prior_residences[0].occupancy.lower()
and (findHome.bedrooms >= self.prior_residences[0].bedrooms + rooms_tol
or findHome.area >= self.prior_residences[0].area * area_pct)
and findHome.monthly_cost <= (self.income / 12.0) * housing_ratio
and findHome.listed == True
)
# Yield both the patience timeout and the housing stock FilterStore get.
# Wait until one or the other process is completed.
# Assign the process that is completed first to the variable.
home_search_outcome = yield find_search_patience | new_home
# Exit the function if the patience timeout completes before a suitable
# home is found in the housing stock.
if home_search_outcome == {find_search_patience: 'Gave up'}:
self.gave_up_home_search = self.env.now
# If write_story, note in the story that the entity gave up
# the search.
self.writeGaveUp()
return
# Define timeout process representing entity's *remaining* search patience.
# Return 'Gave up' if timeout process completes.
move_in_cost_patience = self.env.timeout(patience_end - self.env.now,
value='Gave up')
        # Withdraw the move-in cost; wait for more funds if not yet available
move_in_cost = move_in_ratio * home_search_outcome[new_home].monthly_cost
get_move_in_cost = self.recovery_funds.get(move_in_cost)
# Yield both the remaining patience timeout and down payment get.
move_in_cost_outcome = yield move_in_cost_patience | get_move_in_cost
# Exit the function if the patience timeout completes before a suitable
# home is found in the housing stock.
if move_in_cost_outcome == {move_in_cost_patience: 'Gave up'}:
            yield search_stock.put(home_search_outcome[new_home]) # Didn't rent it after all
            if self.residence:
                del self.prior_residences[0] # Didn't replace home, so delete from prior
self.gave_up_home_search = self.env.now
self.writeGaveUp()
return
        # If a new home is found before patience runs out, change the current
        # residence's listed state to True to indicate it is for rent (if the
        # tenant has a residence). The get/put cycle inside changeListing
        # tells SimPy the object's state changed.
        if self.residence:
            yield self.env.process(self.changeListing(listed = True))
        # Take the new rental off the market. It was already removed from the
        # housing stock by the successful FilterStore.get above, so its
        # listed attribute can be set directly.
        home_search_outcome[new_home].listed = False
        # Take a timeout equal to the specified notice time before the entity can move in
yield self.env.timeout(duration.rvs())
# Set newly found home as residence
self.residence = home_search_outcome[new_home]
# Record the time that the housing search ends.
self.home_get = self.env.now
# If write_story is True, then write results of successful home search to
# entity's story.
self.writeHomeRent()
    def changeListing(self, listed):
        get_home = yield self.residence.stock.get(lambda getHome:
                                                  getHome.__dict__ == self.residence.__dict__
                                                  )
        get_home.listed = listed
        yield self.residence.stock.put(get_home)
def occupy(self, duration, callbacks = None):
"""A process for a RenterHousehold to occupy a residence.
At the moment all this does is represent some duration it takes for the
entity to move into a new residence. Potentially eventually can add logic
related to, e.g., rent increases.
Keyword Arguments:
duration -- A distribution.ProbabilityDistribution object or
similar that defines the duration related to how
long it takes the entity to occupy a dwelling.
callbacks -- a generator function containing processes to start after the
completion of this process.
Returns or Attribute Changes:
self.story -- Summary of process outcome as string.
self.occupy_put -- Recording beginning of occupany duration.
self.occupy_get -- Record time of occupancy
"""
self.occupy_put = self.env.now
        # TODO: Check whether the renter can still afford the rent; this
        # requires a function that estimates rent increases.
# Yield timeout equivalent to time required to move back into home.
yield self.env.timeout(duration.rvs())
# Record time got home
self.occupy_get = self.env.now
self.writeOccupy()
        if callbacks is not None:
            yield self.env.process(callbacks)
def writeInitiateRenterHousehold(self):
if self.write_story:
self.story.append(
                '{0} rents and lives in a {1} bedroom {2} at {3}. '.format(
self.name, self.residence.bedrooms, self.residence.occupancy.lower(),
self.residence.address)
)
def writeHomeRent(self):
if self.write_story:
self.story.append(
                'On day {0:,.0f}, {1} leased a {2} at {3} with a rent of ${4:,.0f}. '.format(
                    self.home_get, self.name.title(), self.residence.occupancy.lower(),
                    self.residence.address, self.residence.monthly_cost)
)
class Landlord(Owner):
"""A Landlord() class is a subclass of entiites.Owner() but has an attributes
that allows it to have a tenant (e.g., entities.RenterHousehold). Otherwise,
similar to entities.Owner().
Methods:
evict_tenant(self):
writeInitiateLandlord(self):
writeEvicted(self):
"""
def __init__(self, env, name = None, savings = 0, insurance = 0, credit = 0, real_property = None,
tenant = None, write_story = False):
"""Define landlord's inputs and outputs attributes.
Initiate landlord's story list string.
Keyword Arguments:
        env -- Pointer to the SimPy simulation environment.
        name -- A string indicating the entity's name.
        savings -- Amount of entity savings in $.
        insurance -- Hazard-specific insurance coverage: coverage / residence.value.
        credit -- A FICO-like credit score.
        real_property -- A building object, such as structures.SingleFamilyResidential().
        tenant -- A RenterHousehold object that serves as the landlord's tenant.
        write_story -- Boolean indicating whether to track the entity's story.
Modified Attributes:
self.tenant -- Set landlord's tenant
self.story -- Initiate landlord's story
Inheritance:
Subclass of entities.Owner()
"""
Owner.__init__(self, env, name, savings, insurance, credit, real_property, write_story)
        # Landlord-specific attributes
self.tenant = tenant
self.writeInitiateLandlord()
def evict_tenant(self):
self.tenant.prior_residences.append(self.tenant.residence)
self.tenant.residence = None
self.writeEvicted()
def writeInitiateLandlord(self):
if self.write_story:
# Set story with non-disaster attributes.
self.story.append(
'{0} rents out a {1} bedroom {2} at {3} worth ${4:,.0f}. '.format(
self.name, self.property.bedrooms, self.property.occupancy.lower(),
self.property.address, self.property.value)
)
def writeEvicted(self):
        if self.tenant.write_story:
self.tenant.story.append(
'{0} was evicted because the {1} had {2} damage. '.format(
self.tenant.name, self.property.occupancy.lower(),
self.property.damage_state.lower()
)
) | PypiClean |
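# --- Illustrative sketch (not part of the package above) ---
# A minimal, self-contained demonstration of the SimPy pattern used by the
# home search above: yield a patience timeout OR a FilterStore.get and act
# on whichever completes first. All names here are hypothetical.
import simpy

def seeker(env, stock, patience):
    # Race a patience timeout against a filtered get on the stock.
    give_up = env.timeout(patience, value='Gave up')
    request = stock.get(lambda item: item['listed'])
    outcome = yield give_up | request
    if give_up in outcome:
        print(f'{env.now}: gave up the search')
    else:
        print(f'{env.now}: found {outcome[request]}')

def lister(env, stock):
    # Put a matching item into the stock after a delay.
    yield env.timeout(3)
    yield stock.put({'listed': True, 'address': '12 Elm St'})

env = simpy.Environment()
stock = simpy.FilterStore(env)
env.process(seeker(env, stock, patience=10))
env.process(lister(env, stock))
env.run()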
/Faker-19.3.1.tar.gz/Faker-19.3.1/faker/providers/color/hy_AM/__init__.py | from collections import OrderedDict
from .. import Provider as ColorProvider
class Provider(ColorProvider):
"""Implement color provider for ``hy_AM`` locale."""
all_colors = OrderedDict(
(
("Ալիսի կապույտ", "#F0F8FF"),
("Անանուխի կրեմ", "#F5FFFA"),
("Անտառային կանաչ", "#228B22"),
("Արծաթագույն", "#C0C0C0"),
("Արքայական կապույտ", "#4169E1"),
("Բաց դեղին", "#FFFFE0"),
("Բաց դեղնաոսկեգույն", "#FAFAD2"),
("Բաց երկնագույն", "#87CEFA"),
("Բաց ծովային կանաչ", "#20B2AA"),
("Բաց կաթնագույն", "#FFFFF0"),
("Բաց կանաչ", "#90EE90"),
("Բաց կապույտ", "#ADD8E6"),
("Բաց կապտականաչ", "#E0FFFF"),
("Բաց կորալ", "#F08080"),
("Բաց մանուշակագույն", "#EE82EE"),
("Բաց մոխրագույն թերթաքար", "#778899"),
("Բաց մոխրագույն", "#D3D3D3"),
("Բաց նշագույն", "#FFEBCD"),
("Բաց պողպատե կապույտ", "#B0C4DE"),
("Բաց սաղմոնագույն", "#FFA07A"),
("Բաց վարդագույն", "#FFB6C1"),
("Բեժ", "#F5F5DC"),
("Բոսորագույն", "#DC143C"),
("Գարնանային կանաչ", "#00FF7F"),
("Գեյնսբորրո", "#DCDCDC"),
("Գունատ կանաչ", "#98FB98"),
("Գունատ կարմիր մանուշակագույն", "#DB7093"),
("Գունատ ոսկեգույն", "#EEE8AA"),
("Գունատ փիրուզագույն", "#AFEEEE"),
("Գրասենյակային կանաչ", "#008000"),
("Դարչնագույն ավազ", "#F4A460"),
("Դարչնագույն", "#964B00"),
("Դեղին", "#FFFF00"),
("Դեղձի կրեմ", "#FFDAB9"),
("Դեղնականաչ", "#9ACD3"),
("Դոդջերս կապույտ", "#1E90FF"),
("Եգիպտացորենի մազիկներ", "#FFF8DC"),
("Երկնագույն մառախուղ", "#F0FFFF"),
("Երկնագույն", "#87CEEB"),
("Զինվորական կանաչ", "#6B8E23"),
("Թամբի դարչնագույն", "#8B4513"),
("Թեժ վարդագույն", "#FF69B4"),
("Թուխ", "#D2B48C"),
("Ինդիգո", "#4B0082"),
("Լայմի կանաչ", "#32CD32"),
("Լավանդ", "#E6E6FA"),
("Լոլիկ", "#FF6347"),
("Խակի", "#F0E68C"),
("Խոլորձագույն", "#DA70D6"),
("Ծխագույն", "#F5F5F5"),
("Ծովախեցի", "#FFF5EE"),
("Ծովակնագույն", "#7FFFD4"),
("Ծովային կանաչ", "#2E8B57"),
("Կադետների կապույտ", "#5F9EA0"),
("Կաթնագույն", "#FFFAF0"),
("Կակաոյի դարչնագույն", "#D2691E"),
("Կանաչ", "#00FF00"),
("Կանաչադեղին", "#ADFF2F"),
("Կապույտ թերթաքար", "#6A5ACD"),
("Կապույտ մանուշակագույն", "#8A2BE2"),
("Կապույտ փոշի", "#B0E0E6"),
("Կապույտ", "#0000FF"),
("Կապտականաչ", "#00FFFF"),
("Կարմիր դարչնագույն", "#A52A2A"),
("Կարմիր լավանդ", "#FFF0F5"),
("Կարմիր մանուշակագույն", "#C71585"),
("Կարմիր", "#FF0000"),
("Կեսգիշերային կապույտ", "#191970"),
("Կիտրոնի շիֆոն", "#FFFACD"),
("Կորալ", "#FF7F50"),
("Հարած պապայա", "#FFEFD5"),
("Հին ժանյակ", "#FDF5E6"),
("Հնաոճ սպիտակ", "#FAEBD7"),
("Հնդկական կարմիր", "#CD5C5C"),
("Հրակայուն աղյուս", "#B22222"),
("Ձիթապտղի գույն", "#808000"),
("Ձյունաճերմակ", "#FFFAFA"),
("Մանուշակագույն", "#800080"),
("Մեղրացող սեխ", "#F0FFF0"),
("Միջին գարնանային կանաչ", "#00FA9A"),
("Միջին խոլորձագույն", "#BA55D3"),
("Միջին ծովակնագույն", "#66CDAA"),
("Միջին ծովային կանաչ", "#3CB371"),
("Միջին կապույտ թերթաքար", "#7B68EE"),
("Միջին կապույտ", "#0000CD"),
("Միջին կապտականաչ", "#9370DB"),
("Միջին փիրուզագույն", "#48D1CC"),
("Մոխրագույն թերթաքար", "#708090"),
("Մոխրագույն", "#808080"),
("Մոկասին", "#FFE4B5"),
("Մուգ երկնագույն", "#00BFFF"),
("Մուգ խակի", "#BDB76B"),
("Մուգ խոլորձագույն", "#9932CC"),
("Մուգ ծովային կանաչ", "#8FBC8F"),
("Մուգ կանաչ", "#006400"),
("Մուգ կապույտ թերթաքար", "#483D8B"),
("Մուգ կապույտ", "#00008B"),
("Մուգ կապտականաչ", "#008080"),
("Մուգ կարմիր", "#8B0000"),
("Մուգ ձիթապտղի կանաչ", "#556B2F"),
("Մուգ մանուշակագույն", "#9400D3"),
("Մուգ մոխրագույն թերթաքար", "#2F4F4F"),
("Մուգ մոխրագույն", "#696969"),
("Մուգ մոխրագույն", "#A9A9A9"),
("Մուգ նարնջագույն", "#FF8C00"),
("Մուգ ոսկեգույն", "#B8860B"),
("Մուգ սաղմոնագույն", "#E9967A"),
("Մուգ վառ մանուշակագույն", "#8B008B"),
("Մուգ վարդագույն", "#FF1493"),
("Մուգ փիրուզագույն", "#00CED1"),
("Նավահո սպիտակ", "#FFDEAD"),
("Նավատորմի կապույտ", "#000080"),
("Նարնջագույն կարմիր", "#FF4500"),
("Նարնջագույն", "#FFA500"),
("Նշագույն", "#FFE4C4"),
("Շագանակագույն", "#800000"),
("Շարտրուզ", "#7FFF00"),
("Ոսկեգույն ձող", "#DAA520"),
("Ոսկեգույն", "#FFD700"),
("Պերու", "#CD853F"),
("Պողպատե կապույտ", "#4682B4"),
("Սալոր", "#DDA0DD"),
("Սաղմոնագույն", "#FA8072"),
("Սիենա", "#A0522D"),
("Սիզամարգի կանաչ", "#7CFC00"),
("Սպիտակ ստվեր", "#F8F8FF"),
("Սպիտակ", "#FFFFFF"),
("Սև", "#000000"),
("Վառ մանուշակագույն", "#FF00FF"),
("Վարդագույն", "#FFC0CB"),
("Վարդագույն", "#FFE4E1"),
("Վարդադարչնագույն", "#BC8F8F"),
("Վուշ", "#FAF0E6"),
("Տատասկ", "#D8BFD8"),
("Տերեփուկի կապույտ", "#6495ED"),
("Ցորենագույն", "#F5DEB3"),
("Փիրուզագույն", "#40E0D0"),
("Փխրուն փայտ", "#DEB887"),
)
)
safe_colors = (
"սև",
"շագանակագույն",
"կանաչ",
"նավատորմի կապույտ",
"ձիթապտղի գույն",
"մանուշակագույն",
"մուգ կապտականաչ",
"լայմ",
"կապույտ",
"արծաթագույն",
"մոխրագույն",
"դեղին",
"վառ մանուշակագույն",
"կապտականաչ",
"սպիտակ",
) | PypiClean |
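# --- Illustrative usage sketch (not part of the provider above) ---
# Standard Faker API calls; the outputs in the comments are examples only.
from faker import Faker

fake = Faker("hy_AM")
print(fake.color_name())       # e.g. "Կապույտ"
print(fake.safe_color_name())  # e.g. "կանաչ"
print(fake.hex_color())        # e.g. "#d82c08"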
/EA_framework-2.2.3-py3-none-any.whl/EA_sequential/EA.py | from EA_sequential.Population import *
import numpy as np
class EA:
""" Main Evolutionary Strategy class
"""
def __init__(self, minimize, budget, patience,
parents_size, offspring_size,
individual_size, discrete,
recombination, mutation,
selection, evaluation,
verbose):
self.minimize = minimize
self.budget = budget
self.patience = patience
self.parents_size = parents_size
self.offspring_size = offspring_size
self.individual_size = individual_size
self.discrete = discrete
self.recombination = recombination
self.mutation = mutation
self.selection = selection
self.evaluation = evaluation
self.verbose=verbose
self.parents = Population( self.parents_size,
self.individual_size,
self.discrete, mutation)
self.offspring = Population(self.offspring_size,
self.individual_size,
self.discrete, mutation)
def run(self):
""" Runs the Evolutionary Strategy.
Returns the best individual and the best fitness.
"""
# Initialize budget and patience
self.curr_budget, self.curr_patience = 0, 0
# Initialize number of better generations found and total generations counters
self.better_generations = 0
self.gen_count = 0
self.all_best_evals = []
# Initial parents setup
self.evaluation(self.parents)
self.best_eval, self.best_index = self.parents.best_fitness(self.minimize)
self.best_indiv = self.parents.individuals[self.best_index]
self.all_best_evals.append(self.best_eval)
self.curr_budget += self.parents_size
# debug print
        if self.verbose > 1:  # prints generation 0 best eval
print(f"Generation {self.gen_count} Best eval: {np.round(self.best_eval, 3)}, budget: {self.curr_budget}/{self.budget}")
while self.curr_budget < self.budget:
# check offspring population size to match maximum budget
self.population_size_control()
# Recombination: creates new offspring
if self.recombination is not None:
self.recombination(self.parents, self.offspring)
# Mutation: mutate offspring population
self.mutation(self.offspring)
# Evaluation: evaluate offspring population
self.evaluation(self.offspring)
            # Selection: select the parents for the next generation
self.selection(self.parents, self.offspring, self.minimize)
# Update control variables, e.g. budget and best individual
self.update_control_vars()
if self.verbose > 0: # prints once per run
print(f"Best eval: {self.best_eval}")
return self.best_indiv, np.array(self.all_best_evals)
def population_size_control(self):
""" Check offspring population size to match maximum budget
"""
if (self.budget - self.curr_budget) / self.offspring_size < 1:
new_offspring_size = self.budget - self.curr_budget
self.offspring.pop_size = new_offspring_size
self.offspring.individuals = self.offspring.individuals[:new_offspring_size]
if self.offspring.mut_params is not None:
self.offspring.mut_params = self.offspring.mut_params[:new_offspring_size]
def update_control_vars(self):
""" Updates all control variables
"""
# Update the best individual
# best individual is in the first position due to selection
curr_best_eval, _ = self.parents.best_fitness(minimize=self.minimize)
self.all_best_evals.append(curr_best_eval)
# increment budget and generation counter
self.curr_budget += self.offspring.pop_size
self.gen_count += 1
# reset sigmas if patience has been defined
if self.patience is not None and self.curr_patience >= self.patience:
if self.verbose > 1:
print(f"~~ Reinitializing sigmas for generation {self.gen_count}. ~~")
self.parents.mut_params_init()
self.curr_patience = 0
if (self.minimize and curr_best_eval < self.best_eval) \
or (not self.minimize and curr_best_eval > self.best_eval): # min or max new best conditions
self.best_indiv = self.parents.individuals[0]
self.best_eval = curr_best_eval
# increment number of successful generations
self.better_generations += 1
# reset patience since we found a new best
self.curr_patience = 0
# debug print
if self.verbose > 1: # prints every time the algorithm finds a new best
print(f"Generation {self.gen_count} Best eval: {np.round(self.best_eval, 3)}, budget: {self.curr_budget}/{self.budget}")
else: # new best not found, increment current patience counter
if self.verbose > 1:
print(f"Generation {self.gen_count}, no new best found. Budget: {self.curr_budget}/{self.budget}")
self.curr_patience += 1 | PypiClean |
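# --- Illustrative usage sketch (not part of the package above) ---
# Operator call signatures inferred from EA.run():
#   recombination(parents, offspring), mutation(population),
#   evaluation(population), selection(parents, offspring, minimize).
# Concrete operator objects ship with EA_sequential; the names below are
# placeholders, so the call is left commented out.
#
# ea = EA(minimize=True, budget=10_000, patience=50,
#         parents_size=4, offspring_size=24, individual_size=10,
#         discrete=False,
#         recombination=my_recombination, mutation=my_mutation,
#         selection=my_selection, evaluation=my_evaluation,
#         verbose=1)
# best_individual, best_eval_history = ea.run()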
/OctoBot_Launcher-2.0.4-py3-none-any.whl/launcher/tools/environment.py | import logging
import os
import subprocess
import urllib.request
from launcher import CONFIG_FILE, OCTOBOT_GITHUB_REPOSITORY, \
GITHUB_RAW_CONTENT_URL, OCTOBOT_REFERENCE_BRANCH, DEFAULT_CONFIG_FILE, LOGGING_CONFIG_FILE, \
CONFIG_DEFAULT_EVALUATOR_FILE, CONFIG_DEFAULT_TRADING_FILE, OCTOBOT_NAME, LINUX_OS_NAME, MAC_OS_NAME, \
TENTACLES_PATH, inc_progress, FORCE_BINARY, CONFIG_FILE_SCHEMA_WITH_PATH, STRATEGY_OPTIMIZER_DATA_FOLDER
from launcher.tools import executor
from launcher.tools.github import GithubOctoBot
from launcher.tools.version import OctoBotVersion
FOLDERS_TO_CREATE = ["logs", "backtesting/collector/data", STRATEGY_OPTIMIZER_DATA_FOLDER]
STRATEGY_OPTIMIZER_DATA = [
"binance_ADA_BTC_20180722_223335.data",
"binance_BTC_USDT_20180428_121156.data",
"binance_ETH_USDT_20180716_131148.data",
"binance_ICX_BTC_20180716_131148.data",
"binance_NEO_BTC_20180716_131148.data",
"binance_ONT_BTC_20180722_230900.data",
"binance_POWR_BTC_20180722_234855.data",
"binance_VEN_BTC_20180716_131148.data",
"binance_XLM_BTC_20180722_234305.data",
"binance_XRB_BTC_20180716_131148.data",
"bittrex_ADA_BTC_20180722_223357.data",
"bittrex_ETC_BTC_20180726_210341.data",
"bittrex_NEO_BTC_20180722_195942.data",
"bittrex_WAX_BTC_20180726_205032.data",
"bittrex_XRP_BTC_20180726_210927.data",
"bittrex_XVG_BTC_20180726_211225.data"
]
INSTALL_DOWNLOAD = [
(
f"{GITHUB_RAW_CONTENT_URL}/cjhutto/vaderSentiment/master/vaderSentiment/emoji_utf8_lexicon.txt",
"vaderSentiment/emoji_utf8_lexicon.txt"),
(
f"{GITHUB_RAW_CONTENT_URL}/cjhutto/vaderSentiment/master/vaderSentiment/vader_lexicon.txt",
"vaderSentiment/vader_lexicon.txt"),
]
OCTOBOT_REPOSITORY_FILES_ROOT = f"{GITHUB_RAW_CONTENT_URL}/{OCTOBOT_GITHUB_REPOSITORY}/{OCTOBOT_REFERENCE_BRANCH}"
INSTALL_DOWNLOAD += [(f"{OCTOBOT_REPOSITORY_FILES_ROOT}/{STRATEGY_OPTIMIZER_DATA_FOLDER}/{data_file}",
f"{STRATEGY_OPTIMIZER_DATA_FOLDER}/{data_file}")
for data_file in STRATEGY_OPTIMIZER_DATA]
FILES_TO_DOWNLOAD = [
(
f"{OCTOBOT_REPOSITORY_FILES_ROOT}/{DEFAULT_CONFIG_FILE}", CONFIG_FILE
),
(
f"{OCTOBOT_REPOSITORY_FILES_ROOT}/{DEFAULT_CONFIG_FILE}", DEFAULT_CONFIG_FILE
),
(
f"{OCTOBOT_REPOSITORY_FILES_ROOT}/{CONFIG_FILE_SCHEMA_WITH_PATH}", CONFIG_FILE_SCHEMA_WITH_PATH
),
(
f"{OCTOBOT_REPOSITORY_FILES_ROOT}/{CONFIG_DEFAULT_EVALUATOR_FILE}", CONFIG_DEFAULT_EVALUATOR_FILE
),
(
f"{OCTOBOT_REPOSITORY_FILES_ROOT}/{CONFIG_DEFAULT_TRADING_FILE}", CONFIG_DEFAULT_TRADING_FILE
),
(
f"{OCTOBOT_REPOSITORY_FILES_ROOT}/{LOGGING_CONFIG_FILE}", LOGGING_CONFIG_FILE
)
]
LIB_FILES_DOWNLOAD_PROGRESS_SIZE = 5
CREATE_FOLDERS_PROGRESS_SIZE = 5
def create_environment():
inc_progress(0, to_min=True)
logging.info(f"{OCTOBOT_NAME} is checking your environment...")
inc_progress(1)
ensure_file_environment(INSTALL_DOWNLOAD)
inc_progress(LIB_FILES_DOWNLOAD_PROGRESS_SIZE - 1)
inc_progress(CREATE_FOLDERS_PROGRESS_SIZE)
logging.info(f"Your {OCTOBOT_NAME} environment is ready !")
def install_bot(force_package=False):
create_environment()
binary_path = GithubOctoBot().update_binary(OctoBotVersion(), force_package=force_package,
force_binary=FORCE_BINARY)
# give binary execution rights if necessary
if binary_path:
binary_execution_rights(binary_path)
    # update tentacles if a binary is available
if binary_path:
executable_path = OctoBotVersion().get_local_binary(force_binary=FORCE_BINARY)
update_tentacles(executable_path, force_install=True)
else:
logging.error(f"No {OCTOBOT_NAME} found to update tentacles.")
def _ensure_directory(file_path):
directory = os.path.dirname(file_path)
if not os.path.exists(directory) and directory:
os.makedirs(directory)
def ensure_minimum_environment():
try:
for folder in FOLDERS_TO_CREATE:
if not os.path.exists(folder) and folder:
os.makedirs(folder)
ensure_file_environment(FILES_TO_DOWNLOAD)
except Exception as e:
print(f"Error when creating minimum launcher environment: {e} this should not prevent launcher "
f"from working.")
def ensure_file_environment(file_to_download):
# download files
for file_to_dl in file_to_download:
_ensure_directory(file_to_dl[1])
file_name = file_to_dl[1]
if not os.path.isfile(file_name) and file_name:
urllib.request.urlretrieve(file_to_dl[0], file_name)
def update_tentacles(binary_path, force_install=False):
if binary_path:
# update tentacles if installed
if not force_install and os.path.exists(TENTACLES_PATH):
executor.execute_command_on_current_binary(binary_path, ["-p", "update", "all"])
logging.info(f"Tentacles : all default tentacles have been updated.")
else:
executor.execute_command_on_current_binary(binary_path, ["-p", "install", "all", "force"])
logging.info(f"Tentacles : all default tentacles have been installed.")
def binary_execution_rights(binary_path):
if os.name in [LINUX_OS_NAME, MAC_OS_NAME]:
try:
rights_process = subprocess.Popen(["chmod", "+x", binary_path])
except Exception as e:
logging.error(f"Failed to give execution rights to {binary_path} : {e}")
rights_process = None
if not rights_process:
            # show a message if the user has to run the command manually
            message = f"{OCTOBOT_NAME} binary needs execution rights, " \
                      f"please type 'sudo chmod +x ./{OCTOBOT_NAME}' in a command line"
logging.warning(message)
# if self.launcher_app:
# self.launcher_app.show_alert(f"{message} and then press OK", bitmap=WARNING) | PypiClean |
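# --- Illustrative usage sketch (not part of the module above) ---
# Typical launcher flow using the functions defined in this module; both
# calls hit the network, so they are left commented out here.
#
# ensure_minimum_environment()      # create folders and default config files
# install_bot(force_package=False)  # fetch the binary and install tentacles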
/MDAKit-1.0.0-py3-none-any.whl/MDKit/tools/PCA/md2pc.py |
import os
import sys
import prody
import argparse
import numpy as np
import pandas as pd
from multiprocessing import Pool
def assign_pcs(args):
fn, topf, eda, pcs, sel, outf = args
if fn.endswith("pdb"):
pdb = prody.parsePDB(fn)
pdb = pdb.select(sel).copy()
ensemble = prody.Ensemble('A single pdb file ensemble')
ensemble.setCoords( pdb.getCoords() )
ensemble.addCoordset( pdb.getCoordsets() )
ensemble.iterpose()
        PCs = prody.calcProjection(ensemble, eda[pcs])
        if outf is not None:
            header = " ".join(["PC%d" % (i + 1) for i in pcs])
            np.savetxt(outf, PCs, fmt="%.4f", header=header, comments="")
        return PCs
elif fn.endswith(".dcd"):
structure = prody.parsePDB(topf)
str_sel = structure.select(sel)
#dcd = prody.DCDFile(fn)
dcd = prody.Trajectory(fn)
dcd.link(structure)
dcd.setCoords(structure)
dcd.setAtoms(str_sel)
PCs = prody.calcProjection(dcd, eda[pcs])
if outf is not None:
header = " ".join([ "PC%d"%(i+1) for i in pcs ])
np.savetxt(outf, PCs, fmt="%.4f", header=header, comments="")
else:
print("Unsupport file type: %s"%fn)
return None
return PCs
def parse_args():
    parser = argparse.ArgumentParser(description="Project an MD trajectory or pdb file onto PCs.")
    parser.add_argument('-i', dest="inp", help="A single dcd or pdb file to project, or a folder containing dcd files.", required=True)
parser.add_argument('-t', dest="top", help="Top file for the dcd trajectory file.")
    parser.add_argument('-e', dest="eda", help="The EDA pickle file.", required=True)
parser.add_argument('-s', dest="sel", help="The select string for protein.", required=True)
    parser.add_argument('-p', dest="pcs", type=int, nargs='+', help="The PCs to project onto, starting from 0. default: all", default=None)
    parser.add_argument('-o', dest="out", help="The output folder path. default: temp", default="temp")
parser.add_argument("-T", help="Number of thread to fun this job. default:1", type=int, default=1)
args = parser.parse_args()
return args.inp, args.top, args.eda, args.sel, args.pcs, args.out, args.T
if __name__ == "__main__":
inpf, topf, edaf, sel, pcs, outf, T = parse_args()
eda = pd.read_pickle(edaf)
if pcs is None:
pcs = np.arange(eda.numModes())
if os.path.isfile(inpf):
assign_pcs((inpf, topf, eda, pcs, sel, os.path.split(inpf)[-1]+".PC"))
else:
if not os.path.exists(outf):
os.mkdir(outf)
args = [ ( os.path.join(inpf, dcdf), topf, eda, pcs, sel, os.path.join(outf, dcdf+".PC")) for dcdf in os.listdir(inpf) ]
pool = Pool(T)
PCs = pool.map(assign_pcs, args)
pd.to_pickle(PCs, os.path.join(outf, "PCs-all.pickl")) | PypiClean |
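# --- Illustrative command-line usage (not part of the script above) ---
# File names are examples only; the flags match the argparse definitions.
#
# Project a single trajectory onto the first three PCs of a saved EDA model:
# python md2pc.py -i traj.dcd -t system.pdb -e eda.pickl \
#     -s "protein and name CA" -p 0 1 2
#
# Process a folder of dcd files with 4 worker processes:
# python md2pc.py -i trajs/ -t system.pdb -e eda.pickl \
#     -s "protein and name CA" -o pcs_out -T 4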
/Flask-AppBuilder-red-2.1.13.tar.gz/Flask-AppBuilder-red-2.1.13/flask_appbuilder/models/sqla/interface.py | import logging
import sys
import sqlalchemy as sa
from sqlalchemy import func
from sqlalchemy.exc import IntegrityError
from sqlalchemy.orm import Load
from sqlalchemy.orm.descriptor_props import SynonymProperty
from sqlalchemy_utils.types.uuid import UUIDType
from . import filters
from ..base import BaseInterface
from ..group import GroupByCol, GroupByDateMonth, GroupByDateYear
from ..mixins import FileColumn, ImageColumn
from ..._compat import as_unicode
from ...const import (
LOGMSG_ERR_DBI_ADD_GENERIC,
LOGMSG_ERR_DBI_DEL_GENERIC,
LOGMSG_ERR_DBI_EDIT_GENERIC,
LOGMSG_WAR_DBI_ADD_INTEGRITY,
LOGMSG_WAR_DBI_DEL_INTEGRITY,
LOGMSG_WAR_DBI_EDIT_INTEGRITY
)
from ...filemanager import FileManager, ImageManager
log = logging.getLogger(__name__)
def _include_filters(obj):
for key in filters.__all__:
if not hasattr(obj, key):
setattr(obj, key, getattr(filters, key))
def _is_sqla_type(obj, sa_type):
return (
isinstance(obj, sa_type) or
isinstance(obj, sa.types.TypeDecorator) and
isinstance(obj.impl, sa_type)
)
class SQLAInterface(BaseInterface):
"""
SQLAModel
Implements SQLA support methods for views
"""
session = None
filter_converter_class = filters.SQLAFilterConverter
def __init__(self, obj, session=None):
_include_filters(self)
self.list_columns = dict()
self.list_properties = dict()
self.session = session
# Collect all SQLA columns and properties
for prop in sa.orm.class_mapper(obj).iterate_properties:
if type(prop) != SynonymProperty:
self.list_properties[prop.key] = prop
for col_name in obj.__mapper__.columns.keys():
if col_name in self.list_properties:
self.list_columns[col_name] = obj.__mapper__.columns[col_name]
super(SQLAInterface, self).__init__(obj)
@property
def model_name(self):
"""
        Returns the model's class name;
        useful for auto-titling views.
"""
return self.obj.__name__
@staticmethod
def is_model_already_joined(query, model):
return model in [mapper.class_ for mapper in query._join_entities]
def _get_base_query(
self, query=None, filters=None, order_column="", order_direction=""
):
if filters:
query = filters.apply_all(query)
if order_column != "":
# if Model has custom decorator **renders('<COL_NAME>')**
# this decorator will add a property to the method named *_col_name*
if hasattr(self.obj, order_column):
if hasattr(getattr(self.obj, order_column), "_col_name"):
order_column = getattr(self._get_attr(order_column), "_col_name")
if order_direction == "asc":
query = query.order_by(self._get_attr(order_column).asc())
else:
query = query.order_by(self._get_attr(order_column).desc())
return query
    def _query_join_dotted_column(self, query, column) -> tuple:
relation_tuple = tuple()
if len(column.split('.')) >= 2:
for join_relation in column.split('.')[:-1]:
relation_tuple = self.get_related_model_and_join(join_relation)
model_relation, relation_join = relation_tuple
if not self.is_model_already_joined(query, model_relation):
query = query.join(
model_relation,
relation_join,
isouter=True
)
return query, relation_tuple
def _query_select_options(self, query, select_columns=None):
"""
        Add select load options to the query. The goal
        is to SELECT only the requested columns in SQL.
:param query: SQLAlchemy Query obj
:param select_columns: (list) of columns
:return: SQLAlchemy Query obj
"""
if select_columns:
_load_options = list()
for column in select_columns:
query, relation_tuple = self._query_join_dotted_column(
query,
column,
)
model_relation, relation_join = relation_tuple or (None, None)
if model_relation:
_load_options.append(
Load(model_relation).load_only(column.split(".")[1])
)
else:
# is a custom property method field?
if hasattr(getattr(self.obj, column), "fget"):
pass
# is not a relation and not a function?
elif not self.is_relation(column) and not hasattr(
getattr(self.obj, column), "__call__"
):
_load_options.append(Load(self.obj).load_only(column))
else:
_load_options.append(Load(self.obj))
query = query.options(*tuple(_load_options))
return query
def query(
self,
filters=None,
order_column="",
order_direction="",
page=None,
page_size=None,
select_columns=None,
):
"""
QUERY
:param filters:
            dict with filters {<col_name>: <value>, ...}
:param order_column:
name of the column to order
:param order_direction:
the direction to order <'asc'|'desc'>
:param page:
the current page
:param page_size:
            the current page size
        :param select_columns:
            optional list of column names to restrict the SQL SELECT to
        """
query = self.session.query(self.obj)
query, relation_tuple = self._query_join_dotted_column(query, order_column)
query = self._query_select_options(query, select_columns)
query_count = self.session.query(func.count('*')).select_from(self.obj)
query_count = self._get_base_query(query=query_count, filters=filters)
query = self._get_base_query(
query=query,
filters=filters,
order_column=order_column,
order_direction=order_direction,
)
count = query_count.scalar()
if page:
query = query.offset(page * page_size)
if page_size:
query = query.limit(page_size)
return count, query.all()
def query_simple_group(
self, group_by="", aggregate_func=None, aggregate_col=None, filters=None
):
query = self.session.query(self.obj)
query = self._get_base_query(query=query, filters=filters)
query_result = query.all()
group = GroupByCol(group_by, "Group by")
return group.apply(query_result)
def query_month_group(self, group_by="", filters=None):
query = self.session.query(self.obj)
query = self._get_base_query(query=query, filters=filters)
query_result = query.all()
group = GroupByDateMonth(group_by, "Group by Month")
return group.apply(query_result)
def query_year_group(self, group_by="", filters=None):
query = self.session.query(self.obj)
query = self._get_base_query(query=query, filters=filters)
query_result = query.all()
group_year = GroupByDateYear(group_by, "Group by Year")
return group_year.apply(query_result)
"""
-----------------------------------------
FUNCTIONS for Testing TYPES
-----------------------------------------
"""
def is_image(self, col_name):
try:
return isinstance(self.list_columns[col_name].type, ImageColumn)
except Exception:
return False
def is_file(self, col_name):
try:
return isinstance(self.list_columns[col_name].type, FileColumn)
except Exception:
return False
def is_string(self, col_name):
try:
return _is_sqla_type(self.list_columns[col_name].type, sa.types.String) or \
self.list_columns[col_name].type.__class__ == UUIDType
except Exception:
return False
def is_text(self, col_name):
try:
return _is_sqla_type(self.list_columns[col_name].type, sa.types.Text)
except Exception:
return False
def is_binary(self, col_name):
try:
return _is_sqla_type(self.list_columns[col_name].type, sa.types.LargeBinary)
except Exception:
return False
def is_integer(self, col_name):
try:
return _is_sqla_type(self.list_columns[col_name].type, sa.types.Integer)
except Exception:
return False
def is_numeric(self, col_name):
try:
return _is_sqla_type(self.list_columns[col_name].type, sa.types.Numeric)
except Exception:
return False
def is_float(self, col_name):
try:
return _is_sqla_type(self.list_columns[col_name].type, sa.types.Float)
except Exception:
return False
def is_boolean(self, col_name):
try:
return _is_sqla_type(self.list_columns[col_name].type, sa.types.Boolean)
except Exception:
return False
def is_date(self, col_name):
try:
return _is_sqla_type(self.list_columns[col_name].type, sa.types.Date)
except Exception:
return False
def is_datetime(self, col_name):
try:
return _is_sqla_type(self.list_columns[col_name].type, sa.types.DateTime)
except Exception:
return False
def is_enum(self, col_name):
try:
return _is_sqla_type(self.list_columns[col_name].type, sa.types.Enum)
except Exception:
return False
def is_relation(self, col_name):
try:
return isinstance(
self.list_properties[col_name], sa.orm.properties.RelationshipProperty
)
except Exception:
return False
def is_relation_many_to_one(self, col_name):
try:
if self.is_relation(col_name):
return self.list_properties[col_name].direction.name == "MANYTOONE"
except Exception:
return False
def is_relation_many_to_many(self, col_name):
try:
if self.is_relation(col_name):
return self.list_properties[col_name].direction.name == "MANYTOMANY"
except Exception:
return False
def is_relation_one_to_one(self, col_name):
try:
if self.is_relation(col_name):
return self.list_properties[col_name].direction.name == "ONETOONE"
except Exception:
return False
def is_relation_one_to_many(self, col_name):
try:
if self.is_relation(col_name):
return self.list_properties[col_name].direction.name == "ONETOMANY"
except Exception:
return False
def is_nullable(self, col_name):
if self.is_relation_many_to_one(col_name):
col = self.get_relation_fk(col_name)
return col.nullable
try:
return self.list_columns[col_name].nullable
except Exception:
return False
def is_unique(self, col_name):
try:
return self.list_columns[col_name].unique is True
except Exception:
return False
def is_pk(self, col_name):
try:
return self.list_columns[col_name].primary_key
except Exception:
return False
def is_pk_composite(self):
return len(self.obj.__mapper__.primary_key) > 1
def is_fk(self, col_name):
try:
return self.list_columns[col_name].foreign_keys
except Exception:
return False
def get_max_length(self, col_name):
try:
if self.is_enum(col_name):
return -1
col = self.list_columns[col_name]
if col.type.length:
return col.type.length
else:
return -1
except Exception:
return -1
"""
-------------------------------
FUNCTIONS FOR CRUD OPERATIONS
-------------------------------
"""
def add(self, item, raise_exception=False):
try:
self.session.add(item)
self.session.commit()
self.message = (as_unicode(self.add_row_message), "success")
return True
except IntegrityError as e:
self.message = (as_unicode(self.add_integrity_error_message), "warning")
log.warning(LOGMSG_WAR_DBI_ADD_INTEGRITY.format(str(e)))
self.session.rollback()
if raise_exception:
raise e
return False
except Exception as e:
self.message = (
as_unicode(self.general_error_message + " " + str(sys.exc_info()[0])),
"danger",
)
log.exception(LOGMSG_ERR_DBI_ADD_GENERIC.format(str(e)))
self.session.rollback()
if raise_exception:
raise e
return False
def edit(self, item, raise_exception=False):
try:
self.session.merge(item)
self.session.commit()
self.message = (as_unicode(self.edit_row_message), "success")
return True
except IntegrityError as e:
self.message = (as_unicode(self.edit_integrity_error_message), "warning")
log.warning(LOGMSG_WAR_DBI_EDIT_INTEGRITY.format(str(e)))
self.session.rollback()
if raise_exception:
raise e
return False
except Exception as e:
self.message = (
as_unicode(self.general_error_message + " " + str(sys.exc_info()[0])),
"danger",
)
log.exception(LOGMSG_ERR_DBI_EDIT_GENERIC.format(str(e)))
self.session.rollback()
if raise_exception:
raise e
return False
def delete(self, item, raise_exception=False):
try:
self._delete_files(item)
self.session.delete(item)
self.session.commit()
self.message = (as_unicode(self.delete_row_message), "success")
return True
except IntegrityError as e:
self.message = (as_unicode(self.delete_integrity_error_message), "warning")
log.warning(LOGMSG_WAR_DBI_DEL_INTEGRITY.format(str(e)))
self.session.rollback()
if raise_exception:
raise e
return False
except Exception as e:
self.message = (
as_unicode(self.general_error_message + " " + str(sys.exc_info()[0])),
"danger",
)
log.exception(LOGMSG_ERR_DBI_DEL_GENERIC.format(str(e)))
self.session.rollback()
if raise_exception:
raise e
return False
def delete_all(self, items):
try:
for item in items:
self._delete_files(item)
self.session.delete(item)
self.session.commit()
self.message = (as_unicode(self.delete_row_message), "success")
return True
except IntegrityError as e:
self.message = (as_unicode(self.delete_integrity_error_message), "warning")
log.warning(LOGMSG_WAR_DBI_DEL_INTEGRITY.format(str(e)))
self.session.rollback()
return False
except Exception as e:
self.message = (
as_unicode(self.general_error_message + " " + str(sys.exc_info()[0])),
"danger",
)
log.exception(LOGMSG_ERR_DBI_DEL_GENERIC.format(str(e)))
self.session.rollback()
return False
"""
-----------------------
FILE HANDLING METHODS
-----------------------
"""
def _add_files(self, this_request, item):
fm = FileManager()
im = ImageManager()
for file_col in this_request.files:
if self.is_file(file_col):
fm.save_file(this_request.files[file_col], getattr(item, file_col))
for file_col in this_request.files:
if self.is_image(file_col):
im.save_file(this_request.files[file_col], getattr(item, file_col))
def _delete_files(self, item):
for file_col in self.get_file_column_list():
if self.is_file(file_col):
if getattr(item, file_col):
fm = FileManager()
fm.delete_file(getattr(item, file_col))
for file_col in self.get_image_column_list():
if self.is_image(file_col):
if getattr(item, file_col):
im = ImageManager()
im.delete_file(getattr(item, file_col))
"""
------------------------------
FUNCTIONS FOR RELATED MODELS
------------------------------
"""
def get_col_default(self, col_name):
default = getattr(self.list_columns[col_name], "default", None)
if default is not None:
value = getattr(default, "arg", None)
if value is not None:
if getattr(default, "is_callable", False):
return lambda: default.arg(None)
else:
if not getattr(default, "is_scalar", True):
return None
return value
def get_related_model(self, col_name):
return self.list_properties[col_name].mapper.class_
def get_related_model_and_join(self, col_name):
relation = self.list_properties[col_name]
return relation.mapper.class_, relation.primaryjoin
def query_model_relation(self, col_name):
model = self.get_related_model(col_name)
return self.session.query(model).all()
def get_related_interface(self, col_name):
return self.__class__(self.get_related_model(col_name), self.session)
def get_related_obj(self, col_name, value):
rel_model = self.get_related_model(col_name)
return self.session.query(rel_model).get(value)
def get_related_fks(self, related_views):
return [view.datamodel.get_related_fk(self.obj) for view in related_views]
def get_related_fk(self, model):
for col_name in self.list_properties.keys():
if self.is_relation(col_name):
if model == self.get_related_model(col_name):
return col_name
def get_info(self, col_name):
if col_name in self.list_properties:
return self.list_properties[col_name].info
return {}
"""
-------------
GET METHODS
-------------
"""
def get_columns_list(self):
"""
Returns all model's columns on SQLA properties
"""
return list(self.list_properties.keys())
def get_user_columns_list(self):
"""
Returns all model's columns except pk or fk
"""
ret_lst = list()
for col_name in self.get_columns_list():
if (not self.is_pk(col_name)) and (not self.is_fk(col_name)):
ret_lst.append(col_name)
return ret_lst
# TODO get different solution, more integrated with filters
def get_search_columns_list(self):
ret_lst = list()
for col_name in self.get_columns_list():
if not self.is_relation(col_name):
tmp_prop = self.get_property_first_col(col_name).name
if (
(not self.is_pk(tmp_prop)) and
(not self.is_fk(tmp_prop)) and
(not self.is_image(col_name)) and
(not self.is_file(col_name)) and
(not self.is_boolean(col_name))
):
ret_lst.append(col_name)
else:
ret_lst.append(col_name)
return ret_lst
def get_order_columns_list(self, list_columns=None):
"""
Returns the columns that can be ordered
:param list_columns: optional list of columns name, if provided will
use this list only.
"""
ret_lst = list()
list_columns = list_columns or self.get_columns_list()
for col_name in list_columns:
if not self.is_relation(col_name):
if hasattr(self.obj, col_name):
if not hasattr(getattr(self.obj, col_name), "__call__") or hasattr(
getattr(self.obj, col_name), "_col_name"
):
ret_lst.append(col_name)
else:
ret_lst.append(col_name)
return ret_lst
def get_file_column_list(self):
return [
i.name
for i in self.obj.__mapper__.columns
if isinstance(i.type, FileColumn)
]
def get_image_column_list(self):
return [
i.name
for i in self.obj.__mapper__.columns
if isinstance(i.type, ImageColumn)
]
def get_property_first_col(self, col_name):
# support for only one col for pk and fk
return self.list_properties[col_name].columns[0]
def get_relation_fk(self, col_name):
# support for only one col for pk and fk
return list(self.list_properties[col_name].local_columns)[0]
def get(self, id, filters=None):
if filters:
query = self.session.query(self.obj)
_filters = filters.copy()
pk = self.get_pk_name()
if self.is_pk_composite():
for _pk, _id in zip(pk, id):
_filters.add_filter(_pk, self.FilterEqual, _id)
else:
_filters.add_filter(pk, self.FilterEqual, id)
query = self._get_base_query(query=query, filters=_filters)
return query.first()
return self.session.query(self.obj).get(id)
def get_pk_name(self):
pk = [pk.name for pk in self.obj.__mapper__.primary_key]
if pk:
return pk if self.is_pk_composite() else pk[0]
"""
For Retro-Compatibility
"""
SQLModel = SQLAInterface | PypiClean |
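# --- Illustrative usage sketch (not part of Flask-AppBuilder) ---
# `MyModel` and `db` are placeholders for an application's model and
# SQLAlchemy session handle; `get_filters`/`add_filter` are assumed from
# BaseInterface, so the calls are left commented out.
#
# datamodel = SQLAInterface(MyModel, session=db.session)
# flt = datamodel.get_filters(["name"])
# flt.add_filter("name", datamodel.FilterStartsWith, "A")
# count, items = datamodel.query(filters=flt, order_column="name",
#                                order_direction="asc", page=0, page_size=20)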