id (stringlengths 1-265) | text (stringlengths 6-5.19M) | dataset_id (stringclasses, 7 values)
---|---|---|
/Djaloha-0.4.2.tar.gz/Djaloha-0.4.2/djaloha/static/aloha.0.20/plugins/extra/googletranslate/README.md | Aloha-Editor GoogleTranslate Plugin
===================================
This plugin allows you to translate the content you're editing using the Google Translate API v2.
Usage
=====
Just include the plugin.js file in your page, like any other Aloha plugin. Highlight some text, switch to the "Translate" tab, and select a language you want to translate to.
At this point only English, German and French are supported, which is just because I'm too lazy to add all the language icons and styles Google Translate supports.
Please configure your own API key, as you will most likely hit Google's Translation API limits if you stick with the one I provide with this plugin:
GENTICS.Aloha.settings = {
"plugins" : {
"com.gentics.aloha.plugins.GoogleTranslate": {
apiKey : "YOUR-API-KEY-HERE"
}
}
}
Known Issues
============
* Any translation which returns special chars is broken, as the characters are inserted as symbols. This will result in broken text entries, e.g. when translating English to French | PypiClean
/Discord%20Anti-Spam-1.8.1.tar.gz/Discord Anti-Spam-1.8.1/antispam/libs/dpy.py | from __future__ import annotations
import datetime
import logging
from typing import Dict, Optional, Union, List
from unittest.mock import AsyncMock
from antispam.deprecation import mark_deprecated
from antispam.libs.shared import Base, SubstituteArgs
try:
import discord
except ModuleNotFoundError: # pragma: no cover
import disnake as discord
from antispam import (
InvalidMessage,
MissingGuildPermissions,
PropagateFailure,
UnsupportedAction,
)
from antispam.abc import Lib
from antispam.dataclasses import Guild, Member, Message
from antispam.dataclasses.propagate_data import PropagateData
log = logging.getLogger(__name__)
class DPY(Base, Lib):
def __init__(self, handler):
self.handler = handler
self.bot = self.handler.bot
def get_expected_message_type(self):
return discord.Message
def get_author_id_from_message(self, message) -> int:
return message.author.id
def get_author_name_from_message(self, message) -> str:
return message.author.name
def get_bot_id_from_message(self, message) -> int:
return self.handler.bot.user.id
def get_channel_id_from_message(self, message) -> int:
return message.channel.id
def get_message_id_from_message(self, message) -> int:
return message.id
def get_guild_id_from_message(self, message) -> Optional[int]:
if message.guild:
return message.guild.id
return None
def get_role_ids_for_message_author(self, message) -> List[int]:
try:
return [role.id for role in message.author.roles]
except AttributeError:
return []
def check_if_message_is_from_a_bot(self, message) -> bool:
return message.author.bot
async def does_author_have_kick_and_ban_perms(self, message) -> bool:
perms = message.guild.me.guild_permissions
return perms.kick_members and perms.ban_members
def get_file(self, path: str): # pragma: no cover
return discord.File(path)
async def lib_embed_as_dict(self, embed: discord.Embed) -> Dict:
return embed.to_dict()
async def get_channel_from_message(
self,
message: discord.Message,
): # pragma: no cover
return message.channel
async def get_message_mentions(self, message: discord.Message): # pragma: no cover
return message.mentions
async def get_member_from_message(self, message):
return message.author
async def get_channel_by_id(self, channel_id: int): # pragma: no cover
channel = self.bot.get_channel(channel_id)
if not channel:
channel = await self.bot.fetch_channel(channel_id)
return channel
async def dict_to_lib_embed(self, data: Dict) -> discord.Embed:
return discord.Embed.from_dict(data)
async def get_guild_id(self, message: discord.Message) -> int:
return message.guild.id
async def get_channel_id(self, message: discord.Message) -> int:
return message.channel.id
async def get_substitute_args(
self, message: discord.Message
) -> SubstituteArgs: # pragma: no cover
version = int(discord.__version__.split(".")[0])
if version >= 2:
member_avatar = str(message.author.display_avatar)
bot_avatar = str(message.guild.me.display_avatar)
guild_icon = message.guild.icon
guild_icon = guild_icon.url if guild_icon else ""
else:
member_avatar = message.author.avatar_url # type: ignore
guild_icon = message.guild.icon_url # type: ignore
bot_avatar = message.guild.me.avatar_url # type: ignore
return SubstituteArgs(
bot_id=message.guild.me.id,
bot_name=message.guild.me.name,
bot_avatar=bot_avatar,
guild_id=message.guild.id,
guild_icon=guild_icon,
guild_name=message.guild.name,
member_id=message.author.id,
member_name=message.author.display_name,
member_avatar=member_avatar,
)
async def create_message(self, message: discord.Message) -> Message:
log.debug(
"Attempting to create a new message for author(id=%s) in Guild(%s)",
message.author.id,
message.guild.id,
)
if message.is_system():
raise InvalidMessage(
"Message is a system one, we don't check against those."
)
content = ""
if message.stickers:
# 'sticker' urls should be unique..
all_stickers = "|".join(s.url for s in message.stickers)
content += all_stickers
elif not bool(message.content and message.content.strip()):
if not message.embeds and not message.attachments:
# System messages (e.g. member join notifications) trip this
raise InvalidMessage("We don't check against system messages")
if not message.embeds:
# We don't check against attachments
raise InvalidMessage("We don't check against attachments")
for embed in message.embeds:
if not isinstance(embed, discord.Embed):
raise InvalidMessage("embed was not of instance discord.Embed")
if embed.type.lower() != "rich":
raise InvalidMessage("Only rich embeds are supported")
content += await self.embed_to_string(embed)
else:
content += message.clean_content
if self.handler.options.delete_zero_width_chars:
content = (
content.replace("u200B", "")
.replace("u200C", "")
.replace("u200D", "")
.replace("u200E", "")
.replace("u200F", "")
.replace("uFEFF", "")
)
return Message(
id=message.id,
channel_id=message.channel.id,
guild_id=message.guild.id,
author_id=message.author.id,
content=content,
)
async def send_guild_log(
self,
guild,
message: Union[str, discord.Embed],
delete_after_time,
original_channel: Union[discord.abc.GuildChannel, discord.abc.PrivateChannel],
file=None,
) -> None: # pragma: no cover
try:
if not guild.log_channel_id:
log.debug(
"Guild(id=%s) has no log channel set, not sending message.",
guild.id,
)
return
channel = guild.log_channel_id
channel = self.bot.get_channel(channel)
if not channel:
channel = await self.bot.fetch_channel(channel)
if isinstance(message, str):
await channel.send(message, delete_after=delete_after_time, file=file)
else:
await channel.send(
embed=message, file=file, delete_after=delete_after_time
)
log.debug("Sent message to log channel in Guild(id=%s)", guild.id)
except discord.HTTPException:
log.error(
"Failed to send log message in Guild(id=%s). HTTPException", guild.id
)
async def punish_member(
self,
original_message: discord.Message,
member: Member,
internal_guild: Guild,
user_message,
guild_message,
is_kick: bool,
user_delete_after: int = None,
channel_delete_after: int = None,
) -> bool: # pragma: no cover
guild = original_message.guild
author = original_message.author
# Check we have perms to punish
perms = guild.me.guild_permissions
if not perms.kick_members and is_kick:
member.internal_is_in_guild = True
member.kick_count -= 1
raise MissingGuildPermissions(
f"I need kick perms to punish someone in {guild.name}"
)
elif not perms.ban_members and not is_kick:
member.internal_is_in_guild = True
member.kick_count -= 1
raise MissingGuildPermissions(
f"I need ban perms to punish someone in {guild.name}"
)
# We also check they don't own the guild, since ya know...
elif guild.owner_id == member.id:
member.internal_is_in_guild = True
member.kick_count -= 1
await self.send_guild_log(
guild=internal_guild,
message=f"I am failing to punish {original_message.author.display_name} because they own this guild.",
delete_after_time=channel_delete_after,
original_channel=original_message.channel,
)
raise MissingGuildPermissions(
f"I cannot punish {author.display_name}({author.id}) "
f"because they own this guild. ({guild.name})"
)
# Ensure we can actually punish the user, for this
# we just check our top role is higher than theirs
elif guild.me.top_role.position < author.top_role.position:
log.warning(
"I might not be able to punish %s(%s) in Guild: %s(%s) "
"because they are higher then me, which means I could lack the ability to kick/ban them.",
author.display_name,
member.id,
guild.name,
guild.id,
)
sent_message: Optional[discord.Message] = None
try:
if isinstance(user_message, discord.Embed):
sent_message = await author.send(
embed=user_message, delete_after=user_delete_after
)
else:
sent_message = await author.send(
user_message, delete_after=user_delete_after
)
except discord.HTTPException:
await self.send_guild_log(
guild=internal_guild,
message=f"Sending a message to {author.mention} about their {'kick' if is_kick else 'ban'} failed.",
delete_after_time=channel_delete_after,
original_channel=original_message.channel,
)
log.warning(
f"Failed to message Member(id=%s) about {'kick' if is_kick else 'ban'}",
author.id,
)
# Even if we can't tell them they are being punished
# We still need to punish them, so try that
_success = True
try:
if is_kick:
await guild.kick(
member, reason="Automated punishment from DPY Anti-Spam."
)
log.info("Kicked Member(id=%s)", member.id)
else:
await guild.ban(
member, reason="Automated punishment from DPY Anti-Spam."
)
log.info("Banned Member(id=%s)", member.id)
except discord.Forbidden as e:
# In theory we send the failed punishment method
# here, although we check first so I think it's fine
# to remove it from this part
raise e from None
except discord.HTTPException:
_success = False
member.internal_is_in_guild = True
member.kick_count -= 1
await self.send_guild_log(
guild=internal_guild,
message=f"An error occurred trying to {'kick' if is_kick else 'ban'}: <@{member.id}>",
delete_after_time=channel_delete_after,
original_channel=original_message.channel,
)
log.warning(
"An error occurred trying to %s: Member(id=%s)",
{"kick" if is_kick else "ban"},
member.id,
)
if sent_message is not None:
if is_kick:
user_failed_message = await self.transform_message(
self.handler.options.member_failed_kick_message,
original_message,
member.warn_count,
member.kick_count,
)
else:
user_failed_message = await self.transform_message(
self.handler.options.member_failed_ban_message,
original_message,
member.warn_count,
member.kick_count,
)
await self.send_guild_log(
internal_guild,
user_failed_message,
channel_delete_after,
original_message.channel,
)
await sent_message.delete()
else:
await self.send_guild_log(
guild=internal_guild,
message=guild_message,
delete_after_time=channel_delete_after,
original_channel=original_message.channel,
)
member.internal_is_in_guild = True
await self.handler.cache.set_member(member)
return _success
async def delete_member_messages(self, member: Member) -> None: # pragma: no cover
log.debug(
"Attempting to delete all duplicate messages for Member(id=%s) in Guild(%s)",
member.id,
member.guild_id,
)
bot = self.bot
channels = {}
for message in member.messages:
if message.is_duplicate:
# cache channel for further fetches
if message.channel_id not in channels:
channel = bot.get_channel(message.channel_id)
if not channel:
channel = await bot.fetch_channel(message.channel_id)
channels[message.channel_id] = channel
else:
channel = channels[message.channel_id]
try:
actual_message = await channel.fetch_message(message.id)
await self.delete_message(actual_message)
except discord.NotFound:
continue
async def delete_message(
self, message: discord.Message
) -> None: # pragma: no cover
try:
await message.delete()
log.debug("Deleted message %s", message.id)
except discord.HTTPException:
# Failed to delete message
log.warning(
"Failed to delete message %s in Guild(id=%s). HTTPException",
message.id,
message.guild.id,
)
async def send_message_to_(
self,
target: discord.abc.Messageable,
message: Union[str, discord.Embed],
mention: str,
delete_after_time: Optional[int] = None,
) -> None: # pragma: no cover
if isinstance(message, discord.Embed):
content = None
if self.handler.options.mention_on_embed:
content = mention
await target.send(
content,
embed=message,
delete_after=delete_after_time,
)
else:
await target.send(
message,
delete_after=delete_after_time,
)
async def timeout_member(
self, member, original_message, until: datetime.timedelta
) -> None:
guild = original_message.guild
perms = guild.me.guild_permissions
if not perms.moderate_members:
raise MissingGuildPermissions(
"moderate_members is required to timeout members.\n"
f"Tried timing out Member(id={member.id}) in Guild(id={member.guild.id})"
)
await member.timeout(
until, reason="Automated timeout from Discord-Anti-Spam"
)
async def is_member_currently_timed_out(self, member) -> bool:
return member.timed_out_until is not None
def is_dm(self, message) -> bool:
return not bool(message.guild) | PypiClean |
/CASTLE-tools-1.1.tar.gz/CASTLE-tools-1.1/CASTLE/loc_prediction.py | import os
import numpy as np
import scanpy as sc
import matplotlib.pyplot as plt
from sklearn.neighbors import NearestNeighbors
from .location.transformation import best_fit_transform, transform
from .location.edge_detection import alpha_shape
from .location.align_fine import fine_alignment
from .utils import MakeLogClass
def loc_predict_z(adata,
atte,
querys,
loc_key,
batch_key,
knowns = None,
num_mnn = 3):
use = adata.obs[[batch_key, loc_key]].drop_duplicates()
use.index = use[batch_key]
use = use[loc_key]
if knowns is None:
knowns = list(set(adata.obs[batch_key].value_counts().index.tolist()) - set(querys))
knowns.sort()
loc_knowns = use.loc[knowns]
preds = []
for query_tmp in querys:
atte_in = atte.loc[query_tmp, knowns]
atte_out = atte.loc[knowns, query_tmp]
neigh_in = atte_in.sort_values(ascending=False)[:num_mnn]
neigh_out = atte_out.sort_values(ascending=False)[:num_mnn]
neigh_index = list(set(neigh_in.index).intersection(set(neigh_out.index)))
pred = (atte_in[neigh_index]*loc_knowns[neigh_index]).sum() / (atte_in[neigh_index]).sum()
preds.append(pred)
nearest_slices = [loc_knowns.index[abs(loc_knowns - preds[i]).argmin()] for i in range(len(preds))]
return preds, nearest_slices
def init_align_with_scale( adata_ref,
adata_query,
emb_key = 'HAN_SE',
num_mnn = 1,
spatial_key1 = 'spatial',
spatial_key2 = None,
use_scale = True,
key_added = 'init_scale',
return_scale = False
):
emb1_tmp = adata_ref.obsm[emb_key]
emb2_tmp = adata_query.obsm[emb_key]
# print(emb1_tmp)
# print(emb2_tmp)
# Search for mutual nearest neighbors of two slices
neigh1 = NearestNeighbors(n_neighbors=num_mnn, metric='cosine')
neigh1.fit(emb2_tmp)
indices1 = neigh1.kneighbors(emb1_tmp, return_distance=False)
neigh2 = NearestNeighbors(n_neighbors=num_mnn, metric='cosine')
neigh2.fit(emb1_tmp)
indices2 = neigh2.kneighbors(emb2_tmp, return_distance=False)
set1 = {(i, indices1[i,j]) for i in range(indices1.shape[0]) for j in range(indices1.shape[1])}
set2 = {(indices2[j,i], j) for j in range(indices2.shape[0]) for i in range(indices2.shape[1])}
pair = set1.intersection(set2)
pair = np.array(list(pair))
if spatial_key2 is None:
spatial_key2 = spatial_key1
B = adata_ref[pair[:,0],:].obsm[spatial_key1].copy()
A = adata_query[pair[:,1],:].obsm[spatial_key2].copy()
scales = np.array([np.sqrt(((B[i] - B[j])**2).sum()) / np.sqrt(((A[i] - A[j])**2).sum()) for i in range(B.shape[0]) for j in range(B.shape[0]) if i!=j])
scale_use = np.median(scales)
A_scaled = A * scale_use
T,_,_ = best_fit_transform(A_scaled, B)
# transform the coordinates of adata2
adata_query.obsm[key_added] = transform(adata_query.obsm[spatial_key2] * scale_use, T)
if key_added != spatial_key1:
adata_ref.obsm[key_added] = adata_ref.obsm[spatial_key1].copy()
if return_scale:
return adata_ref, adata_query, scale_use
return adata_ref, adata_query
def plot_edge( adata_a, adata_b, edge_tmp1, edge_tmp2, query_index,
alpha_query, alpha_ref, spatial_key='spatial',
figsize=(5,5), s_query=10, s_ref=50, result_path='.'):
spatial_tmp1 = adata_a.obsm[spatial_key]
spatial_tmp2 = adata_b.obsm[spatial_key]
if not os.path.exists(result_path + '/loc_pred/edge'):
os.makedirs(result_path + '/loc_pred/edge')
xx,yy = np.median(spatial_tmp1, 0)
plt.figure(figsize=figsize)
plt.scatter(spatial_tmp1[:, 0], spatial_tmp1[:, 1], s = s_ref)
for i, j in edge_tmp1:
plt.plot(spatial_tmp1[[i, j], 0], spatial_tmp1[[i, j], 1], c='#E24A33')
plt.text(xx, yy, f"alpha_ref={alpha_ref}", size=18)
plt.savefig(f'{result_path}/loc_pred/edge/spatial_edge_{str(query_index)}_a.png', bbox_inches='tight')
plt.close()
xx,yy = np.median(spatial_tmp2, 0)
plt.figure(figsize=figsize)
plt.scatter(spatial_tmp2[:, 0], spatial_tmp2[:, 1], s = s_query)
for i, j in edge_tmp2:
plt.plot(spatial_tmp2[[i, j], 0], spatial_tmp2[[i, j], 1], c='#8EBA42')
plt.text(xx, yy, f"alpha_query={alpha_query}", size=18)
plt.savefig(f'{result_path}/loc_pred/edge/spatial_edge_{str(query_index)}_b.png', bbox_inches='tight')
plt.close()
class Loc_Pred(object):
"""
Location prediction of new slices based on a known reference, including prediction of the slice location and alignment.
Both steps rely on the attention and spatial embedding produced by the HAT module.
Parameters
----------
adata
AnnData object of scanpy package including query and reference data
atte
A pandas dataframe with attention as value and slice names as index and columns
batch_key
The key containing slice information in .obs
querys
list of names of slices to predict
Examples
--------
>>> adata = sc.read_h5ad(path_to_anndata)
>>> loc_pred = Loc_Pred(adata, atte, batch_key = 'section_index', querys = ['10X_1'])
>>> loc_pred.pred_z(loc_key = 'stereo_AP', num_mnn = 20)
>>> ladata_query = loc_pred.pred_xy(spatial_key_query = 'spatial',
spatial_key_ref = 'spatial_ccf_2d',
spatial_key_3d = 'spatial_ccf_3d',
emb_key = 'HAN_SE',
)
"""
def __init__(
self,
adata,
atte,
batch_key,
querys,
make_log = True,
result_path = '.'
):
super(Loc_Pred, self).__init__()
self.adata = adata
self.atte = atte
self.batch_key = batch_key
self.querys = querys
self.make_log = make_log
self.result_path = result_path
if self.make_log:
self.makeLog = MakeLogClass(f"{self.result_path}/log_loc_pred.tsv").make
self.makeLog(f"Location prediction")
self.makeLog(f" Slice key: {self.batch_key}")
self.makeLog(f" Query slices: {self.querys}")
def pred_z( self,
loc_key = 'stereo_AP',
knowns = None,
num_mnn = 20,
return_result = True
):
"""
Predict the coordinate position parallel to the slices in the reference, and find the reference slice nearest to the new one.
Parameters
----------
loc_key
The key containing coordinates of the parallel direction in .obs
knowns
List of slice names with known coordinates in the parallel direction
num_mnn
Number of neighbor slices in predicting new z
return_result
Whether to return the predicted locations and nearest slices for each query slice.
"""
self.preds, self.nearest_slices = loc_predict_z( self.adata,
self.atte,
querys = self.querys,
loc_key = loc_key,
batch_key = self.batch_key,
knowns = knowns,
num_mnn = num_mnn)
if self.make_log:
self.makeLog(f"Location prediction of z")
self.makeLog(f" Location key: {loc_key}")
self.makeLog(f" Number of neighbor slices: {num_mnn}")
if return_result:
return self.preds, self.nearest_slices
def pred_xy( self,
spatial_key_query = 'spatial',
spatial_key_ref = 'spatial_ccf_2d',
spatial_key_init = 'spatial_init',
spatial_key_3d = 'spatial_ccf_3d',
emb_key = 'CASTLE',
num_mnn_init = 1,
return_scale = True,
alpha_query = 1,
alpha_ref = 1,
add_3d = True,
plot_init = True,
figsize = (5,5),
s_query = 10,
s_ref = 50
):
"""
Get the coordinates of the new slice consistent with the reference, including initial alignment and fine alignment.
Parameters
----------
spatial_key_query
The key of 2D coordinates of query datasets in .obsm.
spatial_key_ref
The key of 2D coordinates of reference in .obsm.
spatial_key_init
Added key of initial aligned 2D coordinates in .obsm.
spatial_key_3d
Added key of 3D coordinates consistent with reference in .obsm.
emb_key
The key containing the spatial embedding, default is 'CASTLE'.
"""
adata_querys = []
for i in range(len(self.querys)):
query_slice = self.querys[i]
nearest_slice = self.nearest_slices[i]
adata_a = self.adata[self.adata.obs['section_index'] == nearest_slice].copy()
adata_b = self.adata[self.adata.obs['section_index'] == query_slice].copy()
adata_a, adata_b, scale = init_align_with_scale(adata_ref = adata_a,
adata_query = adata_b,
emb_key = emb_key,
num_mnn = num_mnn_init,
spatial_key1 = spatial_key_ref,
spatial_key2 = spatial_key_query,
key_added = spatial_key_init,
return_scale = return_scale)
boundary_tmp1, edge_tmp1, _ = alpha_shape(adata_a.obsm[spatial_key_ref], alpha=alpha_ref, only_outer=True)
boundary_tmp2, edge_tmp2, _ = alpha_shape(adata_b.obsm[spatial_key_init], alpha=alpha_query, only_outer=True)
if plot_init:
plot_edge( adata_a, adata_b, edge_tmp1, edge_tmp2, i, alpha_query, alpha_ref,
spatial_key = spatial_key_init, result_path = self.result_path,
figsize = figsize, s_query = s_query, s_ref = s_ref)
fine_adatas, Ts_fine = fine_alignment( [adata_a, adata_b],
[(boundary_tmp1, boundary_tmp2)],
spatial_key = spatial_key_init,
key_added = spatial_key_ref,
init_pose = None,
max_iterations = 40,
tolerance = 1e-8)
adata_b = fine_adatas[1]
if add_3d:
adata_b.obsm[spatial_key_3d] = np.hstack((adata_b.obsm[spatial_key_ref], np.array([self.preds[i]]*adata_b.shape[0])[:,None]))
adata_querys.append(adata_b)
adata_query = sc.concat(adata_querys)
if self.make_log:
self.makeLog(f"Location prediction of x and y")
self.makeLog(f" 2D coordinates of query: {spatial_key_query}")
self.makeLog(f" 2D coordinates of reference: {spatial_key_ref}")
self.makeLog(f" 2D coordinates of initial alignment: {spatial_key_init}")
self.makeLog(f" 3D coordinates of final alignment: {spatial_key_3d}")
self.makeLog(f" Number of neighbor spots in initial alignment: {num_mnn_init}")
self.makeLog(f" Alpha of edge detection in query data: {alpha_query}")
self.makeLog(f" Alpha of edge detection in reference: {alpha_ref}")
return adata_query | PypiClean |
/Netzob-2.0.0.tar.gz/Netzob-2.0.0/src/netzob/release.py |
#+---------------------------------------------------------------------------+
#| 01001110 01100101 01110100 01111010 01101111 01100010 |
#| |
#| Netzob : Inferring communication protocols |
#+---------------------------------------------------------------------------+
#| Copyright (C) 2011-2017 Georges Bossert and Frédéric Guihéry |
#| This program is free software: you can redistribute it and/or modify |
#| it under the terms of the GNU General Public License as published by |
#| the Free Software Foundation, either version 3 of the License, or |
#| (at your option) any later version. |
#| |
#| This program is distributed in the hope that it will be useful, |
#| but WITHOUT ANY WARRANTY; without even the implied warranty of |
#| MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
#| GNU General Public License for more details. |
#| |
#| You should have received a copy of the GNU General Public License |
#| along with this program. If not, see <http://www.gnu.org/licenses/>. |
#+---------------------------------------------------------------------------+
#| @url : http://www.netzob.org |
#| @contact : [email protected] |
#| @sponsors : Amossys, http://www.amossys.fr |
#| Supélec, http://www.rennes.supelec.fr/ren/rd/cidre/ |
#+---------------------------------------------------------------------------+
#+---------------------------------------------------------------------------+
#| Official name of the project : Netzob
#+---------------------------------------------------------------------------+
name = "Netzob"
#+---------------------------------------------------------------------------+
#| Official name of the application
#+---------------------------------------------------------------------------+
appname = name
#+---------------------------------------------------------------------------+
#| Current OFFICIAL version of the application
#| the version number must be changed during the last commit before
#| the tag release.
#| Development version has version number increased and is
#| postfixed by .dev0
#+---------------------------------------------------------------------------+
version = "2.0.0"
versionName = "ClimbingSquirrel"
#+---------------------------------------------------------------------------+
#| Copyright mentions
#+---------------------------------------------------------------------------+
copyright = "Copyright (C) 2011-2022 Georges Bossert and Frédéric Guihéry"
#+---------------------------------------------------------------------------+
#| Description of the application
#+---------------------------------------------------------------------------+
description = "Protocol Reverse Engineering, Modeling and Fuzzing"
#+---------------------------------------------------------------------------+
#| Platforms on which the application can be executed
#+---------------------------------------------------------------------------+
platforms = "Linux_x86, Linux_x64"
#+---------------------------------------------------------------------------+
#| Authors names
#+---------------------------------------------------------------------------+
author = "Georges Bossert, Frédéric Guihéry"
#+---------------------------------------------------------------------------+
#| Contributor names
#+---------------------------------------------------------------------------+
contributors = [
"Georges Bossert <[email protected]>",
"Frédéric Guihéry <[email protected]>",
"Olivier Tétard <[email protected]>",
"Goulven Guiheux <[email protected]>",
"Maxime Olivier <[email protected]>",
"Alexandre Pigné <[email protected]>",
"Franck Roland <[email protected]>",
"Fabien André <[email protected]>",
"Quentin Heyler <[email protected]>",
"Benjamin Dufour <[email protected]>",
"Giuseppe Massaro <[email protected]>",
"Timo Juhani <[email protected]>", "winnie", "Remy Delion",
"Bertus Kruger", "Eric asselin", "Tammo Krueger"
]
#+---------------------------------------------------------------------------+
#| Official website of the application
#+---------------------------------------------------------------------------+
url = "https://github.com/netzob/netzob"
#+---------------------------------------------------------------------------+
#| Official url to download the application
#+---------------------------------------------------------------------------+
download_url = "https://github.com/netzob/netzob"
#+---------------------------------------------------------------------------+
#| Translators
#+---------------------------------------------------------------------------+
translator_credits = ""
#+---------------------------------------------------------------------------+
#| Keywords to describe the application
#+---------------------------------------------------------------------------+
keywords = [
"Protocol", "Inference", "Networking", "Reverse Engineering", "Fuzzing",
"Security"
]
#+---------------------------------------------------------------------------+
#| Long description
#| <!> Do not use it anymore
#+---------------------------------------------------------------------------+
"""@deprecated: the official long description is now the full README.rst file"""
long_description = ""
#+---------------------------------------------------------------------------+
#| License used to publish the tool
#+---------------------------------------------------------------------------+
licenseName = "GPLv3"
license = """This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.""" | PypiClean |
/CANberry-0.4.tar.gz/CANberry-0.4/canberry/bower_components/jquery/src/core.js | define([
"./var/arr",
"./var/slice",
"./var/concat",
"./var/push",
"./var/indexOf",
"./var/class2type",
"./var/toString",
"./var/hasOwn",
"./var/support"
], function( arr, slice, concat, push, indexOf, class2type, toString, hasOwn, support ) {
var
// Use the correct document accordingly with window argument (sandbox)
document = window.document,
version = "@VERSION",
// Define a local copy of jQuery
jQuery = function( selector, context ) {
// The jQuery object is actually just the init constructor 'enhanced'
// Need init if jQuery is called (just allow error to be thrown if not included)
return new jQuery.fn.init( selector, context );
},
// Support: Android<4.1
// Make sure we trim BOM and NBSP
rtrim = /^[\s\uFEFF\xA0]+|[\s\uFEFF\xA0]+$/g,
// Matches dashed string for camelizing
rmsPrefix = /^-ms-/,
rdashAlpha = /-([\da-z])/gi,
// Used by jQuery.camelCase as callback to replace()
fcamelCase = function( all, letter ) {
return letter.toUpperCase();
};
jQuery.fn = jQuery.prototype = {
// The current version of jQuery being used
jquery: version,
constructor: jQuery,
// Start with an empty selector
selector: "",
// The default length of a jQuery object is 0
length: 0,
toArray: function() {
return slice.call( this );
},
// Get the Nth element in the matched element set OR
// Get the whole matched element set as a clean array
get: function( num ) {
return num != null ?
// Return just the one element from the set
( num < 0 ? this[ num + this.length ] : this[ num ] ) :
// Return all the elements in a clean array
slice.call( this );
},
// Take an array of elements and push it onto the stack
// (returning the new matched element set)
pushStack: function( elems ) {
// Build a new jQuery matched element set
var ret = jQuery.merge( this.constructor(), elems );
// Add the old object onto the stack (as a reference)
ret.prevObject = this;
ret.context = this.context;
// Return the newly-formed element set
return ret;
},
// Execute a callback for every element in the matched set.
// (You can seed the arguments with an array of args, but this is
// only used internally.)
each: function( callback, args ) {
return jQuery.each( this, callback, args );
},
map: function( callback ) {
return this.pushStack( jQuery.map(this, function( elem, i ) {
return callback.call( elem, i, elem );
}));
},
slice: function() {
return this.pushStack( slice.apply( this, arguments ) );
},
first: function() {
return this.eq( 0 );
},
last: function() {
return this.eq( -1 );
},
eq: function( i ) {
var len = this.length,
j = +i + ( i < 0 ? len : 0 );
return this.pushStack( j >= 0 && j < len ? [ this[j] ] : [] );
},
end: function() {
return this.prevObject || this.constructor(null);
},
// For internal use only.
// Behaves like an Array's method, not like a jQuery method.
push: push,
sort: arr.sort,
splice: arr.splice
};
jQuery.extend = jQuery.fn.extend = function() {
var options, name, src, copy, copyIsArray, clone,
target = arguments[0] || {},
i = 1,
length = arguments.length,
deep = false;
// Handle a deep copy situation
if ( typeof target === "boolean" ) {
deep = target;
// Skip the boolean and the target
target = arguments[ i ] || {};
i++;
}
// Handle case when target is a string or something (possible in deep copy)
if ( typeof target !== "object" && !jQuery.isFunction(target) ) {
target = {};
}
// Extend jQuery itself if only one argument is passed
if ( i === length ) {
target = this;
i--;
}
for ( ; i < length; i++ ) {
// Only deal with non-null/undefined values
if ( (options = arguments[ i ]) != null ) {
// Extend the base object
for ( name in options ) {
src = target[ name ];
copy = options[ name ];
// Prevent never-ending loop
if ( target === copy ) {
continue;
}
// Recurse if we're merging plain objects or arrays
if ( deep && copy && ( jQuery.isPlainObject(copy) || (copyIsArray = jQuery.isArray(copy)) ) ) {
if ( copyIsArray ) {
copyIsArray = false;
clone = src && jQuery.isArray(src) ? src : [];
} else {
clone = src && jQuery.isPlainObject(src) ? src : {};
}
// Never move original objects, clone them
target[ name ] = jQuery.extend( deep, clone, copy );
// Don't bring in undefined values
} else if ( copy !== undefined ) {
target[ name ] = copy;
}
}
}
}
// Return the modified object
return target;
};
jQuery.extend({
// Unique for each copy of jQuery on the page
expando: "jQuery" + ( version + Math.random() ).replace( /\D/g, "" ),
// Assume jQuery is ready without the ready module
isReady: true,
error: function( msg ) {
throw new Error( msg );
},
noop: function() {},
isFunction: function( obj ) {
return jQuery.type(obj) === "function";
},
isArray: Array.isArray,
isWindow: function( obj ) {
return obj != null && obj === obj.window;
},
isNumeric: function( obj ) {
// parseFloat NaNs numeric-cast false positives (null|true|false|"")
// ...but misinterprets leading-number strings, particularly hex literals ("0x...")
// subtraction forces infinities to NaN
// adding 1 corrects loss of precision from parseFloat (#15100)
return !jQuery.isArray( obj ) && (obj - parseFloat( obj ) + 1) >= 0;
},
isPlainObject: function( obj ) {
// Not plain objects:
// - Any object or value whose internal [[Class]] property is not "[object Object]"
// - DOM nodes
// - window
if ( jQuery.type( obj ) !== "object" || obj.nodeType || jQuery.isWindow( obj ) ) {
return false;
}
if ( obj.constructor &&
!hasOwn.call( obj.constructor.prototype, "isPrototypeOf" ) ) {
return false;
}
// If the function hasn't returned already, we're confident that
// |obj| is a plain object, created by {} or constructed with new Object
return true;
},
isEmptyObject: function( obj ) {
var name;
for ( name in obj ) {
return false;
}
return true;
},
type: function( obj ) {
if ( obj == null ) {
return obj + "";
}
// Support: Android<4.0, iOS<6 (functionish RegExp)
return typeof obj === "object" || typeof obj === "function" ?
class2type[ toString.call(obj) ] || "object" :
typeof obj;
},
// Evaluates a script in a global context
globalEval: function( code ) {
var script,
indirect = eval;
code = jQuery.trim( code );
if ( code ) {
// If the code includes a valid, prologue position
// strict mode pragma, execute code by injecting a
// script tag into the document.
if ( code.indexOf("use strict") === 1 ) {
script = document.createElement("script");
script.text = code;
document.head.appendChild( script ).parentNode.removeChild( script );
} else {
// Otherwise, avoid the DOM node creation, insertion
// and removal by using an indirect global eval
indirect( code );
}
}
},
// Convert dashed to camelCase; used by the css and data modules
// Support: IE9-11+
// Microsoft forgot to hump their vendor prefix (#9572)
camelCase: function( string ) {
return string.replace( rmsPrefix, "ms-" ).replace( rdashAlpha, fcamelCase );
},
nodeName: function( elem, name ) {
return elem.nodeName && elem.nodeName.toLowerCase() === name.toLowerCase();
},
// args is for internal usage only
each: function( obj, callback, args ) {
var value,
i = 0,
length = obj.length,
isArray = isArraylike( obj );
if ( args ) {
if ( isArray ) {
for ( ; i < length; i++ ) {
value = callback.apply( obj[ i ], args );
if ( value === false ) {
break;
}
}
} else {
for ( i in obj ) {
value = callback.apply( obj[ i ], args );
if ( value === false ) {
break;
}
}
}
// A special, fast, case for the most common use of each
} else {
if ( isArray ) {
for ( ; i < length; i++ ) {
value = callback.call( obj[ i ], i, obj[ i ] );
if ( value === false ) {
break;
}
}
} else {
for ( i in obj ) {
value = callback.call( obj[ i ], i, obj[ i ] );
if ( value === false ) {
break;
}
}
}
}
return obj;
},
// Support: Android<4.1
trim: function( text ) {
return text == null ?
"" :
( text + "" ).replace( rtrim, "" );
},
// results is for internal usage only
makeArray: function( arr, results ) {
var ret = results || [];
if ( arr != null ) {
if ( isArraylike( Object(arr) ) ) {
jQuery.merge( ret,
typeof arr === "string" ?
[ arr ] : arr
);
} else {
push.call( ret, arr );
}
}
return ret;
},
inArray: function( elem, arr, i ) {
return arr == null ? -1 : indexOf.call( arr, elem, i );
},
merge: function( first, second ) {
var len = +second.length,
j = 0,
i = first.length;
for ( ; j < len; j++ ) {
first[ i++ ] = second[ j ];
}
first.length = i;
return first;
},
grep: function( elems, callback, invert ) {
var callbackInverse,
matches = [],
i = 0,
length = elems.length,
callbackExpect = !invert;
// Go through the array, only saving the items
// that pass the validator function
for ( ; i < length; i++ ) {
callbackInverse = !callback( elems[ i ], i );
if ( callbackInverse !== callbackExpect ) {
matches.push( elems[ i ] );
}
}
return matches;
},
// arg is for internal usage only
map: function( elems, callback, arg ) {
var value,
i = 0,
length = elems.length,
isArray = isArraylike( elems ),
ret = [];
// Go through the array, translating each of the items to their new values
if ( isArray ) {
for ( ; i < length; i++ ) {
value = callback( elems[ i ], i, arg );
if ( value != null ) {
ret.push( value );
}
}
// Go through every key on the object,
} else {
for ( i in elems ) {
value = callback( elems[ i ], i, arg );
if ( value != null ) {
ret.push( value );
}
}
}
// Flatten any nested arrays
return concat.apply( [], ret );
},
// A global GUID counter for objects
guid: 1,
// Bind a function to a context, optionally partially applying any
// arguments.
proxy: function( fn, context ) {
var tmp, args, proxy;
if ( typeof context === "string" ) {
tmp = fn[ context ];
context = fn;
fn = tmp;
}
// Quick check to determine if target is callable, in the spec
// this throws a TypeError, but we will just return undefined.
if ( !jQuery.isFunction( fn ) ) {
return undefined;
}
// Simulated bind
args = slice.call( arguments, 2 );
proxy = function() {
return fn.apply( context || this, args.concat( slice.call( arguments ) ) );
};
// Set the guid of unique handler to the same of original handler, so it can be removed
proxy.guid = fn.guid = fn.guid || jQuery.guid++;
return proxy;
},
now: Date.now,
// jQuery.support is not used in Core but other projects attach their
// properties to it so it needs to exist.
support: support
});
// Populate the class2type map
jQuery.each("Boolean Number String Function Array Date RegExp Object Error".split(" "), function(i, name) {
class2type[ "[object " + name + "]" ] = name.toLowerCase();
});
function isArraylike( obj ) {
// Support: iOS 8.2 (not reproducible in simulator)
// `in` check used to prevent JIT error (gh-2145)
// hasOwn isn't used here due to false negatives
// regarding Nodelist length in IE
var length = "length" in obj && obj.length,
type = jQuery.type( obj );
if ( type === "function" || jQuery.isWindow( obj ) ) {
return false;
}
if ( obj.nodeType === 1 && length ) {
return true;
}
return type === "array" || length === 0 ||
typeof length === "number" && length > 0 && ( length - 1 ) in obj;
}
return jQuery;
}); | PypiClean |
/Django-Pizza-16.10.1.tar.gz/Django-Pizza-16.10.1/pizza/kitchen_sink/static/ks/ckeditor/plugins/wsc/README.md | CKEditor WebSpellChecker Plugin
===============================
This plugin brings Web Spell Checker (WSC) into CKEditor.
WSC is "installation-less", using the web-services of [WebSpellChecker.net](http://www.webspellchecker.net/). It's an out of the box solution.
Installation
------------
1. Clone/copy this repository contents in a new "plugins/wsc" folder in your CKEditor installation.
2. Enable the "wsc" plugin in the CKEditor configuration file (config.js):
config.extraPlugins = 'wsc';
That's all. WSC will appear on the editor toolbar and will be ready to use.
License
-------
Licensed under the terms of any of the following licenses at your choice: [GPL](http://www.gnu.org/licenses/gpl.html), [LGPL](http://www.gnu.org/licenses/lgpl.html) and [MPL](http://www.mozilla.org/MPL/MPL-1.1.html).
See LICENSE.md for more information.
Developed in cooperation with [WebSpellChecker.net](http://www.webspellchecker.net/).
| PypiClean |
/FESTIM-0.11.1-py3-none-any.whl/festim/materials/material.py | class Material:
"""
Args:
id (int, list): the id of the material. If a list is provided, the
properties will be applied to all the subdomains with the
corresponding ids.
D_0 (float): diffusion coefficient pre-exponential factor (m2/s)
E_D (float): diffusion coefficient activation energy (eV)
S_0 (float, optional): Solubility pre-exponential factor
(H/m3/Pa0.5). Defaults to None.
E_S (float, optional): Solubility activation energy (eV).
Defaults to None.
thermal_cond (float or callable, optional): thermal conductivity
(W/m/K). Can be a function of T. Defaults to None.
heat_capacity (float or callable, optional): heat capacity
(J/K/kg). Can be a function of T. Defaults to None.
rho (float or callable, optional): volumetric density (kg/m3). Can
be a function of T. Defaults to None.
borders (list, optional): The borders of the 1D subdomain.
Only needed in 1D with several materials. Defaults to None.
H (dict, optional): heat of transport (J/mol).
{"free_enthalpy": ..., "entropy": ...} so that
H = free_enthalpy + entropy*T. Defaults to None.
solubility_law (str, optional): the material's solubility law.
Can be "henry" or "sievert". Defaults to "sievert".
name (str, optional): name of the material. Defaults to None.
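Example:
    A minimal usage sketch, assuming Material is exposed at the package
    level; the numeric values below are illustrative, not reference
    property data for any real material:

        import festim
        mat = festim.Material(id=1, D_0=1.9e-7, E_D=0.2, name="my_material")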
"""
def __init__(
self,
id,
D_0,
E_D,
S_0=None,
E_S=None,
thermal_cond=None,
heat_capacity=None,
rho=None,
borders=None,
H=None,
solubility_law="sievert",
name=None,
) -> None:
self.id = id
self.name = name
self.D_0 = D_0
self.E_D = E_D
self.S_0 = S_0
self.E_S = E_S
self.thermal_cond = thermal_cond
self.heat_capacity = heat_capacity
self.rho = rho
self.borders = borders
self.H = H
if H is not None:
self.free_enthalpy = H["free_enthalpy"]
self.entropy = H["entropy"]
if solubility_law not in ["henry", "sievert"]:
raise ValueError(
"Acceptable values for solubility_law are 'henry' and 'sievert'"
)
self.solubility_law = solubility_law
self.check_properties()
def check_properties(self):
"""Checks that if S_0 is None E_S is not None and reverse.
Raises:
ValueError: [description]
ValueError: [description]
"""
if self.S_0 is None and self.E_S is not None:
raise ValueError("S_0 cannot be None")
if self.E_S is None and self.S_0 is not None:
raise ValueError("E_S cannot be None") | PypiClean |
/APAC%20SCALE-0.1.2.tar.gz/APAC SCALE-0.1.2/torchscale/architecture/encoder.py |
import math
import numpy as np
import torch
import torch.nn as nn
from fairscale.nn import checkpoint_wrapper, wrap
try:
from apex.normalization import FusedLayerNorm as LayerNorm
except ModuleNotFoundError:
from torch.nn import LayerNorm
from torchscale.architecture.utils import init_bert_params
from torchscale.component.droppath import DropPath
from torchscale.component.feedforward_network import FeedForwardNetwork, make_experts
from torchscale.component.multihead_attention import MultiheadAttention
from torchscale.component.multiway_network import MultiwayWrapper, set_split_position
from torchscale.component.relative_position_bias import RelativePositionBias
from torchscale.component.xmoe.moe_layer import MOELayer
from torchscale.component.xmoe.routing import Top1Gate, Top2Gate
class EncoderLayer(nn.Module):
def __init__(self, args, depth, is_moe_layer=False, is_encoder_decoder=False):
super().__init__()
self.args = args
self.embed_dim = args.encoder_embed_dim
self.self_attn = self.build_self_attention(self.embed_dim, args)
self.self_attn_layer_norm = MultiwayWrapper(args, LayerNorm(self.embed_dim, eps=args.layernorm_eps))
self.dropout_module = torch.nn.Dropout(args.dropout)
if args.drop_path_rate > 0:
drop_path_prob = np.linspace(0, args.drop_path_rate, args.encoder_layers)[
depth
]
self.drop_path = DropPath(drop_path_prob)
else:
self.drop_path = None
self.normalize_before = args.encoder_normalize_before
self.is_moe_layer = is_moe_layer
self.ffn_dim = args.encoder_ffn_embed_dim
if not self.is_moe_layer:
self.ffn = MultiwayWrapper(
args,
self.build_ffn(
self.embed_dim,
self.args,
),
)
else:
assert not self.args.multiway
if args.moe_top1_expert:
gate = Top1Gate(
self.embed_dim,
args.moe_expert_count,
use_fp32=args.moe_gating_use_fp32,
moe_eval_capacity_token_fraction=args.moe_eval_capacity_token_fraction,
use_xmoe=args.use_xmoe,
)
else:
gate = Top2Gate(
self.embed_dim,
args.moe_expert_count,
args.moe_gating_use_fp32,
args.moe_second_expert_policy,
args.moe_normalize_gate_prob_before_dropping,
args.moe_eval_capacity_token_fraction,
use_xmoe=args.use_xmoe,
)
experts = make_experts(args, self.embed_dim, self.ffn_dim)
self.moe_layer = MOELayer(gate, experts, args)
self.final_layer_norm = MultiwayWrapper(args, LayerNorm(self.embed_dim, eps=args.layernorm_eps))
if args.deepnorm:
if is_encoder_decoder:
self.alpha = (
math.pow(
math.pow(args.encoder_layers, 4) * args.decoder_layers, 0.0625
)
* 0.81
)
else:
self.alpha = math.pow(2.0 * args.encoder_layers, 0.25)
else:
self.alpha = 1.0
def build_ffn(self, embed_dim, args):
return FeedForwardNetwork(
embed_dim,
self.ffn_dim,
args.activation_fn,
args.dropout,
args.activation_dropout,
args.layernorm_eps,
args.subln,
)
def build_self_attention(self, embed_dim, args):
return MultiheadAttention(
args,
embed_dim,
args.encoder_attention_heads,
dropout=args.attention_dropout,
self_attention=True,
encoder_decoder_attention=False,
subln=args.subln,
)
def residual_connection(self, x, residual):
return residual * self.alpha + x
def forward(self, x, encoder_padding_mask, attn_mask=None, rel_pos=None, multiway_split_position=None, incremental_state=None):
if multiway_split_position is not None:
assert self.args.multiway
self.apply(set_split_position(multiway_split_position))
if attn_mask is not None:
attn_mask = attn_mask.masked_fill(attn_mask.to(torch.bool), -1e8)
residual = x
if self.normalize_before:
x = self.self_attn_layer_norm(x)
x, _ = self.self_attn(
query=x,
key=x,
value=x,
key_padding_mask=encoder_padding_mask,
attn_mask=attn_mask,
rel_pos=rel_pos,
incremental_state=incremental_state,
)
x = self.dropout_module(x)
if self.drop_path is not None:
x = self.drop_path(x)
x = self.residual_connection(x, residual)
if not self.normalize_before:
x = self.self_attn_layer_norm(x)
residual = x
if self.normalize_before:
x = self.final_layer_norm(x)
if not self.is_moe_layer:
x = self.ffn(x)
l_aux = None
else:
x = x.transpose(0, 1)
x, l_aux = self.moe_layer(x)
x = x.transpose(0, 1)
if self.drop_path is not None:
x = self.drop_path(x)
x = self.residual_connection(x, residual)
if not self.normalize_before:
x = self.final_layer_norm(x)
return x, l_aux
class Encoder(nn.Module):
def __init__(
self,
args,
embed_tokens=None,
embed_positions=None,
output_projection=None,
is_encoder_decoder=False,
**kwargs
):
self.args = args
super().__init__(**kwargs)
self.dropout_module = torch.nn.Dropout(args.dropout)
embed_dim = args.encoder_embed_dim
self.embed_scale = 1.0 if args.no_scale_embedding else math.sqrt(embed_dim)
self.embed_tokens = embed_tokens
self.embed_positions = embed_positions
if (
output_projection is None
and not is_encoder_decoder
and not args.no_output_layer
and args.vocab_size > 0
):
self.output_projection = self.build_output_projection(args)
else:
self.output_projection = output_projection
if args.layernorm_embedding:
self.layernorm_embedding = MultiwayWrapper(
args, LayerNorm(embed_dim, eps=args.layernorm_eps), dim=1
)
else:
self.layernorm_embedding = None
self.layers = nn.ModuleList([])
moe_freq = args.moe_freq
for i in range(args.encoder_layers):
is_moe_layer = moe_freq != 0 and (i + 1) % moe_freq == 0
self.layers.append(
self.build_encoder_layer(
args,
depth=i,
is_moe_layer=is_moe_layer,
is_encoder_decoder=is_encoder_decoder,
)
)
self.num_layers = len(self.layers)
if args.encoder_normalize_before and args.normalize_output:
self.layer_norm = MultiwayWrapper(args, LayerNorm(embed_dim, eps=args.layernorm_eps))
else:
self.layer_norm = None
if args.rel_pos_buckets > 0 and args.max_rel_pos > 0:
self.relative_position = RelativePositionBias(
num_buckets=args.rel_pos_buckets,
max_distance=args.max_rel_pos,
n_heads=args.encoder_attention_heads,
)
else:
self.relative_position = None
if args.bert_init:
self.apply(init_bert_params)
if args.deepnorm:
if is_encoder_decoder:
init_scale = (
math.pow(
math.pow(args.encoder_layers, 4) * args.decoder_layers, 0.0625
)
/ 1.15
)
else:
init_scale = math.pow(8.0 * args.encoder_layers, 0.25)
for name, p in self.named_parameters():
if (
"fc1" in name
or "fc2" in name
or "out_proj" in name
or "v_proj" in name
):
p.data.div_(init_scale)
if args.subln:
if is_encoder_decoder:
init_scale = math.sqrt(
math.log(3 * args.decoder_layers)
* math.log(2 * args.encoder_layers)
/ 3
)
else:
init_scale = math.sqrt(math.log(args.encoder_layers * 2))
for name, p in self.named_parameters():
if (
"fc1" in name
or "fc2" in name
or "out_proj" in name
or "v_proj" in name
):
p.data.mul_(init_scale)
def build_output_projection(
self,
args,
):
if args.share_encoder_input_output_embed:
assert args.encoder_embedding_type == "language"
output_projection = torch.nn.Linear(
self.embed_tokens.weight.shape[1],
self.embed_tokens.weight.shape[0],
bias=False,
)
output_projection.weight = self.embed_tokens.weight
else:
output_projection = torch.nn.Linear(
args.encoder_embed_dim, args.vocab_size, bias=False
)
torch.nn.init.normal_(
output_projection.weight, mean=0, std=args.encoder_embed_dim**-0.5
)
return output_projection
def build_encoder_layer(
self, args, depth, is_moe_layer=False, is_encoder_decoder=False
):
layer = EncoderLayer(
args,
depth,
is_moe_layer=is_moe_layer,
is_encoder_decoder=is_encoder_decoder,
)
if args.checkpoint_activations:
layer = checkpoint_wrapper(layer)
if args.fsdp:
layer = wrap(layer)
return layer
def forward_embedding(
self,
src_tokens,
token_embedding=None,
positions=None,
):
if token_embedding is None:
token_embedding = self.embed_tokens(src_tokens)
x = embed = self.embed_scale * token_embedding
if self.embed_positions is not None:
if src_tokens is not None:
x = embed + self.embed_positions(src_tokens, positions=positions)
else:
x = embed + self.embed_positions(x, positions=positions)
if self.layernorm_embedding is not None:
x = self.layernorm_embedding(x)
x = self.dropout_module(x)
return x, embed
def forward(
self,
src_tokens,
encoder_padding_mask=None,
attn_mask=None,
return_all_hiddens=False,
token_embeddings=None,
multiway_split_position=None,
features_only=False,
incremental_state=None,
positions=None,
**kwargs
):
assert src_tokens is not None or token_embeddings is not None
if encoder_padding_mask is None:
if src_tokens is not None:
encoder_padding_mask = torch.zeros_like(
src_tokens, device=src_tokens.device
).bool()
else:
encoder_padding_mask = torch.zeros(
[token_embeddings.size(0), token_embeddings.size(1)],
device=token_embeddings.device,
).bool()
if multiway_split_position is not None:
assert self.args.multiway
self.apply(set_split_position(multiway_split_position))
x, encoder_embedding = self.forward_embedding(src_tokens, token_embeddings, positions)
x = x * (1 - encoder_padding_mask.unsqueeze(-1).type_as(x))
encoder_states = []
if return_all_hiddens:
encoder_states.append(x)
rel_pos_bias = None
if self.relative_position is not None:
rel_pos_bias = self.relative_position(
batch_size=x.size(0), qlen=x.size(1), klen=x.size(1)
)
# incremental_state is not None during inference if we use the bidirectional encoder as a generator as in s2s-ft (https://arxiv.org/abs/2110.13640)
l_aux = []
for idx, layer in enumerate(self.layers):
x, l_aux_i = layer(
x,
encoder_padding_mask=encoder_padding_mask if incremental_state is None else None,
attn_mask=attn_mask,
rel_pos=rel_pos_bias,
multiway_split_position=multiway_split_position,
incremental_state=incremental_state[idx] if incremental_state is not None else None,
)
if return_all_hiddens:
assert encoder_states is not None
encoder_states.append(x)
l_aux.append(l_aux_i)
if self.layer_norm is not None:
x = self.layer_norm(x)
if not features_only and self.output_projection is not None:
x = self.output_projection(x)
return {
"encoder_out": x,
"encoder_embedding": encoder_embedding,
"encoder_padding_mask": encoder_padding_mask,
"encoder_states": encoder_states,
"l_aux": l_aux,
} | PypiClean |
/MUSiCC-1.0.4-py3-none-any.whl/musicc/core.py | from __future__ import absolute_import, division, print_function
# general imports
import os
import sys
import warnings
from time import time
import base64
import uuid
# specific imports that need to be pre-installed
import numpy as np
from scipy import stats
import pandas as pd
from sklearn.linear_model import lars_path,Lasso
from sklearn.model_selection import KFold
from sklearn.exceptions import ConvergenceWarning
################################################################################################################
# learn_lasso_model: Learns a cross-validation Lasso model from given features
# This function is only used if Intra-MUSiCC is used with the "LearnModel" option
# notes:
# - does NOT normalize X, so X needs to be normalized before sending to function
# - does NOT center Y, so Y needs to be centered before sending to function
################################################################################################################
def learn_lasso_model(cov_train, res_train):
num_cv = 5
# Check that there are enough USCGs present for cross validation
if cov_train.shape[0] < num_cv:
print("\nError: There are fewer USCGs present in the input table ({}) than the number of folds for cross validation ({}) when learning sample-specific correction models. This may have occurred if the input table was generated by truncating a full-sized KO table.".format(str(cov_train.shape[0]), str(num_cv)))
exit()
k_fold = KFold(n_splits=num_cv, shuffle=True).split(cov_train, res_train)
best_validation_rsqr = np.zeros((num_cv,1))
best_validation_alpha = np.zeros((num_cv,1))
# iterate for all internal cross-validations to learn the best alpha
for inner_k, (inner_train, inner_validation) in enumerate(k_fold):
cov_inner_train = cov_train[inner_train, :]
cov_inner_validation = cov_train[inner_validation, :]
response_inner_train = res_train[inner_train]
response_inner_validation = res_train[inner_validation]
lpath = lars_path(cov_inner_train, response_inner_train)
lpath_alphas = lpath[0]
lpath_coefs = lpath[2] # for enet_path = 1
num_alphas = len(lpath_alphas)
prediction_validation = np.dot(lpath_coefs.transpose(), cov_inner_validation.transpose())
rep_res_val = np.repeat(response_inner_validation, num_alphas).reshape(len(response_inner_validation), num_alphas).transpose()
rep_mean_val = np.repeat(np.mean(response_inner_validation), len(response_inner_validation)*num_alphas).reshape(len(response_inner_validation), num_alphas).transpose()
sos_residual = np.sum((prediction_validation - rep_res_val) ** 2, axis=1)
sos_original = np.sum((rep_res_val - rep_mean_val) ** 2, axis=1)
rep_validation_rsqr = np.array(1 - (sos_residual / sos_original))
sorted_ind = np.argsort(rep_validation_rsqr)[::-1]
best_validation_rsqr[inner_k] = rep_validation_rsqr[sorted_ind[0]]
best_validation_alpha[inner_k] = lpath_alphas[sorted_ind[0]]
mean_best_alpha = np.mean(best_validation_alpha)
mean_best_rsqr = np.mean(best_validation_rsqr)
# now learn one unified model on the given data using the mean_best_alpha
lasso = Lasso(fit_intercept=True, normalize=False, alpha=mean_best_alpha)
lasso.fit(cov_train, res_train)
return lasso, mean_best_rsqr
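# A minimal usage sketch for learn_lasso_model (illustrative only; the
# variable names below are hypothetical). Per the notes above, covariates
# must be normalized and the response centered before calling:
#
#     X_norm = X / np.linalg.norm(X, axis=0)   # normalize covariate columns
#     y_centered = y - np.mean(y)              # center the response
#     lasso_model, mean_cv_rsqr = learn_lasso_model(X_norm, y_centered)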
################################################################################################################
###################################################################################################################
# MAIN FUNCTION
# corrects the given abundance data
###################################################################################################################
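# A sketch of the expected 'args' dictionary (keys inferred from the accesses
# below; the values shown are illustrative assumptions, not defaults):
#
#     args = {'input_file': 'ko_abundance.tab', 'output_file': 'corrected.tab',
#             'input_format': 'tab', 'musicc_inter': True,
#             'musicc_intra': 'learn_model', 'compute_scores': True,
#             'verbose': True}
#     correct_and_normalize(args)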
def correct_and_normalize(args):
print("Running MUSiCC...")
# if verbose, print given options
if args['verbose']:
print("Input: " + args['input_file'])
print("Output: " + args['output_file'])
print("Normalize: " + str(args['musicc_inter']))
print("Correct: " + args['musicc_intra'])
print("Compute scores: " + str(args['compute_scores']))
# set some initial settings for the script
np.set_printoptions(precision=5,suppress=True, linewidth=200) # nicer output
warnings.filterwarnings("ignore", category=DeprecationWarning)
warnings.filterwarnings("ignore", category=UserWarning)
warnings.filterwarnings("ignore", category=RuntimeWarning)
# get the current directory of the module
curr_dir_of_module = os.path.split(os.path.realpath(__file__))[0]
########################################################
# start the timer to measure how long it takes to run the module
t_start = time()
########################################################
########################################################
# import the list of Universal single-copy genes (USCG)
########################################################
assert os.path.isfile(curr_dir_of_module + "/data/uscg_76_kegg_min_2313.lst"), "data file of USiCGs is missing"
ins = open(curr_dir_of_module + "/data/uscg_76_kegg_min_2313.lst", "r")
uscg = []
for line in ins:
uscg.append(line.strip())
ins.close()
uscg = set(uscg)
if args['compute_scores']:
# import the semi-USCGs list
assert os.path.isfile(curr_dir_of_module + "/data/semi_uscg_72_kegg_min_2148_max_2313.lst"), "data file of semi-USiCGs is missing"
ins = open(curr_dir_of_module + "/data/semi_uscg_72_kegg_min_2148_max_2313.lst", "r")
semi_uscg = []
for line in ins:
semi_uscg.append(line.strip())
ins.close()
semi_uscg = set(semi_uscg)
# import the correlog clusters lists
assert os.path.isfile(curr_dir_of_module + "/data/Correlog_clusters_genes_DATA_HMP_STOOL_CLASS_HMP_SAMP_134_KO_76.tab"), "data file of correlogs is missing"
ins = open(curr_dir_of_module + "/data/Correlog_clusters_genes_DATA_HMP_STOOL_CLASS_HMP_SAMP_134_KO_76.tab", "r")
correlog_clusters = []
for line in ins:
line_arr = line.strip().split("\t")
correlog_clusters.append(line_arr[1].split(":"))
ins.close()
number_of_correlog_clusters = len(correlog_clusters)
########################################################
########################################################
# import the data file, KO vs. Samples
########################################################
print("Loading data using pandas module...")
assert os.path.isfile(args['input_file']), "abundance data input file is missing"
if args['input_format'] == 'biom': # input in biom format
print("converting from biom format...")
        temporary_name = base64.urlsafe_b64encode(uuid.uuid4().bytes).decode('ascii').replace('=', '')
        os.system("biom convert -i " + args['input_file'] + " -o " + temporary_name + " -b")
print("Done.")
ko_abun_data = pd.read_csv(temporary_name, index_col=0, dtype={0: str}, sep="\t")
#1.0 version: ins = open(temporary_name, "r")
#1.0 version: delimiter = "\t"
elif args['input_format'] == 'csv': # csv format
ko_abun_data = pd.read_csv(args['input_file'], index_col=0, dtype={0: str}, sep=',')
#1.0 version: ins = open(args['input_file'], "r")
#1.0 version: delimiter = ","
else: # tab format
ko_abun_data = pd.read_csv(args['input_file'], index_col=0, dtype={0: str}, sep="\t")
#1.0 version: ins = open(args['input_file'], "r")
#1.0 version: delimiter = "\t"
genes = ko_abun_data.index.values
samples = ko_abun_data.columns.values
abun = ko_abun_data.values
# 1.0 version:
#abun = []
#genes = []
#row_counter = 0
#for line in ins:
# line_arr = line.strip().split(delimiter)
# if len(line_arr[0]) == 0 or line_arr[0][0] == '#':
# continue
# if row_counter == 0:
# samples = line_arr[1:]
# else:
# genes.append(line_arr[0])
# abun.append(line_arr[1:])
# if not len(line_arr[1:]) == len(samples):
# print("Error: number of values for gene " + str(line_arr[0]) + " (" + str(len(line_arr)) + ")"
# " does not match number of samples " + "(" + str(len(samples)) + ")"
# ", possibly because of missing row header for samples (top left header)")
# exit()
# row_counter += 1
#ins.close()
#genes = np.array(genes)
#abun = np.array(abun, dtype=np.float64)
#samples = np.array(samples)
if args['input_format'] == 'biom':
os.system("rm " + temporary_name)
# now sort by genes
genes_sort_ind = np.array(sorted(range(len(genes)), key=lambda k: genes[k]))
genes = genes[genes_sort_ind]
abun = abun[genes_sort_ind]
num_of_samples = len(samples)
num_of_genes = len(genes)
if args['verbose']:
print(str(num_of_samples) + " samples and " + str(num_of_genes) + " genes")
# intersect genes with USCGs to find out their indices
uscg_ind = [i for i, item in enumerate(genes) if item in uscg]
print("Done.")
########################################################
################################################################
# if option selected, correct the abundance per sample by a model based on USiCG
################################################################
if args['musicc_intra'] != 'None':
print("Performing MUSiCC Correction...")
if args['musicc_intra'] == 'use_generic': # use generic model
# load the learned weights from file
model__feature_names = []
model__sample_names = []
model__intercept_vals = []
model__coef_vals = []
row_counter = 0 # counter and reference
assert os.path.isfile(curr_dir_of_module + "/data/Final_Betas_ALL_SAMPLES_DATA_HMP_STOOL_CLASS_HMP_SAMP_134_KO_76.tab"), "data file of learned models is missing"
ins = open(curr_dir_of_module + "/data/Final_Betas_ALL_SAMPLES_DATA_HMP_STOOL_CLASS_HMP_SAMP_134_KO_76.tab", "r")
for line in ins:
vals = line.strip().split("\t")
# check if first line
if (row_counter == 0):
model__feature_names = vals[1:]
else:
model__sample_names.append(vals[0])
model__intercept_vals.append(vals[1])
model__coef_vals.append(vals[2:])
row_counter += 1
ins.close()
# convert to arrays
model__feature_names = np.array(model__feature_names)
model__sample_names = np.array(model__sample_names)
model__intercept_vals = np.array(model__intercept_vals, dtype=np.float64)
model__coef_vals = np.array(model__coef_vals, dtype=np.float64)
# compute mean of intercept and mean of coefficients
model__mean_intercept = np.mean(model__intercept_vals)
model__mean_coef = np.mean(model__coef_vals, axis=0)
if args['verbose']:
print("Generic model intercept:" + str(model__mean_intercept))
print("Generic model coefficients:" + str(model__mean_coef))
# load features for the genes
feature_names = []
features_kos = []
features_vals = []
row_counter = 0 # counter and reference
assert os.path.isfile(curr_dir_of_module + "/data/Gene_level_features_for_all_KOs_DATA_HMP_STOOL_CLASS_HMP_SAMP_134_KO_76.tab"), "data file of gene-level features is missing"
ins = open(curr_dir_of_module + "/data/Gene_level_features_for_all_KOs_DATA_HMP_STOOL_CLASS_HMP_SAMP_134_KO_76.tab", "r")
for line in ins:
vals = line.strip().split("\t")
# check if first line
if (row_counter == 0):
feature_names = vals[1:]
else:
features_kos.append(vals[0])
features_vals.append(vals[1:])
row_counter += 1
ins.close()
# convert to arrays
feature_names = np.array(feature_names)
features_kos = np.array(features_kos)
features_vals = np.array(features_vals, dtype=np.float64)
#sort features by genes
featuers_sorting_by_ko = np.array(sorted(range(len(features_kos)), key=lambda k: features_kos[k]))
features_kos = features_kos[featuers_sorting_by_ko]
features_vals = features_vals[featuers_sorting_by_ko, :]
# intersect lists of USCGs between features and abundance
uscg__inter_features_abundance = np.intersect1d(features_kos, np.intersect1d(genes, np.array(list(uscg))))
uscg__features_ind_of_intersection = [i for i,item in enumerate(features_kos) if item in uscg__inter_features_abundance]
uscg__abundance_ind_of_intersection = [i for i,item in enumerate(genes) if item in uscg__inter_features_abundance]
# intersect lists of ALL GENES between features and abundance
all_genes__inter_features_abundance = np.intersect1d(features_kos, genes)
all_genes__features_ind_of_intersection = [i for i,item in enumerate(features_kos) if item in all_genes__inter_features_abundance]
all_genes__abundance_ind_of_intersection = [i for i,item in enumerate(genes) if item in all_genes__inter_features_abundance]
if args['compute_scores']:
# intersect lists of semi-USCGs between features and abundance
semi_uscg__inter_features_abundance = np.intersect1d(features_kos, np.intersect1d(genes, np.array(list(semi_uscg))))
semi_uscg__features_ind_of_intersection = [i for i,item in enumerate(features_kos) if item in semi_uscg__inter_features_abundance]
semi_uscg__abundance_ind_of_intersection = [i for i,item in enumerate(genes) if item in semi_uscg__inter_features_abundance]
# intersect lists of correlog clusters between features and abundance
corelog_cluster__inter_features_abundance = []
corelog_cluster__features_ind_of_intersection = []
corelog_cluster__abundance_ind_of_intersection = []
for clus in range(number_of_correlog_clusters):
corelog_cluster__inter_features_abundance.append(np.intersect1d(features_kos, np.intersect1d(genes, correlog_clusters[clus])))
corelog_cluster__features_ind_of_intersection.append([i for i,item in enumerate(features_kos) if item in corelog_cluster__inter_features_abundance[clus]])
corelog_cluster__abundance_ind_of_intersection.append([i for i,item in enumerate(genes) if item in corelog_cluster__inter_features_abundance[clus]])
##########################################################################################################
# Correct abundances per sample across all samples
##########################################################################################################
all_samples_mean_scores = np.zeros((num_of_samples, 1))
all_samples_mean_scores[:] = np.NaN
all_samples_final_weights = np.zeros((features_vals.shape[1], num_of_samples))
if args['compute_scores']:
all_samples_semi_uscg_scores = np.zeros((num_of_samples, 1))
all_samples_semi_uscg_scores[:] = np.NaN
all_samples_correlog_clusters_scores = np.zeros((num_of_samples, number_of_correlog_clusters))
all_samples_correlog_clusters_scores[:] = np.NaN
if args['musicc_intra'] == 'learn_model':
print("Learning sample-specific models")
else:
print("Correcting samples using generic model")
# loop over all samples
for s in range(num_of_samples):
sys.stdout.write(".")
sys.stdout.flush()
sample_abundance__uscg = np.array(abun[uscg__abundance_ind_of_intersection, s])
covariates_uscg = features_vals[uscg__features_ind_of_intersection, :]
final_response = (sample_abundance__uscg / np.mean(sample_abundance__uscg)) - 1.0
final_covariates = np.nan_to_num(stats.zscore(covariates_uscg))
# compute prediction for current sample
if args['musicc_intra'] == 'learn_model':
with warnings.catch_warnings():
warnings.simplefilter('ignore', ConvergenceWarning)
final_model, all_samples_mean_scores[s] = learn_lasso_model(final_covariates, final_response)
# compute prediction on all genes
predicted_correction_for_all_ko = final_model.predict(np.nan_to_num(stats.zscore(features_vals))) + 1
else: # use generic model to predict correction
predicted_correction_for_uscg = np.dot(final_covariates, model__mean_coef) + model__mean_intercept
predicted_correction_for_all_ko = np.dot(np.nan_to_num(stats.zscore(features_vals)), model__mean_coef) + model__mean_intercept
sos_residual = np.sum((predicted_correction_for_uscg - (final_response + 1.0)) ** 2)
sos_original = np.sum(((final_response + 1.0) - np.mean((final_response + 1.0))) ** 2)
all_samples_mean_scores[s] = 1.0 - (sos_residual / sos_original)
# set min/max of prediction to the min/max of USCGs (to eliminate outliers in prediction)
min_correction_uscg = np.min(predicted_correction_for_all_ko[uscg__features_ind_of_intersection])
max_correction_uscg = np.max(predicted_correction_for_all_ko[uscg__features_ind_of_intersection])
low_values_indices = predicted_correction_for_all_ko < min_correction_uscg
predicted_correction_for_all_ko[low_values_indices] = min_correction_uscg
high_values_indices = predicted_correction_for_all_ko > max_correction_uscg
predicted_correction_for_all_ko[high_values_indices] = max_correction_uscg
# apply the correction to the actual abundance array
abun[all_genes__abundance_ind_of_intersection, s] = abun[all_genes__abundance_ind_of_intersection, s] / predicted_correction_for_all_ko[all_genes__features_ind_of_intersection]
            if args['compute_scores']: # test prediction on semi-USCGs and clusters to report scores
sample_abundance__semi_uscg = np.array(abun[semi_uscg__abundance_ind_of_intersection, s])
covariates_semi_uscg = np.nan_to_num(stats.zscore(features_vals[semi_uscg__features_ind_of_intersection, :]))
response_semi_uscg = (sample_abundance__semi_uscg / np.mean(sample_abundance__semi_uscg)) - 1.0
if args['musicc_intra'] == 'learn_model':
all_samples_semi_uscg_scores[s] = final_model.score(covariates_semi_uscg, response_semi_uscg)
else:
predicted_correction_for_semi_uscg = np.dot(covariates_semi_uscg, model__mean_coef) + model__mean_intercept
sos_residual = np.sum((predicted_correction_for_semi_uscg - (response_semi_uscg + 1.0)) ** 2)
sos_original = np.sum(((response_semi_uscg + 1.0) - np.mean((response_semi_uscg + 1.0))) ** 2)
all_samples_semi_uscg_scores[s] = 1.0 - (sos_residual / sos_original)
for clus in range(number_of_correlog_clusters):
covariates_clus = np.nan_to_num(stats.zscore(features_vals[corelog_cluster__features_ind_of_intersection[clus], :]))
response_clus = (abun[corelog_cluster__abundance_ind_of_intersection[clus], s] / np.mean(abun[corelog_cluster__abundance_ind_of_intersection[clus], s])) - 1
if len(response_clus) >= 5 and not np.max(np.isnan(response_clus)):
if args['musicc_intra'] == 'learn_model':
all_samples_correlog_clusters_scores[s, clus] = final_model.score(covariates_clus, response_clus)
else:
predicted_correction_for_correlog_clusters = np.dot(covariates_clus, model__mean_coef) + model__mean_intercept
sos_residual = np.sum((predicted_correction_for_correlog_clusters - (response_clus + 1.0)) ** 2)
sos_original = np.sum(((response_clus + 1.0) - np.mean((response_clus + 1.0))) ** 2)
all_samples_correlog_clusters_scores[s, clus] = 1.0 - (sos_residual / sos_original)
print("Done.")
# if option selected, aggregate scores from all samples
if args['compute_scores'] and args['verbose']:
print("Model performance on various gene sets:")
print("Median R^2 across samples for all USCG:" + str(np.nanmedian(all_samples_mean_scores)))
print("Median R^2 across samples for all semi-USCG:" + str(np.nanmedian(all_samples_semi_uscg_scores)))
print("Number_of_correlog_clusters:" + str(number_of_correlog_clusters))
print("Median R^2 across samples for all correlog clusters:" + str(np.nanmedian(np.nanmedian(all_samples_correlog_clusters_scores))))
################################################################
################################################################
# if option selected, normalize the samples by the median USiCG
################################################################
if args['musicc_inter']:
print("Performing MUSiCC Normalization...")
# compute median USCGs per sample
median_uscg_per_sample = np.median(abun[uscg_ind,:], axis=0)
if args['verbose']:
print("median USiCG before MUSiCC = " + str(median_uscg_per_sample))
# check to see which samples have NO USiCGs at all
no_zero_samples = np.all(median_uscg_per_sample > 0)
if not no_zero_samples:
samples_with_no_usicg = (median_uscg_per_sample == 0)
print("Warning: The following samples have no Universal Single-copy genes and were converted to NaN - " + str(samples[samples_with_no_usicg]))
# generate the array of correction by median USCGs
uscg_median_corrector = np.repeat(median_uscg_per_sample, num_of_genes, axis=0).reshape(num_of_samples, num_of_genes).transpose()
# perform the correction on given abundance
abun = abun / uscg_median_corrector
# convert inf to NaN
abun[np.isinf(abun)] = np.nan
if args['verbose']:
print("median USiCG after MUSiCC = " + str(np.median(abun[uscg_ind,:], axis=0)))
print("Done.")
################################################################################
################################################################
# print corrected abundance to output file
################################################################
output_pd = pd.DataFrame(data=abun, index=genes, columns=samples)
output_pd.index.name = 'KO'
with warnings.catch_warnings():
warnings.filterwarnings("ignore",category=DeprecationWarning)
if args['output_format'] == 'csv': # csv format
output_pd.to_csv(args['output_file'], sep=',', na_rep='NaN')
elif args['output_format'] == 'tab': # tab format
output_pd.to_csv(args['output_file'], sep='\t', na_rep='NaN')
else: # biom format
print("Writing output in biom format...")
            temporary_name = base64.urlsafe_b64encode(uuid.uuid4().bytes).decode('ascii').replace('=', '')
output_pd.to_csv(temporary_name, sep='\t', na_rep='NaN')
if os.path.isfile(args['output_file']): # remove file if exists such that biom won't crash
os.system("rm " + args['output_file'])
os.system("biom convert -i " + temporary_name + " -o " + args['output_file'] + " --table-type=\"gene table\" --matrix-type=dense")
os.system("rm " + temporary_name)
print("Done.")
##########################################
##########################################
# print out time spent in module
t_end = time()
print('Running time was %.0f seconds.' % (t_end - t_start))
##########################################
# Source file: Hangman.py (from the Hangman-1.4.0 package)
import random
import sys
words=[
"War","Brother","Winter"
,"Sneeze","Word"
,"Agreement"
,"Theory"
,"Slope"
,"Need"
,"Event"
,"Help"
,"Play"
,"Price"
,"Birth"
,"Respect"
,"Month"
,"Move"
,"Use"
,"Produce"
,"Substance"
,"Scale"
,"Letter"
,"Oil"
,"Meeting"
,"Size"
,"Death"
,"Servant"
,"Driving"
,"Hope"
,"Room"
,"Invention"
,"Shake"
,"Nation"
,"Relation"
,"Love"
,"Tin"
,"River"
,"Summer"
,"Weight"
,"Increase"
,"Crack"
,"Flower"
,"Base"
,"Offer"
,"Mountain"
,"Committee"
,"Ray"
,"Blood"
,"Rain"
,"Name"
,"Amount"
,"Woman"
,"Paper"
,"Trade"
,"Money"
,"Wind"
,"Friend"
,"Teaching"
,"Group"
,"Story"
,"Judge"
,"Science"
,"Comparison"
,"Religion"
,"Paint"
,"Harbor"
,"Man"
,"Request"
,"Answer"
,"Meal"
,"Vessel"
,"Swim"
,"Reading"
,"Knowledge"
,"Machine"
,"Amusement"
,"Digestion"
,"List"
,"Snow"
,"Current"
,"Stitch"
,"Sand"
,"Smell"
,"Company"
,"Family"
,"Reaction"
,"Shock"
,"Owner"
,"Lift"
,"Cotton"
,"Fear"
,"Steam"
,"Observation"
,"Animal"
,"Attempt"
,"Butter"
,"Music"
,"Rest"
,"Unit"
,"Desire"
,"Representative"
,"Sugar",
"Market",
"Wood",
"Front",
"Top",
"Idea",
"Art",
"Thunder",
"Value","Grass"
]
a=0
while a==0:
Rightword=random.choice(words)
correctword=Rightword.lower()
correctletters=set(list(correctword))
guessed=[]
print("Welcome to Suhail's Hangman!")
print("#You will get *number of letters in word* + 5 chances to guess.")
print("#Incase you notice the missing Hangman, I oppose Capital Punishment ;)")
print("1. Start")
print("2. Quit")
b=input("Your choice: ")
if b=="2":
print("K bye.")
sys.exit()
if b=="1":
print("Guess this word: ")
dashes=("_ "*len(correctword))
print(dashes)
c=1
while c< (len(correctword)+6):
letter=input("Your Guess: ")
if letter==("cheatercock"):
print("YOU WIN")
print("The word is", correctword)
break
if letter in correctword and letter not in guessed and len(letter)==1:
guessed.append(letter)
print("Yus.You got that right")
else:
print("Nope, wrong letter.Try again.")
for letter in correctword:
if letter in guessed:
print(letter,end=" ")
else:
print("_ ",end=" ")
print()
print("You have ", len(correctword)+5-c, "tries remaining.")
c+=1
if len(guessed)==len(correctletters):
print("Congratulations!!!")
print("You have guessed wisely! ")
print(correctword,"is the word you have correctly guessed.")
print()
c+=30
break
if c==(len(correctword)+6):
print("The correct word is", correctword)
print("You got " ,guessed,"correct.")
print("AAP HAARE HUE INSAAN HO.")
print("Try again now.")
                print()
# Source file: direstplus/wind.py (from the DIRestPlus-0.2.2 package)
from direstplus import api
from flask_restplus import Resource, reqparse
from WindPy import w
import pandas as pd
import logging
from datetime import datetime, date
from direstplus.exceptions import RequestError
logger = logging.getLogger(__name__)
STR_FORMAT_DATE = '%Y-%m-%d'
STR_FORMAT_DATETIME_WIND = '%Y-%m-%d %H:%M:%S' # 2017-03-06 00:00:00
UN_AVAILABLE_DATETIME = datetime.strptime('1900-01-01', STR_FORMAT_DATE)
UN_AVAILABLE_DATE = UN_AVAILABLE_DATETIME.date()
header = {'Content-Type': 'application/json'}
rec = api.namespace('wind', description='Wind API')
ERROR_CODE_MSG_DIC = {
    -40522005: "Unsupported Wind code",
    -40522003: "Invalid request",
    -40521004: "Failed to send the request; please check the network connection",
    -40520007: "No data available",
    -40521009: "Data decoding failed. Check that the input parameters are correct, e.g. mind month-end dates of long/short months and short February in date parameters",
    -40521010: "Network timeout",
    -40522017: "Data request exceeds the extraction limit",
    -40522006: "Indicator syntax error. Check that the indicators in the request are correct, with none missing or duplicated",
}
# parser
receive_wset_parser = reqparse.RequestParser().add_argument(
'tablename', type=str, required=True, help="数据集名称"
).add_argument(
'options', type=str, help="可选参数"
)
receive_wsd_parser = reqparse.RequestParser().add_argument(
'codes', type=str, required=True, help="数据集名称"
).add_argument(
'fields', type=str, help="指标"
).add_argument(
'beginTime', type=str, help="开始时间"
).add_argument(
'endTime', type=str, help="截止时间"
).add_argument(
'options', type=str, help="可选参数"
)
receive_wsi_parser = reqparse.RequestParser().add_argument(
'codes', type=str, required=True, help="数据集名称"
).add_argument(
'fields', type=str, help="指标"
).add_argument(
'beginTime', type=str, help="开始时间"
).add_argument(
'endTime', type=str, help="截止时间"
).add_argument(
'options', type=str, help="可选参数"
)
receive_wss_parser = reqparse.RequestParser().add_argument(
'codes', type=str, required=True, help="数据集名称"
).add_argument(
'fields', type=str, help="指标"
).add_argument(
'options', type=str, help="可选参数"
)
tdays_offset_parser = reqparse.RequestParser().add_argument(
'offsets', type=str, required=True, help="偏移值"
).add_argument(
'beginTime', type=str, help="基准时间"
).add_argument(
'options', type=str, help="可选参数"
)
tdays_parser = reqparse.RequestParser().add_argument(
'beginTime', type=str, help="开始时间"
).add_argument(
'endTime', type=str, help="结束时间"
).add_argument(
'options', type=str, help="可选参数"
)
receive_wsq_parser = reqparse.RequestParser().add_argument(
'codes', type=str, required=True, help="数据集名称"
).add_argument(
'fields', type=str, help="指标"
).add_argument(
'options', type=str, help="可选参数"
)
receive_wst_parser = reqparse.RequestParser().add_argument(
'codes', type=str, required=True, help="数据集名称"
).add_argument(
'fields', type=str, help="指标"
).add_argument(
'beginTime', type=str, help="开始时间"
).add_argument(
'endTime', type=str, help="截止时间"
).add_argument(
'options', type=str, help="可选参数"
)
receive_edb_parser = reqparse.RequestParser().add_argument(
'codes', type=str, required=True, help="数据集名称"
).add_argument(
'beginTime', type=str, help="开始时间"
).add_argument(
'endTime', type=str, help="截止时间"
).add_argument(
'options', type=str, help="可选参数"
)
def format_2_date_str(dt):
if dt is None:
return None
dt_type = type(dt)
if dt_type == str:
return dt
elif dt_type == date:
if dt > UN_AVAILABLE_DATE:
return dt.strftime(STR_FORMAT_DATE)
else:
return None
elif dt_type == datetime:
if dt > UN_AVAILABLE_DATETIME:
return dt.strftime(STR_FORMAT_DATE)
else:
return None
else:
return dt
def format_2_datetime_str(dt):
if dt is None:
return None
dt_type = type(dt)
if dt_type == str:
return dt
elif dt_type == date:
if dt > UN_AVAILABLE_DATE:
return dt.strftime(STR_FORMAT_DATE)
else:
return None
elif dt_type == datetime:
if dt > UN_AVAILABLE_DATETIME:
return dt.strftime(STR_FORMAT_DATETIME_WIND)
else:
return None
else:
return dt
@rec.route('/wset/')
class ReceiveWSET(Resource):
@rec.expect(receive_wset_parser)
def post(self):
"""
json str:{"tablename": "sectorconstituent", "options": "date=2017-03-21;sectorid=1000023121000000"}
        :return: dict of the data returned by Wind
"""
args = receive_wset_parser.parse_args()
logger.info('/wset/ args:%s' % args)
# print('args:%s' % args)
# table_name = args['table_name']
# options = args['options']
if args['options'] == "":
args['options'] = None
if not w.isconnected():
w.start()
ret_data = None
for nth in range(2):
ret_data = w.wset(**args)
error_code = ret_data.ErrorCode
if error_code != 0:
if nth == 0 and error_code == -40521010:
w.start()
logger.warning('尝试重新登陆成功,再次调用函数')
continue
msg = ERROR_CODE_MSG_DIC.setdefault(error_code, "")
logger.error('wset(%s) ErrorCode=%d %s' % (args, error_code, msg))
raise RequestError(msg, None, error_code)
else:
break
else:
if ret_data is None:
msg = 'wst(%s) ret_data is None' % args
logger.error(msg)
raise RequestError(msg, None, 0)
data_count = len(ret_data.Data)
# if data_count > 0:
# print('ret_data.Fields\n', ret_data.Fields)
# ret_data.Data[0] = [format_2_date_str(dt) for dt in ret_data.Data[0]]
# print('ret_data.Data\n', ret_data.Data)
for n_data in range(data_count):
data = ret_data.Data[n_data]
data_len2 = len(data)
if data_len2 > 0:
# 取出第一个部位None的数据
item_check = None
for item_check in data:
if item_check is not None:
break
# 进行类型检查,如果发现是 datetime, date 类型之一,则进行类型转换
if item_check is not None and type(item_check) in (datetime, date):
ret_data.Data[n_data] = [format_2_date_str(dt) for dt in data]
logger.info('%d column["%s"] date to str', n_data, ret_data.Fields[n_data])
ret_df = pd.DataFrame(ret_data.Data, index=ret_data.Fields, columns=ret_data.Codes)
# print('ret_df\n', ret_df)
ret_dic = ret_df.to_dict()
# print('ret_dic:\n', ret_dic)
return ret_dic
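# Illustrative client call for the endpoint above (hypothetical host/port; assumes the API is
# mounted at the application root, so the namespace prefix makes the full path /wind/wset/):
#   import requests
#   resp = requests.post("http://127.0.0.1:5000/wind/wset/",
#                        json={"tablename": "sectorconstituent",
#                              "options": "date=2017-03-21;sectorid=1000023121000000"})
#   print(resp.json())  # dict keyed by security code, as assembled from the DataFrame above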
@rec.route('/wsd/')
class ReceiveWSD(Resource):
@rec.expect(receive_wsd_parser)
def post(self):
"""
json str:{"codes": "603555.SH", "fields": "close,pct_chg",
"begin_time": "2017-01-04", "end_time": "2017-02-28", "options": "PriceAdj=F"}
        :return: dict of the data returned by Wind
"""
args = receive_wsd_parser.parse_args()
# print(request.json)
logger.info('/wsd/ args:%s' % args)
# codes = args['codes']
# fields = args['fields']
# begin_time = args['begin_time']
# end_time = args['end_time']
# options = args['options']
if args['options'] == "":
args['options'] = None
if not w.isconnected():
w.start()
ret_data = None
for nth in range(2):
ret_data = w.wsd(**args)
error_code = ret_data.ErrorCode
if error_code != 0:
if nth == 0 and error_code == -40521010:
w.start()
logger.warning('尝试重新登陆成功,再次调用函数')
continue
msg = ERROR_CODE_MSG_DIC.setdefault(error_code, "")
logger.error('wsd(%s) ErrorCode=%d %s' % (args, error_code, msg))
raise RequestError(msg, None, error_code)
else:
break
else:
if ret_data is None:
msg = 'wst(%s) ret_data is None' % args
logger.error(msg)
raise RequestError(msg, None, 0)
# 将 Data数据中所有 datetime date 类型的数据转换为 string
data_len = len(ret_data.Data)
for n_data in range(data_len):
data = ret_data.Data[n_data]
data_len2 = len(data)
if data_len2 > 0:
# 取出第一个部位None的数据
item_check = None
for item_check in data:
if item_check is not None:
break
# 进行类型检查,如果发现是 datetime, date 类型之一,则进行类型转换
if item_check is not None and type(item_check) in (datetime, date):
ret_data.Data[n_data] = [format_2_date_str(dt) for dt in data]
logger.info('%d column["%s"] date to str', n_data, ret_data.Fields[n_data])
# 组成 DataFrame
if len(ret_data.Codes) == 1:
ret_df = pd.DataFrame(ret_data.Data, index=ret_data.Fields,
columns=[format_2_date_str(dt) for dt in ret_data.Times])
elif len(ret_data.Times) == 1:
ret_df = pd.DataFrame(ret_data.Data, index=ret_data.Fields,
columns=ret_data.Codes)
else:
ret_df = pd.DataFrame(ret_data.Data, index=ret_data.Codes,
columns=[format_2_date_str(dt) for dt in ret_data.Times])
# print(ret_df)
ret_dic = ret_df.to_dict()
# print('ret_dic:\n', ret_dic)
return ret_dic
@rec.route('/wsi/')
class ReceiveWSI(Resource):
@rec.expect(receive_wsi_parser)
def post(self):
"""
json str:{"codes": "RU1801.SHF", "fields": "open,high,low,close,volume,amt,oi",
"begin_time": "2017-12-11 09:00:00", "end_time": "2017-12-11 10:27:41", "options": ""}
        :return: dict of the data returned by Wind
"""
args = receive_wsi_parser.parse_args()
# print(request.json)
logger.info('/wsi/ args:%s' % args)
# codes = args['codes']
# fields = args['fields']
# begin_time = args['begin_time']
# end_time = args['end_time']
# options = args['options']
if args['options'] == "":
args['options'] = None
if not w.isconnected():
w.start()
ret_data = None
for nth in range(2):
ret_data = w.wsi(**args)
error_code = ret_data.ErrorCode
if error_code != 0:
if nth == 0 and error_code == -40521010:
w.start()
logger.warning('尝试重新登陆成功,再次调用函数')
continue
msg = ERROR_CODE_MSG_DIC.setdefault(error_code, "")
logger.error('wsi(%s) ErrorCode=%d %s' % (
args, error_code, msg))
raise RequestError(msg, None, error_code)
else:
break
else:
if ret_data is None:
msg = 'wst(%s) ret_data is None' % args
logger.error(msg)
raise RequestError(msg, None, 0)
# 将 Data数据中所有 datetime date 类型的数据转换为 string
data_len = len(ret_data.Data)
for n_data in range(data_len):
data = ret_data.Data[n_data]
data_len2 = len(data)
if data_len2 > 0:
# 取出第一个部位None的数据
item_check = None
for item_check in data:
if item_check is not None:
break
# 进行类型检查,如果发现是 datetime, date 类型之一,则进行类型转换
if item_check is not None and type(item_check) in (datetime, date):
ret_data.Data[n_data] = [format_2_datetime_str(dt) for dt in data]
logger.info('%d column["%s"] date to str', n_data, ret_data.Fields[n_data])
# 组成 DataFrame
ret_df = pd.DataFrame(ret_data.Data, index=ret_data.Fields,
columns=[format_2_datetime_str(dt) for dt in ret_data.Times])
# print(ret_df)
ret_dic = ret_df.to_dict()
# print('ret_dic:\n', ret_dic)
return ret_dic
@rec.route('/wss/')
class ReceiveWSS(Resource):
@rec.expect(receive_wss_parser)
def post(self):
"""
json str:{"codes": "XT1522613.XT",
"fields": "fund_setupdate,fund_maturitydate,fund_mgrcomp,fund_existingyear,fund_fundmanager", "options": ""}
        :return: dict of the data returned by Wind
"""
args = receive_wss_parser.parse_args()
logger.info('/wss/ args:%s', args)
# codes = args['codes']
# fields = args['fields']
# options = args['options']
if args['options'] == "":
args['options'] = None
if not w.isconnected():
w.start()
ret_data = None
for nth in range(2):
ret_data = w.wss(**args)
error_code = ret_data.ErrorCode
if error_code != 0:
if nth == 0 and error_code == -40521010:
w.start()
logger.warning('尝试重新登陆成功,再次调用函数')
continue
msg = ERROR_CODE_MSG_DIC.setdefault(error_code, "")
logger.error('wss(%s) ErrorCode=%d %s' % (args, error_code, msg))
raise RequestError(msg, None, error_code)
else:
break
else:
if ret_data is None:
msg = 'wst(%s) ret_data is None' % args
logger.error(msg)
raise RequestError(msg, None, 0)
# 将 Data数据中所有 datetime date 类型的数据转换为 string
data_len = len(ret_data.Data)
logger.debug('ret_data.Data len:%d', data_len)
logger.debug('ret_data.Codes : %s', ret_data.Codes)
for n_data in range(data_len):
data = ret_data.Data[n_data]
data_len2 = len(data)
if data_len2 > 0:
if type(data[0]) in (datetime, date):
ret_data.Data[n_data] = [format_2_date_str(dt) for dt in data]
logger.info('%d column["%s"] date to str', n_data, ret_data.Fields[n_data])
# print('ret_data.Data:\n', ret_data.Data)
# 组成 DataFrame
ret_df = pd.DataFrame(ret_data.Data, index=ret_data.Fields, columns=ret_data.Codes)
ret_dic = ret_df.to_dict()
# print('ret_dic:\n', ret_dic)
return ret_dic
@rec.route('/tdaysoffset/')
class ReceiveTdaysoffset(Resource):
@rec.expect(tdays_offset_parser)
def post(self):
"""
json str:{"offset": "1", "begin_time": "2017-3-31", "options": ""}
        :return: dict of the data returned by Wind
"""
args = tdays_offset_parser.parse_args()
logger.info('/tdaysoffset/ args:%s', args)
# offset = int(args['offset'])
# begin_time = args['begin_time']
# options = args['options']
if args['options'] == "":
args['options'] = None
if not w.isconnected():
w.start()
ret_data = None
for nth in range(2):
ret_data = w.tdaysoffset(**args)
error_code = ret_data.ErrorCode
if error_code != 0:
if nth == 0 and error_code == -40521010:
w.start()
logger.warning('尝试重新登陆成功,再次调用函数')
continue
msg = ERROR_CODE_MSG_DIC.setdefault(error_code, "")
logger.error(
'tdaysoffset("%s") ErrorCode=%d %s' % (args, error_code, msg))
raise RequestError(msg, None, error_code)
else:
break
else:
if ret_data is None:
msg = 'wst(%s) ret_data is None' % args
logger.error(msg)
raise RequestError(msg, None, 0)
# 将 Data数据中所有 datetime date 类型的数据转换为 string
if len(ret_data.Data) > 0 and len(ret_data.Data[0]) > 0:
date_str = format_2_date_str(ret_data.Data[0][0])
else:
logger.warning('tdaysoffset(%s) No value return' % args)
date_str = ''
ret_dic = {'Date': date_str}
# print('offset:\n', ret_dic)
return ret_dic
@rec.route('/tdays/')
class ReceiveTdays(Resource):
@rec.expect(tdays_parser)
def post(self):
"""
json str:{"begin_time": "2017-3-31", "end_time": "2017-3-31", "options": ""}
        :return: dict of the data returned by Wind
"""
args = tdays_parser.parse_args()
logger.info('/tdays/ args:%s', args)
# begin_time = args['begin_time']
# end_time = args['end_time']
# options = args['options']
if args['options'] == "":
args['options'] = None
if not w.isconnected():
w.start()
ret_data = None
for nth in range(2):
ret_data = w.tdays(**args)
error_code = ret_data.ErrorCode
if error_code != 0:
if nth == 0 and error_code == -40521010:
w.start()
logger.warning('尝试重新登陆成功,再次调用函数')
continue
msg = ERROR_CODE_MSG_DIC.setdefault(error_code, "")
logger.error('tdays(%s) ErrorCode=%d %s' % (args, error_code, msg))
raise RequestError(msg, None, error_code)
else:
break
else:
if ret_data is None:
msg = 'wst(%s) ret_data is None' % args
logger.error(msg)
raise RequestError(msg, None, 0)
# 将 Data数据中所有 datetime date 类型的数据转换为 string
if len(ret_data.Data) > 0 and len(ret_data.Data[0]) > 0:
# date_str = format_datetime_to_str(ret_data.Data[0][0])
# ret_df = pd.DataFrame({'date': [format_datetime_to_str(d) for d in ret_data.Data[0]]})
# ret_df.index = [str(idx) for idx in ret_df.index]
# ret_dic = {'date': [format_datetime_to_str(d) for d in ret_data.Data[0]]}
ret_dic = [format_2_date_str(d) for d in ret_data.Data[0]]
else:
logger.warning('tdays(%s) No value return' % args)
ret_dic = []
# ret_dic = ret_df.to_dict()
# print('tdays:\n', ret_dic)
return ret_dic
@rec.route('/wsq/')
class ReceiveWSQ(Resource):
@rec.expect(receive_wsq_parser)
def post(self):
"""
json str:{"codes": "600008.SH,600010.SH,600017.SH", "fields": "rt_open,rt_low_limit", "options": ""}
        :return: dict of the data returned by Wind
"""
args = receive_wsq_parser.parse_args()
logger.info('/wsq/ args:%s', args)
# codes = args['codes']
# fields = args['fields']
# options = args['options']
if args['options'] == "":
args['options'] = None
if not w.isconnected():
w.start()
ret_data = None
for nth in range(2):
ret_data = w.wsq(**args)
error_code = ret_data.ErrorCode
if error_code != 0:
if nth == 0 and error_code == -40521010:
w.start()
logger.warning('尝试重新登陆成功,再次调用函数')
continue
msg = ERROR_CODE_MSG_DIC.setdefault(error_code, "")
                logger.error('wsq(%s) ErrorCode=%d %s' % (args, error_code, msg))
raise RequestError(msg, None, error_code)
else:
break
else:
if ret_data is None:
msg = 'wst(%s) ret_data is None' % args
logger.error(msg)
raise RequestError(msg, None, 0)
# 将 Data数据中所有 datetime date 类型的数据转换为 string
data_len = len(ret_data.Data)
logger.debug('ret_data.Data len:%d', data_len)
logger.debug('ret_data.Codes : %s', ret_data.Codes)
for n_data in range(data_len):
data = ret_data.Data[n_data]
data_len2 = len(data)
if data_len2 > 0:
if type(data[0]) in (datetime, date):
ret_data.Data[n_data] = [format_2_date_str(dt) for dt in data]
logger.info('%d column["%s"] date to str', n_data, ret_data.Fields[n_data])
# print('ret_data.Data:\n', ret_data.Data)
# 组成 DataFrame
ret_df = pd.DataFrame(ret_data.Data, index=ret_data.Fields, columns=ret_data.Codes)
ret_dic = ret_df.to_dict()
# print('ret_dic:\n', ret_dic)
return ret_dic
@rec.route('/wst/')
class ReceiveWST(Resource):
@rec.expect(receive_wst_parser)
def post(self):
"""
json str:{"codes": "600008.SH, "fields": "ask1,bid1,asize1,bsize1,volume,amt,pre_close,open,high,low,last",
"begin_time": "2017-01-04", "end_time": "2017-02-28", "options": ""}
        :return: dict of the data returned by Wind
"""
args = receive_wst_parser.parse_args()
logger.info('/wst/ args:%s', args)
# codes = args['codes']
# fields = args['fields']
# begin_time = args['begin_time']
# end_time = args['end_time']
# options = args['options']
if args['options'] == "":
args['options'] = None
if not w.isconnected():
w.start()
ret_data = None
for nth in range(2):
ret_data = w.wst(**args)
error_code = ret_data.ErrorCode
if error_code != 0:
if nth == 0 and error_code == -40521010:
w.start()
logger.warning('尝试重新登陆成功,再次调用函数')
continue
msg = ERROR_CODE_MSG_DIC.setdefault(error_code, "")
logger.error('wst(%s) ErrorCode=%d %s' % (args, error_code, msg))
raise RequestError(msg, None, error_code)
else:
break
else:
if ret_data is None:
msg = 'wst(%s) ret_data is None' % args
logger.error(msg)
raise RequestError(msg, None, 0)
# 将 Data数据中所有 datetime date 类型的数据转换为 string
data_len = len(ret_data.Data)
for n_data in range(data_len):
data = ret_data.Data[n_data]
data_len2 = len(data)
if data_len2 > 0:
# 取出第一个部位None的数据
item_check = None
for item_check in data:
if item_check is not None:
break
# 进行类型检查,如果发现是 datetime, date 类型之一,则进行类型转换
if item_check is not None and type(item_check) in (datetime, date):
ret_data.Data[n_data] = [format_2_datetime_str(dt) for dt in data]
logger.info('%d column["%s"] date to str', n_data, ret_data.Fields[n_data])
# 组成 DataFrame
ret_df = pd.DataFrame(ret_data.Data, index=ret_data.Fields,
columns=[format_2_datetime_str(dt) for dt in ret_data.Times])
# print(ret_df)
ret_dic = ret_df.to_dict()
# print('ret_dic:\n', ret_dic)
return ret_dic
@rec.route('/edb/')
class ReceiveEDB(Resource):
@rec.expect(receive_edb_parser)
def post(self):
"""
json str:{"codes": "M0017126,M0017127,M0017128",
"begin_time": "2016-11-10", "end_time": "2017-11-10", "options": "Fill=Previous"}
        :return: dict of the data returned by Wind
"""
args = receive_edb_parser.parse_args()
logger.info('/edb/ args:%s', args)
codes = args['codes']
# begin_time = args['begin_time']
# end_time = args['end_time']
# options = args['options']
if args['options'] == "":
args['options'] = None
if not w.isconnected():
w.start()
ret_data = None
for nth in range(2):
ret_data = w.edb(**args)
error_code = ret_data.ErrorCode
if error_code != 0:
if nth == 0 and error_code == -40521010:
w.start()
logger.warning('尝试重新登陆成功,再次调用函数')
continue
msg = ERROR_CODE_MSG_DIC.setdefault(error_code, "")
logger.error('wst(%s) ErrorCode=%d %s' % (args, error_code, msg))
raise RequestError(msg, None, error_code)
else:
break
else:
if ret_data is None:
msg = 'wst(%s) ret_data is None' % args
logger.error(msg)
raise RequestError(msg, None, 0)
# 将 Data数据中所有 datetime date 类型的数据转换为 string
data_len = len(ret_data.Data)
for n_data in range(data_len):
data = ret_data.Data[n_data]
data_len2 = len(data)
if data_len2 > 0:
# 取出第一个部位None的数据
item_check = None
for item_check in data:
if item_check is not None:
break
# 进行类型检查,如果发现是 datetime, date 类型之一,则进行类型转换
if item_check is not None and type(item_check) in (datetime, date):
ret_data.Data[n_data] = [format_2_date_str(dt) for dt in data]
logger.info('%d column["%s"] date to str', n_data, ret_data.Fields[n_data])
# 组成 DataFrame
ret_df = pd.DataFrame(ret_data.Data, index=[xx.strip() for xx in codes.split(',')],
columns=[format_2_date_str(dt) for dt in ret_data.Times])
# print(ret_df)
ret_dic = ret_df.to_dict()
# print('ret_dic:\n', ret_dic)
        return ret_dic
<!-- Source file: FreeTAKServer_UI-1.9.9.2.dist-info/LICENSE.md -->
MIT License
Copyright (c) 2019 [AppSeed App Generator](https://appseed.us)
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
| PypiClean |
# Source file: montreal_forced_aligner/helper.py (from the Montreal-Forced-Aligner-3.0.0a3 package)
from __future__ import annotations
import collections
import functools
import itertools
import json
import logging
import re
import typing
from contextlib import contextmanager
from pathlib import Path
from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Type
import dataclassy
import numpy
import yaml
from Bio import pairwise2
from rich.console import Console
from rich.logging import RichHandler
from rich.theme import Theme
if TYPE_CHECKING:
from montreal_forced_aligner.abc import MetaDict
from montreal_forced_aligner.data import CtmInterval
__all__ = [
"comma_join",
"make_safe",
"make_scp_safe",
"load_scp",
"load_scp_safe",
"score_wer",
"score_g2p",
"edit_distance",
"output_mapping",
"parse_old_features",
"compare_labels",
"overlap_scoring",
"make_re_character_set_safe",
"align_phones",
"split_phone_position",
"align_pronunciations",
"configure_logger",
"mfa_open",
"load_configuration",
"format_correction",
"format_probability",
]
console = Console(
theme=Theme(
{
"logging.level.debug": "cyan",
"logging.level.info": "green",
"logging.level.warning": "yellow",
"logging.level.error": "red",
}
),
stderr=True,
)
@contextmanager
def mfa_open(path, mode="r", encoding="utf8", newline=""):
if "r" in mode:
if "b" in mode:
file = open(path, mode)
else:
file = open(path, mode, encoding=encoding)
else:
if "b" in mode:
file = open(path, mode)
else:
file = open(path, mode, encoding=encoding, newline=newline)
try:
yield file
finally:
file.close()
def load_configuration(config_path: typing.Union[str, Path]) -> Dict[str, Any]:
"""
Load a configuration file
Parameters
----------
config_path: :class:`~pathlib.Path`
Path to yaml or json configuration file
Returns
-------
dict[str, Any]
Configuration dictionary
"""
data = {}
if not isinstance(config_path, Path):
config_path = Path(config_path)
with mfa_open(config_path, "r") as f:
if config_path.suffix == ".yaml":
data = yaml.load(f, Loader=yaml.Loader)
elif config_path.suffix == ".json":
data = json.load(f)
if not data:
return {}
return data
def split_phone_position(phone_label: str) -> List[str]:
"""
Splits a phone label into its original phone and it's positional label
Parameters
----------
phone_label: str
Phone label
Returns
-------
List[str]
Phone and position
"""
phone = phone_label
pos = None
try:
phone, pos = phone_label.rsplit("_", maxsplit=1)
except ValueError:
pass
return phone, pos
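# Doctest-style illustration (positional suffixes such as _B/_I/_E/_S are attached elsewhere in MFA):
#   >>> split_phone_position("AH0_B")
#   ('AH0', 'B')
#   >>> split_phone_position("sil")
#   ('sil', None)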
def parse_old_features(config: MetaDict) -> MetaDict:
"""
Backwards compatibility function to parse old feature configuration blocks
Parameters
----------
config: dict[str, Any]
Configuration parameters
Returns
-------
dict[str, Any]
Up to date versions of feature blocks
"""
feature_key_remapping = {
"type": "feature_type",
"deltas": "uses_deltas",
}
skip_keys = ["lda", "fmllr"]
if "features" in config:
for key in skip_keys:
if key in config["features"]:
del config["features"][key]
for key, new_key in feature_key_remapping.items():
if key in config["features"]:
config["features"][new_key] = config["features"][key]
del config["features"][key]
else:
for key in skip_keys:
if key in config:
del config[key]
for key, new_key in feature_key_remapping.items():
if key in config:
config[new_key] = config[key]
del config[key]
return config
def configure_logger(identifier: str, log_file: Optional[Path] = None) -> None:
"""
Configure logging for the given identifier
Parameters
----------
identifier: str
Logger identifier
log_file: str
Path to file to write all messages to
"""
from montreal_forced_aligner.config import MfaConfiguration
config = MfaConfiguration()
logger = logging.getLogger(identifier)
logger.setLevel(logging.DEBUG)
if log_file is not None:
file_handler = logging.FileHandler(log_file, encoding="utf8")
file_handler.setLevel(logging.DEBUG)
formatter = logging.Formatter("%(asctime)s - %(name)s - %(levelname)s - %(message)s")
file_handler.setFormatter(formatter)
logger.addHandler(file_handler)
if not config.current_profile.quiet:
handler = RichHandler(
rich_tracebacks=True, log_time_format="", console=console, show_path=False
)
if config.current_profile.verbose:
handler.setLevel(logging.DEBUG)
else:
handler.setLevel(logging.INFO)
handler.setFormatter(logging.Formatter("%(message)s"))
logger.addHandler(handler)
def comma_join(sequence: List[Any]) -> str:
"""
Helper function to combine a list into a human-readable expression with commas and a
final "and" separator
Parameters
----------
sequence: list[Any]
Items to join together into a list
Returns
-------
str
Joined items in list format
"""
if len(sequence) < 3:
return " and ".join(sequence)
return f"{', '.join(sequence[:-1])}, and {sequence[-1]}"
def make_re_character_set_safe(
characters: typing.Collection[str], extra_strings: Optional[List[str]] = None
) -> str:
"""
Construct a character set string for use in regex, escaping necessary characters and
moving "-" to the initial position
Parameters
----------
characters: Collection[str]
Characters to compile
extra_strings: list[str], optional
Optional other strings to put in the character class
Returns
-------
str
Character set specifier for re functions
"""
characters = sorted(characters)
extra = ""
if "-" in characters:
extra = "-"
characters = [x for x in characters if x != "-"]
if extra_strings:
extra += "".join(extra_strings)
return f"[{extra}{re.escape(''.join(characters))}]"
def make_safe(element: Any) -> str:
"""
Helper function to make an element a string
Parameters
----------
element: Any
Element to recursively turn into a string
Returns
-------
str
All elements combined into a string
"""
if isinstance(element, list):
return " ".join(map(make_safe, element))
return str(element)
def make_scp_safe(string: str) -> str:
"""
Helper function to make a string safe for saving in Kaldi scp files. They use space as a delimiter, so
any spaces in the string will be converted to "_MFASPACE_" to preserve them
Parameters
----------
string: str
Text to escape
Returns
-------
str
Escaped text
"""
return str(string).replace(" ", "_MFASPACE_")
def load_scp_safe(string: str) -> str:
"""
Helper function to load previously made safe text. All instances of "_MFASPACE_" will be converted to a
regular space character
Parameters
----------
string: str
String to convert
Returns
-------
str
Converted string
"""
return string.replace("_MFASPACE_", " ")
def output_mapping(mapping: Dict[str, Any], path: Path, skip_safe: bool = False) -> None:
"""
Helper function to save mapping information (i.e., utt2spk) in Kaldi scp format
CorpusMappingType is either a dictionary of key to value for
one-to-one mapping case and a dictionary of key to list of values for one-to-many case.
Parameters
----------
mapping: dict[str, Any]
Mapping to output
path: :class:`~pathlib.Path`
Path to save mapping
skip_safe: bool, optional
Flag for whether to skip over making a string safe
"""
if not mapping:
return
with mfa_open(path, "w") as f:
for k in sorted(mapping.keys()):
v = mapping[k]
if isinstance(v, (list, set, tuple)):
v = " ".join(map(str, v))
elif not skip_safe:
v = make_scp_safe(v)
f.write(f"{make_scp_safe(k)} {v}\n")
def load_scp(path: Path, data_type: Optional[Type] = str) -> Dict[str, Any]:
"""
Load a Kaldi script file (.scp)
Scp files in Kaldi can either be one-to-one or one-to-many, with the first element separated by
whitespace as the key and the remaining whitespace-delimited elements the values.
Returns a dictionary of key to value for
one-to-one mapping case and a dictionary of key to list of values for one-to-many case.
See Also
--------
:kaldi_docs:`io#io_sec_scp_details`
For more information on the SCP format
Parameters
----------
path : :class:`~pathlib.Path`
Path to Kaldi script file
data_type : type
Type to coerce the data to
Returns
-------
dict[str, Any]
Dictionary where the keys are the first column and the values are all
other columns in the scp file
"""
scp = {}
with mfa_open(path, "r") as f:
for line in f:
line = line.strip()
if line == "":
continue
line_list = line.split()
key = load_scp_safe(line_list.pop(0))
if len(line_list) == 1:
value = data_type(line_list[0])
if isinstance(value, str):
value = load_scp_safe(value)
else:
value = [data_type(x) for x in line_list if x not in ["[", "]"]]
scp[key] = value
return scp
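# Illustration (hypothetical files): a one-to-one scp line such as "utt1 speaker_one" loads as
# {'utt1': 'speaker_one'}, while a one-to-many line such as "utt1 1.0 2.5" loaded with
# data_type=float gives {'utt1': [1.0, 2.5]}.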
def edit_distance(x: List[str], y: List[str]) -> int:
"""
Compute edit distance between two sets of labels
See Also
--------
`https://gist.github.com/kylebgorman/8034009 <https://gist.github.com/kylebgorman/8034009>`_
For a more expressive version of this function
Parameters
----------
x: list[str]
First sequence to compare
y: list[str]
Second sequence to compare
Returns
-------
int
Edit distance
"""
idim = len(x) + 1
jdim = len(y) + 1
table = numpy.zeros((idim, jdim), dtype=numpy.uint8)
    # initialize the borders cumulatively so that deleting/inserting an i-character prefix costs i
    table[1:, 0] = numpy.arange(1, idim)
    table[0, 1:] = numpy.arange(1, jdim)
for i in range(1, idim):
for j in range(1, jdim):
if x[i - 1] == y[j - 1]:
table[i][j] = table[i - 1][j - 1]
else:
c1 = table[i - 1][j]
c2 = table[i][j - 1]
c3 = table[i - 1][j - 1]
table[i][j] = min(c1, c2, c3) + 1
return int(table[-1][-1])
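# Doctest-style illustration (classic Levenshtein example):
#   >>> edit_distance(list("kitten"), list("sitting"))
#   3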
def score_g2p(gold: List[str], hypo: List[str]) -> Tuple[int, int]:
"""
Computes sufficient statistics for LER calculation.
Parameters
----------
gold: WordData
The reference labels
hypo: WordData
The hypothesized labels
Returns
-------
int
Edit distance
int
Length of the gold labels
"""
for h in hypo:
if h in gold:
return 0, len(h)
edits = 100000
best_length = 100000
for (g, h) in itertools.product(gold, hypo):
e = edit_distance(g.split(), h.split())
if e < edits:
edits = e
best_length = len(g)
if not edits:
best_length = len(g)
break
return edits, best_length
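# Illustration (hypothetical space-separated pronunciations): when one hypothesis matches a gold
# pronunciation exactly, the error count is 0 and the length of that string is returned:
#   >>> score_g2p(["k a t"], ["k a t", "c a t"])
#   (0, 5)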
def score_wer(gold: List[str], hypo: List[str]) -> Tuple[int, int, int, int]:
"""
Computes word error rate and character error rate for a transcription
Parameters
----------
gold: list[str]
The reference words
hypo: list[str]
The hypothesized words
Returns
-------
int
Word Edit distance
int
Length of the gold words labels
int
Character edit distance
int
Length of the gold characters
"""
word_edits = edit_distance(gold, hypo)
character_gold = list("".join(gold))
character_hypo = list("".join(hypo))
character_edits = edit_distance(character_gold, character_hypo)
return word_edits, len(gold), character_edits, len(character_gold)
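# Doctest-style illustration (one substituted word out of two, one substituted character out of six):
#   >>> score_wer(["the", "cat"], ["the", "hat"])
#   (1, 2, 1, 6)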
def compare_labels(
ref: str, test: str, silence_phone: str, mapping: Optional[Dict[str, str]] = None
) -> int:
"""
Parameters
----------
ref: str
test: str
mapping: Optional[dict[str, str]]
Returns
-------
int
        0 if the labels match (directly, case-insensitively, or via the mapping), 10 if exactly one of them is the silence phone, 2 otherwise
"""
if ref == test:
return 0
if ref == silence_phone or test == silence_phone:
return 10
if mapping is not None and test in mapping:
if isinstance(mapping[test], str):
if mapping[test] == ref:
return 0
elif ref in mapping[test]:
return 0
ref = ref.lower()
test = test.lower()
if ref == test:
return 0
return 2
def overlap_scoring(
first_element: CtmInterval,
second_element: CtmInterval,
silence_phone: str,
mapping: Optional[Dict[str, str]] = None,
) -> float:
r"""
Method to calculate overlap scoring
.. math::
Score = -(\lvert begin_{1} - begin_{2} \rvert + \lvert end_{1} - end_{2} \rvert + \begin{cases}
0, & if label_{1} = label_{2} \\
2, & otherwise
\end{cases})
See Also
--------
`Blog post <https://memcauliffe.com/update-on-montreal-forced-aligner-performance.html>`_
For a detailed example that using this metric
Parameters
----------
first_element: :class:`~montreal_forced_aligner.data.CtmInterval`
First CTM interval to compare
second_element: :class:`~montreal_forced_aligner.data.CtmInterval`
Second CTM interval
mapping: Optional[dict[str, str]]
Optional mapping of phones to treat as matches even if they have different symbols
Returns
-------
float
Score calculated as the negative sum of the absolute different in begin timestamps, absolute difference in end
timestamps and the label score
"""
begin_diff = abs(first_element.begin - second_element.begin)
end_diff = abs(first_element.end - second_element.end)
label_diff = compare_labels(first_element.label, second_element.label, silence_phone, mapping)
return -1 * (begin_diff + end_diff + label_diff)
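# Worked example (hypothetical intervals): for a reference interval (0.10, 0.50, "AH0") and a test
# interval (0.12, 0.45, "AH0"), the score is -(|0.10-0.12| + |0.50-0.45| + 0) = -0.07; a label
# mismatch would add 2 to the penalty (or 10 when exactly one label is the silence phone).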
class EnhancedJSONEncoder(json.JSONEncoder):
"""JSON serialization"""
def default(self, o: Any) -> Any:
"""Get the dictionary of a dataclass"""
if dataclassy.functions.is_dataclass_instance(o):
return dataclassy.asdict(o)
if isinstance(o, set):
return list(o)
return dataclassy.asdict(o)
def align_pronunciations(
ref_text: typing.List[str],
pronunciations: typing.List[str],
oov_phone: str,
silence_phone: str,
silence_word: str,
word_pronunciations: typing.Dict[str, typing.Set[str]],
):
def score_function(ref: str, pron: typing.List[str]):
if not word_pronunciations:
return 0
if ref in word_pronunciations and pron in word_pronunciations[ref]:
return 0
if pron == oov_phone:
return 0
return -2
alignments = pairwise2.align.globalcs(
ref_text,
pronunciations,
score_function,
-1 if word_pronunciations else -5,
-1 if word_pronunciations else -5,
gap_char=["-"],
one_alignment_only=True,
)
transformed_pronunciations = []
for a in alignments:
for i, sa in enumerate(a.seqA):
sb = a.seqB[i]
if sa == "-" and sb == silence_phone:
sa = silence_word
if "-" in (sa, sb):
continue
transformed_pronunciations.append((sa, sb.split()))
return transformed_pronunciations
def align_phones(
ref: List[CtmInterval],
test: List[CtmInterval],
silence_phone: str,
ignored_phones: typing.Set[str] = None,
custom_mapping: Optional[Dict[str, str]] = None,
debug: bool = False,
) -> Tuple[float, float, Dict[Tuple[str, str], int]]:
"""
Align phones based on how much they overlap and their phone label, with the ability to specify a custom mapping for
different phone labels to be scored as if they're the same phone
Parameters
----------
ref: list[:class:`~montreal_forced_aligner.data.CtmInterval`]
List of CTM intervals as reference
test: list[:class:`~montreal_forced_aligner.data.CtmInterval`]
List of CTM intervals to compare to reference
silence_phone: str
Silence phone (these are ignored in the final calculation)
custom_mapping: dict[str, str], optional
Mapping of phones to treat as matches even if they have different symbols
debug: bool, optional
Flag for logging extra information about alignments
Returns
-------
float
Score based on the average amount of overlap in phone intervals
float
Phone error rate
dict[tuple[str, str], int]
Dictionary of error pairs with their counts
"""
if ignored_phones is None:
ignored_phones = set()
if custom_mapping is None:
score_func = functools.partial(overlap_scoring, silence_phone=silence_phone)
else:
score_func = functools.partial(
overlap_scoring, silence_phone=silence_phone, mapping=custom_mapping
)
alignments = pairwise2.align.globalcs(
ref, test, score_func, -2, -2, gap_char=["-"], one_alignment_only=True
)
overlap_count = 0
overlap_sum = 0
num_insertions = 0
num_deletions = 0
num_substitutions = 0
errors = collections.Counter()
for a in alignments:
for i, sa in enumerate(a.seqA):
sb = a.seqB[i]
if sa == "-":
if sb.label not in ignored_phones:
errors[(sa, sb.label)] += 1
num_insertions += 1
else:
continue
elif sb == "-":
if sa.label not in ignored_phones:
errors[(sa.label, sb)] += 1
num_deletions += 1
else:
continue
else:
if sa.label in ignored_phones:
continue
overlap_sum += (abs(sa.begin - sb.begin) + abs(sa.end - sb.end)) / 2
overlap_count += 1
if compare_labels(sa.label, sb.label, silence_phone, mapping=custom_mapping) > 0:
num_substitutions += 1
errors[(sa.label, sb.label)] += 1
if debug:
import logging
logger = logging.getLogger("mfa")
logger.debug(pairwise2.format_alignment(*alignments[0]))
if overlap_count:
score = overlap_sum / overlap_count
else:
score = None
phone_error_rate = (num_insertions + num_deletions + (2 * num_substitutions)) / len(ref)
return score, phone_error_rate, errors
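# Illustrative call (hypothetical CtmInterval lists; custom_mapping lets a test label such as "ah"
# count as a match for the reference label "AH0"):
#   mean_overlap, per, error_counts = align_phones(
#       ref_intervals, test_intervals, silence_phone="sil", custom_mapping={"ah": "AH0"})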
def format_probability(probability_value: float) -> float:
"""Format a probability to have two decimal places and be between 0.01 and 0.99"""
return min(max(round(probability_value, 2), 0.01), 0.99)
def format_correction(correction_value: float, positive_only=True) -> float:
"""Format a probability correction value to have two decimal places and be greater than 0.01"""
correction_value = round(correction_value, 2)
if correction_value <= 0 and positive_only:
correction_value = 0.01
    return correction_value
# Source file: django/contrib/gis/db/backends/oracle/schema.py (from the Django_patch-2.2.19 package)
from django.contrib.gis.db.models.fields import GeometryField
from django.db.backends.oracle.schema import DatabaseSchemaEditor
from django.db.backends.utils import strip_quotes, truncate_name
class OracleGISSchemaEditor(DatabaseSchemaEditor):
sql_add_geometry_metadata = ("""
INSERT INTO USER_SDO_GEOM_METADATA
("TABLE_NAME", "COLUMN_NAME", "DIMINFO", "SRID")
VALUES (
%(table)s,
%(column)s,
MDSYS.SDO_DIM_ARRAY(
MDSYS.SDO_DIM_ELEMENT('LONG', %(dim0)s, %(dim2)s, %(tolerance)s),
MDSYS.SDO_DIM_ELEMENT('LAT', %(dim1)s, %(dim3)s, %(tolerance)s)
),
%(srid)s
)""")
sql_add_spatial_index = 'CREATE INDEX %(index)s ON %(table)s(%(column)s) INDEXTYPE IS MDSYS.SPATIAL_INDEX'
sql_drop_spatial_index = 'DROP INDEX %(index)s'
sql_clear_geometry_table_metadata = 'DELETE FROM USER_SDO_GEOM_METADATA WHERE TABLE_NAME = %(table)s'
sql_clear_geometry_field_metadata = (
'DELETE FROM USER_SDO_GEOM_METADATA WHERE TABLE_NAME = %(table)s '
'AND COLUMN_NAME = %(column)s'
)
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.geometry_sql = []
def geo_quote_name(self, name):
return self.connection.ops.geo_quote_name(name)
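    # column_sql() only queues the geometry metadata/index statements in self.geometry_sql;
    # they are executed once the table exists (see run_geometry_sql()).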
def column_sql(self, model, field, include_default=False):
column_sql = super().column_sql(model, field, include_default)
if isinstance(field, GeometryField):
db_table = model._meta.db_table
self.geometry_sql.append(
self.sql_add_geometry_metadata % {
'table': self.geo_quote_name(db_table),
'column': self.geo_quote_name(field.column),
'dim0': field._extent[0],
'dim1': field._extent[1],
'dim2': field._extent[2],
'dim3': field._extent[3],
'tolerance': field._tolerance,
'srid': field.srid,
}
)
if field.spatial_index:
self.geometry_sql.append(
self.sql_add_spatial_index % {
'index': self.quote_name(self._create_spatial_index_name(model, field)),
'table': self.quote_name(db_table),
'column': self.quote_name(field.column),
}
)
return column_sql
def create_model(self, model):
super().create_model(model)
self.run_geometry_sql()
def delete_model(self, model):
super().delete_model(model)
self.execute(self.sql_clear_geometry_table_metadata % {
'table': self.geo_quote_name(model._meta.db_table),
})
def add_field(self, model, field):
super().add_field(model, field)
self.run_geometry_sql()
def remove_field(self, model, field):
if isinstance(field, GeometryField):
self.execute(self.sql_clear_geometry_field_metadata % {
'table': self.geo_quote_name(model._meta.db_table),
'column': self.geo_quote_name(field.column),
})
if field.spatial_index:
self.execute(self.sql_drop_spatial_index % {
'index': self.quote_name(self._create_spatial_index_name(model, field)),
})
super().remove_field(model, field)
def run_geometry_sql(self):
for sql in self.geometry_sql:
self.execute(sql)
self.geometry_sql = []
def _create_spatial_index_name(self, model, field):
# Oracle doesn't allow object names > 30 characters. Use this scheme
# instead of self._create_index_name() for backwards compatibility.
return truncate_name('%s_%s_id' % (strip_quotes(model._meta.db_table), field.column), 30) | PypiClean |
/OBP_logging_v5-0.1.1.tar.gz/OBP_logging_v5-0.1.1/OBP_logging_v5/rds.py | import logging
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger()
# checks compliance for rds logging enabled
def rds_logging_enabled(self, **kwargs) -> dict:
"""
    Checks that CloudWatch log exports are enabled for the RDS instances in kwargs['rds_lst'].
    :param self:
    :param kwargs: expects 'rds_lst'; may also contain 'exception' and 'exception_text'
    :return:
"""
logger.info(" ---Inside rds :: rds_logging_enabled()--- ")
self.refresh_session()
result = True
failReason = ''
offenders = []
control_id = 'Id3.77'
compliance_type = 'RDS Logging Enabled'
description = 'Checks that respective logs of Amazon Relational Database Service (Amazon RDS) are enabled'
resource_type = 'RDS'
risk_level = 'Low'
if 'exception' in kwargs.keys() and kwargs['exception']:
return {
'Result': False,
'failReason': kwargs['exception_text'],
'resource_type': resource_type,
'Offenders': offenders,
'Compliance_type': compliance_type,
'Description': description,
'Risk Level': risk_level,
'ControlId': control_id
}
for region, instances in kwargs['rds_lst'].items():
for instance in instances:
try:
if len(instance['EnabledCloudwatchLogsExports']) <= 0:
result = False
failReason = "RDS logging is not enabled"
offenders.append(instance['DBInstanceIdentifier'])
except KeyError:
result = False
failReason = "RDS logging is not enabled"
offenders.append(instance['DBInstanceIdentifier'])
return {
'Result': result,
'failReason': failReason,
'resource_type': resource_type,
'Offenders': offenders,
'Compliance_type': compliance_type,
'Description': description,
'Risk Level': risk_level,
'ControlId': control_id
}
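# Illustrative shape of the 'rds_lst' / 'cluster_list' kwargs consumed by the checks below
# (keys follow boto3's describe_db_instances / describe_db_clusters responses):
#   {'us-east-1': [{'DBInstanceIdentifier': 'db-1',
#                   'EnabledCloudwatchLogsExports': ['error', 'general']}]}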
# checks compliance for rds cluster deletion protection enabled
def rds_cluster_deletion_protection_enabled(self, **kwargs) -> dict:
"""
:param self:
:param kwargs:
:return:
"""
logger.info(" ---Inside rds :: rds_cluster_deletion_protection_enabled()--- ")
self.refresh_session()
result = True
failReason = ''
offenders = []
control_id = 'Id5.13'
compliance_type = 'Rds Cluster Deletion Protection Enabled'
description = 'Checks if an Amazon Relational Database Service (Amazon RDS) cluster has deletion protection enabled'
resource_type = 'RDS'
risk_level = 'Medium'
if 'exception' in kwargs.keys() and kwargs['exception']:
return {
'Result': False,
'failReason': kwargs['exception_text'],
'resource_type': resource_type,
'Offenders': offenders,
'Compliance_type': compliance_type,
'Description': description,
'Risk Level': risk_level,
'ControlId': control_id
}
for region, clusters in kwargs['cluster_list'].items():
for cluster in clusters:
if not cluster['DeletionProtection']:
result = False
failReason = "Deletion protection is not enabled"
offenders.append(cluster['DBClusterIdentifier'])
return {
'Result': result,
'failReason': failReason,
'resource_type': resource_type,
'Offenders': offenders,
'Compliance_type': compliance_type,
'Description': description,
'Risk Level': risk_level,
'ControlId': control_id
}
# checks compliance for rds cluster multi az enabled
def rds_cluster_multi_az_enabled(self, **kwargs) -> dict:
"""
:param self:
:param kwargs:
:return:
"""
logger.info(" ---Inside rds :: rds_cluster_multi_az_enabled()--- ")
self.refresh_session()
result = True
failReason = ''
offenders = []
control_id = 'Id5.14'
compliance_type = 'Rds Cluster Multi Az Enabled'
description = 'Checks if an Amazon Relational Database Service (Amazon RDS) cluster has Multi-AZ replication ' \
'enabled'
resource_type = 'RDS'
risk_level = 'Medium'
if 'exception' in kwargs.keys() and kwargs['exception']:
return {
'Result': False,
'failReason': kwargs['exception_text'],
'resource_type': resource_type,
'Offenders': offenders,
'Compliance_type': compliance_type,
'Description': description,
'Risk Level': risk_level,
'ControlId': control_id
}
for region, clusters in kwargs['cluster_list'].items():
for cluster in clusters:
if not cluster['MultiAZ']:
result = False
failReason = "MultiAZ is not enabled"
offenders.append(cluster['DBClusterIdentifier'])
return {
'Result': result,
'failReason': failReason,
'resource_type': resource_type,
'Offenders': offenders,
'Compliance_type': compliance_type,
'Description': description,
'Risk Level': risk_level,
'ControlId': control_id
}
# checks compliance for RdsInstanceIamAuthenticationEnabled
def rds_instance_iam_authentication_enabled(self, **kwargs) -> dict:
"""
    :param self:
    :param kwargs: expects 'rds_lst'; may also contain 'exception' and 'exception_text'
    :return:
"""
logger.info(" ---Inside rds :: rds_instance_iam_authentication_enabled()--- ")
self.refresh_session()
result = True
failReason = ''
offenders = []
control_id = 'Id5.15'
compliance_type = 'Rds Instance Iam Authentication Enabled'
description = "Checks if an Amazon Relational Database Service (Amazon RDS) instance has AWS Identity and Access " \
"Management (IAM) authentication enabled. The DB Engine should be one of 'mysql', 'postgres', " \
"'aurora', 'aurora-mysql', or 'aurora-postgresql'. The DB instance status should be one of " \
"'available', 'backing-up', 'storage-optimization', or 'storage-full'"
resource_type = 'RDS'
risk_level = 'Medium'
if 'exception' in kwargs.keys() and kwargs['exception']:
return {
'Result': False,
'failReason': kwargs['exception_text'],
'resource_type': resource_type,
'Offenders': offenders,
'Compliance_type': compliance_type,
'Description': description,
'Risk Level': risk_level,
'ControlId': control_id
}
for region, instances in kwargs['rds_lst'].items():
for instance in instances:
if instance['Engine'] in ['mysql', 'postgres', 'aurora', 'aurora-mysql', 'aurora-postgresql']:
if instance['DBInstanceStatus'] in ['available', 'backing-up', 'storage-optimization', 'storage-full']:
if not instance['IAMDatabaseAuthenticationEnabled']:
result = False
failReason = 'IAM Database Authentication is not enabled'
offenders.append(instance['DBInstanceIdentifier'])
return {
'Result': result,
'failReason': failReason,
'resource_type': resource_type,
'Offenders': offenders,
'Compliance_type': compliance_type,
'Description': description,
'Risk Level': risk_level,
'ControlId': control_id
} | PypiClean |
/DeepPhysX-22.6.tar.gz/DeepPhysX-22.6/src/Manager/NetworkManager.py | from typing import Any, Dict, Tuple, Optional
from os import listdir
from os.path import join as osPathJoin
from os.path import isdir, isfile
from numpy import copy, array, ndarray
from DeepPhysX.Core.Network.BaseNetworkConfig import BaseNetworkConfig
from DeepPhysX.Core.Utils.pathUtils import copy_dir, create_dir, get_first_caller
class NetworkManager:
"""
Deals with all the interactions with the neural network. Predictions, saves, initialisation, loading,
back-propagation, etc...
:param Optional[BaseNetworkConfig] network_config: Specialisation containing the parameters of the network manager
:param Manager manager: Manager that handle the network manager
:param str session_name: Name of the newly created directory if session_dir is not defined
:param Optional[str] session_dir: Name of the directory in which to write all the necessary data
:param bool new_session: Define the creation of new directories to store data
    :param bool train: If True, predictions are computed with gradient tracking enabled (training mode)
"""
def __init__(self,
network_config: Optional[BaseNetworkConfig] = None,
manager: Optional[Any] = None,
session_name: str = 'default',
session_dir: Optional[str] = None,
new_session: bool = True,
train: bool = True):
self.name: str = self.__class__.__name__
# Check network_config type
if not isinstance(network_config, BaseNetworkConfig):
raise TypeError(f"[{self.name}] Wrong 'network_config' type: BaseNetworkConfig required, "
f"get {type(network_config)}")
# Check session_name type
if type(session_name) != str:
raise TypeError(f"[{self.name}] Wrong 'session_name' type: str required, get {type(session_name)}")
# Check session_dir type and existence
if session_dir is not None:
if type(session_dir) != str:
raise TypeError(f"[{self.name}] Wrong 'session_dir' type: str required, get {type(session_dir)}")
if not isdir(session_dir):
raise ValueError(f"[{self.name}] Given 'session_dir' does not exists: {session_dir}")
# Check new_session type
if type(new_session) != bool:
raise TypeError(f"[{self.name}] Wrong 'new_session' type: bool required, get {type(new_session)}")
# Check train type
if type(train) != bool:
raise TypeError(f"[{self.name}] Wrong 'train' type: bool required, get {type(train)}")
# Storage management
self.session_dir: str = session_dir if session_dir is not None else osPathJoin(get_first_caller(), session_name)
self.new_session: bool = new_session
self.network_dir: Optional[str] = None
self.network_template_name: str = session_name + '_network_{}'
# Network management
self.manager: Any = manager
if train and not network_config.training_stuff:
raise ValueError(f"[{self.name}] Training requires a loss and an optimizer in your NetworkConfig")
self.training: bool = train
self.save_each_epoch: bool = network_config.save_each_epoch
self.saved_counter: int = 0
# Init network objects: Network, Optimization, DataTransformation
self.network: Any = None
self.optimization: Any = None
self.data_transformation: Any = None
self.network_config: BaseNetworkConfig = network_config
self.set_network()
def get_manager(self) -> Any:
"""
| Return the Manager of the NetworkManager.
:return: Manager that handles the NetworkManager
"""
return self.manager
def set_network(self) -> None:
"""
        | Create and configure the Network, Optimization and DataTransformation objects and,
        | when an existing network directory is available, load its parameters.
"""
# Init network
self.network = self.network_config.create_network()
self.network.set_device()
# Init optimization
self.optimization = self.network_config.create_optimization()
self.optimization.manager = self
if self.optimization.loss_class:
self.optimization.set_loss()
# Init DataTransformation
self.data_transformation = self.network_config.create_data_transformation()
# Training
if self.training:
# Configure as training
self.network.set_train()
self.optimization.set_optimizer(self.network)
# Setting network directory
if self.new_session and self.network_config.network_dir and isdir(self.network_config.network_dir):
self.network_dir = self.network_config.network_dir
self.network_dir = copy_dir(self.network_dir, self.session_dir, dest_dir='network')
self.load_network()
else:
self.network_dir = osPathJoin(self.session_dir, 'network/')
self.network_dir = create_dir(self.network_dir, dir_name='network')
# Prediction
else:
# Configure as prediction
self.network.set_eval()
# Need an existing network
self.network_dir = osPathJoin(self.session_dir, 'network/')
# Load parameters
self.load_network()
def load_network(self) -> None:
"""
| Load an existing set of parameters to the network.
"""
# Get eventual epoch saved networks
networks_list = [osPathJoin(self.network_dir, f) for f in listdir(self.network_dir) if
isfile(osPathJoin(self.network_dir, f)) and f.__contains__('_network_.')]
networks_list = sorted(networks_list)
# Add the final saved network
last_saved_network = [osPathJoin(self.network_dir, f) for f in listdir(self.network_dir) if
isfile(osPathJoin(self.network_dir, f)) and f.__contains__('network.')]
networks_list = networks_list + last_saved_network
which_network = self.network_config.which_network
if len(networks_list) == 0:
print(f"[{self.name}]: There is no network in {self.network_dir}. Shutting down.")
quit(0)
elif len(networks_list) == 1:
which_network = 0
elif len(networks_list) > 1 and which_network is None:
print(f"[{self.name}] There is more than one network in this directory, loading the most trained by "
f"default. If you want to load another network please use the 'which_network' variable.")
which_network = -1
        elif which_network >= len(networks_list) > 1:
print(f"[{self.name}] The selected network doesn't exist (index is too big), loading the most trained "
f"by default.")
which_network = -1
print(f"[{self.name}]: Loading network from {networks_list[which_network]}.")
self.network.load_parameters(networks_list[which_network])
def compute_prediction_and_loss(self, batch: Dict[str, ndarray],
optimize: bool) -> Tuple[ndarray, Dict[str, float]]:
"""
| Make a prediction with the data passed as argument, optimize or not the network
:param Dict[str, ndarray] batch: Format {'input': numpy.ndarray, 'output': numpy.ndarray}.
Contains the input value and ground truth to compare against
:param bool optimize: If true run a back propagation
:return: The prediction and the associated loss value
"""
# Getting data from the data manager
data_in = self.network.transform_from_numpy(batch['input'], grad=optimize)
data_gt = self.network.transform_from_numpy(batch['output'], grad=optimize)
loss_data = self.network.transform_from_numpy(batch['loss'], grad=False) if 'loss' in batch.keys() else None
# Compute prediction
data_in = self.data_transformation.transform_before_prediction(data_in)
data_out = self.network.predict(data_in)
# Compute loss
data_out, data_gt = self.data_transformation.transform_before_loss(data_out, data_gt)
loss_dict = self.optimization.compute_loss(data_out.reshape(data_gt.shape), data_gt, loss_data)
# Optimizing network if training
if optimize:
self.optimization.optimize()
# Transform prediction to be compatible with environment
data_out = self.data_transformation.transform_before_apply(data_out)
prediction = self.network.transform_to_numpy(data_out)
return prediction, loss_dict
def compute_online_prediction(self, network_input: ndarray) -> ndarray:
"""
| Make a prediction with the data passed as argument.
        :param ndarray network_input: Input of the network
:return: The prediction
"""
# Getting data from the data manager
data_in = self.network.transform_from_numpy(copy(network_input), grad=False)
# Compute prediction
data_in = self.data_transformation.transform_before_prediction(data_in)
pred = self.network.predict(data_in)
pred, _ = self.data_transformation.transform_before_loss(pred)
pred = self.data_transformation.transform_before_apply(pred)
pred = self.network.transform_to_numpy(pred)
return pred.reshape(-1)
def save_network(self, last_save: bool = False) -> None:
"""
        | Save the network parameters, adding a suffix to intermediate saves so they do not
        | overwrite each other.
:param bool last_save: Do not add suffix if it's the last save
"""
# Final session saving
if last_save:
path = self.network_dir + "network"
print(f"[{self.name}] Saving final network at {path}.")
self.network.save_parameters(path)
# Intermediate states saving
elif self.save_each_epoch:
path = self.network_dir + self.network_template_name.format(self.saved_counter)
self.saved_counter += 1
print(f"[{self.name}] Saving intermediate network at {path}.")
self.network.save_parameters(path)
def close(self) -> None:
"""
| Closing procedure.
"""
if self.training:
self.save_network(last_save=True)
del self.network
del self.network_config
def __str__(self) -> str:
"""
        :return: String containing information about the NetworkManager object
"""
description = "\n"
description += f"# {self.__class__.__name__}\n"
description += f" Network Directory: {self.network_dir}\n"
description += f" Save each Epoch: {self.save_each_epoch}\n"
description += f" Managed objects: Network: {self.network.__class__.__name__}\n"
description += f" Optimization: {self.optimization.__class__.__name__}\n"
description += f" Data Transformation: {self.data_transformation.__class__.__name__}\n"
description += str(self.network)
description += str(self.optimization)
description += str(self.data_transformation)
return description | PypiClean |
/IO_Model_Builder-1.1.2-py3-none-any.whl/iomb/validation.py | import iomb.model as model
import iomb.refmap as ref
import iomb.sat as sat
import iomb.ia as ia
import pandas as pd
import logging as log
class ValidationResult(object):
def __init__(self):
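        # maximum number of messages displayed per list; a negative value shows all messages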
self.display_count = 5
self.failed = False
self.errors = []
self.warnings = []
self.information = []
def fail(self, message):
self.errors.insert(0, 'invalid model: ' + message)
self.failed = True
return self
def __str__(self):
t = 'Validation result:\n\n'
c_errors, c_warnings = len(self.errors), len(self.warnings)
if c_errors == 0 and c_warnings == 0:
t += ' no errors or warnings, everything seems to be fine\n\n'
else:
t += ' there are %s errors and %s warnings\n\n' % (c_errors,
c_warnings)
t += self._list_str('errors', self.errors)
t += self._list_str('warnings', self.warnings)
t += self._list_str('information', self.information)
return t
def _repr_html_(self):
""" HTML representation of a validation result for the display in
Jupyter workbooks. """
t = '<div><h1>Validation result</h1>'
c_errors, c_warnings = len(self.errors), len(self.warnings)
if c_errors == 0 and c_warnings == 0:
t += '<p style="color:#2E4172">no errors or warnings, everything ' \
'seems to be fine</p>'
else:
t += '<p style="color:#AA3939">there are %s errors and %s warnings' \
'</p>' % (c_errors, c_warnings)
t += self._list_html('errors', self.errors, '#AA3939')
t += self._list_html('warnings', self.warnings, '#C7C732')
t += self._list_html('information', self.information, '#2E4172')
t += '</div>'
return t
def _list_str(self, title: str, messages: list) -> str:
if len(messages) == 0:
return ''
t = " %s:\n" % title
for i in range(0, len(messages)):
if self.display_count >= 0 and i >= self.display_count:
r = len(messages) - self.display_count
t += ' * %s more\n' % r
break
t += ' * %s\n' % messages[i]
t += '\n'
return t
def _list_html(self, title: str, messages: list, color: str) -> str:
if len(messages) == 0:
return ''
t = '<h3 style="color:%s">%s</h3><ul>' % (color, title)
for i in range(0, len(messages)):
if self.display_count >= 0 and i >= self.display_count:
r = len(messages) - self.display_count
t += '<li style="color:%s">%s more</li>' % (color, r)
break
t += '<li style="color:%s">%s</li>' % (color, messages[i])
t += '</ul>'
return t
def validate(m: model.Model) -> ValidationResult:
log.info('validate model')
vr = ValidationResult()
if not isinstance(m, model.Model):
return vr.fail('not an instance of iomb.model.Model')
_check_field_types(m, vr)
_check_sat_units(m, vr)
_check_sat_compartments(m, vr)
_check_sat_sectors(m, vr)
_check_sector_locations(m, vr)
_check_flow_uids(m, vr)
_check_ia_coverage(m, vr)
_check_duplicate_flow_uids(m, vr)
return vr
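# Typical usage (illustrative): validate a model built with iomb and inspect the result.
#   vr = validate(m)      # m is an iomb.model.Model instance
#   print(vr)             # plain-text summary; Jupyter notebooks render the HTML repr instead
#   if vr.failed: ...     # True when a fatal problem was found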
def _check_field_types(m: model.Model, vr: ValidationResult):
# field checks: (field value, type, field name, optional)
field_checks = [
(m.drc_matrix, pd.DataFrame, 'drc_matrix', False),
(m.sat_table, sat.Table, 'sat_table', False),
(m.sectors, ref.SectorMap, 'sectors', False),
(m.ia_table, ia.Table, 'ia_table', True),
(m.units, ref.UnitMap, 'units', True),
(m.compartments, ref.CompartmentMap, 'compartments', True),
(m.locations, ref.LocationMap, 'locations', True)
]
for field in field_checks:
value = field[0]
optional = field[3]
if optional and value is None:
continue
if not isinstance(value, field[1]):
vr.fail('field %s is not an instance of %s' % (field[2], field[1]))
break
if m.ia_table is None:
vr.information.append('model without LCIA data')
def _check_sector_locations(m: model.Model, vr: ValidationResult):
unknown_codes = []
for key in m.sectors.mappings.keys():
sector = m.sectors.get(key)
code = sector.location
if code in unknown_codes:
continue
location = m.locations.get(code)
if location is None:
vr.warnings.append('unknown location %s' % code)
unknown_codes.append(code)
if len(unknown_codes) == 0:
vr.information.append('all location codes of sectors are ok')
def _check_ia_coverage(m: model.Model, vr: ValidationResult):
if m.ia_table is None:
return
uncovered_count = 0
for flow in m.sat_table.flows:
covered = False
for category in m.ia_table.categories:
factor = m.ia_table.get_factor(category, flow)
if factor != 0:
covered = True
break
if not covered:
uncovered_count += 1
vr.warnings.append('flow %s is not covered by the LCIA model' %
flow)
if uncovered_count == 0:
vr.information.append('all flows covered by LCIA model')
def _check_sat_units(m: model.Model, vr: ValidationResult):
unknown_units = []
for flow in m.sat_table.flows:
unit_name = flow.unit
if unit_name in unknown_units:
continue
unit = m.units.get(unit_name)
if unit is None:
unknown_units.append(unit_name)
vr.errors.append('Unit %s of flow %s is unknown' % (unit_name,
flow))
if len(unknown_units) == 0:
vr.information.append('all units in satellite table are known')
def _check_sat_compartments(m: model.Model, vr: ValidationResult):
unknown = []
for flow in m.sat_table.flows:
ck = flow.compartment_key
if ck in unknown:
continue
if ck != "/":
c = m.compartments.get(ck)
if c is None:
unknown.append(ck)
vr.errors.append('Compartment %s of flow %s is unknown' % (ck,
flow))
if len(unknown) == 0:
vr.information.append('all compartments in satellite table are known')
def _check_sat_sectors(m: model.Model, vr: ValidationResult):
""" Check that the sectors from the satellite tables match the sectors in
the direct requirements matrix. """
unknown = []
for sector in m.sat_table.sectors:
key = sector.key
if key in unknown:
continue
if key not in m.drc_matrix.index:
unknown.append(key)
vr.errors.append('Sector %s in satellite matrix does not match a'
' sector in the direct requirements matrix' % key)
if len(unknown) == 0:
vr.information.append('all sectors in the satellite matrix match a'
' sector in the direct requirements matrix')
def _check_flow_uids(m: model.Model, vr: ValidationResult):
""" Checks if flows with the same key attributes (name, category, unit,
etc.) have also the same UUIDs in the satellite and LCIA table. """
if m.sat_table is None or m.ia_table is None:
return
errors = False
for sat_flow in m.sat_table.flows:
ia_flow = m.ia_table.get_flow(sat_flow.key)
if ia_flow is None or sat_flow.uid == ia_flow.uid:
continue
errors = True
vr.errors.append('Flow %s has different UUIDs in the satellite and LCIA'
' table (%s <> %s)' % (sat_flow.key, sat_flow.uid,
ia_flow.uid))
if not errors:
vr.information.append('all elementary flows have the same UUIDs in the'
' satellite and LCIA table')
def _check_duplicate_flow_uids(m: model.Model, vr: ValidationResult):
""" Check if different flows have the same UUID in the satellite table """
checks = {}
errors = []
for flow in m.sat_table.flows:
key = checks.get(flow.uid)
if key is None:
checks[flow.uid] = flow.key
elif key != flow.key:
log_it = False
if key not in errors:
errors.append(key)
log_it = True
if flow.key not in errors:
errors.append(flow.key)
log_it = True
if log_it:
vr.errors.append('Flow %s has the same UUID = %s as flow %s' % (flow.key, flow.uid, key))
if len(errors) == 0:
vr.information.append('all flow UUIDs in the satellite table are unique') | PypiClean |
/Joson_yaml_pytest-1.2.7-py3-none-any.whl/Joson_yaml_pytest/extract.py | from requests import Response
import jsonpath
import jmespath
import re
from . import exceptions
def extract_by_object(response: Response, extract_expression: str):
"""
    Extract a value from the Response object attributes [status_code, url, ok, headers, cookies, text, json, encoding]
    :param response: Response object
    :param extract_expression: extraction expression
    :return: the extracted result
"""
if not isinstance(extract_expression, str):
return extract_expression
res = {
"headers": response.headers if response else {},
"cookies": dict(response.cookies if response else {})
}
if extract_expression in ["status_code", "url", "ok", "encoding", "text"]:
return getattr(response, extract_expression)
elif extract_expression.startswith('headers') or extract_expression.startswith('cookies'):
return extract_by_jmespath(res, extract_expression)
elif extract_expression.startswith('body') or extract_expression.startswith('content'):
try:
response_parse_dict = response.json()
return extract_by_jmespath({"body": response_parse_dict}, extract_expression)
except Exception as msg:
raise exceptions.ExtractExpressionError(f'expression:<{extract_expression}>, error: {msg}')
elif extract_expression.startswith('$.'):
try:
response_parse_dict = response.json()
return extract_by_jsonpath(response_parse_dict, extract_expression)
except Exception as msg:
raise exceptions.ExtractExpressionError(f'expression:<{extract_expression}>, error: {msg}')
elif '.+?' in extract_expression or '.*?' in extract_expression:
        # regular-expression match
return extract_by_regex(response.text, extract_expression)
else:
        # not an extraction expression; return it unchanged
return extract_expression
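# Illustrative expressions accepted by extract_by_object (names are examples only):
#   'status_code'        -> response.status_code
#   'headers.Date'       -> JMESPath lookup into the response headers
#   'body.data.id'       -> JMESPath lookup into the JSON body
#   '$.data.id'          -> JSONPath lookup into the JSON body
#   'token=(.+?)&'       -> regular-expression extraction from response.text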
def extract_by_jsonpath(extract_value: dict, extract_expression: str): # noqa
"""
    Extract a value with JSONPath
:param extract_value: response.json()
:param extract_expression: eg: '$.code'
    :return: None if nothing matches, the single extracted value, or the list of all matches
"""
if not isinstance(extract_expression, str):
return extract_expression
extract_value = jsonpath.jsonpath(extract_value, extract_expression)
if not extract_value:
return
elif len(extract_value) == 1:
return extract_value[0]
else:
return extract_value
def extract_by_jmespath(extract_obj: dict, extract_expression: str): # noqa
"""
    Extract a value with JMESPath
:param extract_obj: {
"body": response.json(),
"cookies": dict(response.cookies),
"headers": response.headers,
}
:param extract_expression: eg: 'body.code'
    :return: None if nothing is extracted, otherwise the extracted result
""" # noqa
if not isinstance(extract_expression, str):
return extract_expression
try:
extract_value = jmespath.search(extract_expression, extract_obj)
return extract_value
except Exception as msg:
raise exceptions.ExtractExpressionError(f'expression:<{extract_expression}>, error: {msg}')
def extract_by_regex(extract_obj: str, extract_expression: str):
"""
    Extract values from the response text with a regular expression
:param extract_obj: response.text
:param extract_expression:
:return:
"""
if not isinstance(extract_expression, str):
return extract_expression
extract_value = re.findall(extract_expression, extract_obj, flags=re.S)
if not extract_value:
return ''
elif len(extract_value) == 1:
return extract_value[0]
else:
return extract_value | PypiClean |
/CloeePy-Mongo-0.0.0-rc2.tar.gz/CloeePy-Mongo-0.0.0-rc2/CONTRIBUTING.md | # Contributor's Guide
## Contribution Process
This project is in early alpha, so a more concrete contribution process is still
being developed.
1. Open a GitHub issue.
   It's best to create a bug report or feature proposal first before you begin work.
   If your contribution request is approved, your PR will be more likely to be accepted.
2. Fork.
3. Branch off of master.
4. Create a pull request against the correct RC branch.
   If it's not clear which RC branch to PR against, just ask a maintainer.
## Coding and Documentation Standards
**Style:** [PEP8 Style Guide](https://www.python.org/dev/peps/pep-0008/)
**Docstrings**: [PEP257 Docstring Guide](https://www.python.org/dev/peps/pep-0257)
## Test Coverage
Your code must have significant test coverage in order to be accepted.
| PypiClean |
/AeoLiS-2.1.1.tar.gz/AeoLiS-2.1.1/aeolis/wind.py | from __future__ import absolute_import, division
import numpy as np
import logging
import operator
#import scipy.special
#import scipy.interpolate
from scipy import ndimage, misc
import matplotlib.pyplot as plt
# package modules
import aeolis.shear
from aeolis.utils import *
# initialize logger
logger = logging.getLogger(__name__)
def initialize(s, p):
'''Initialize wind model
'''
# apply wind direction convention
if isarray(p['wind_file']):
if p['wind_convention'] == 'nautical':
#fix issue associated with longshore winds/divide by zero
ifix = p['wind_file'][:, 2] == 0.
p['wind_file'][ifix, 2] = 0.01
elif p['wind_convention'] == 'cartesian':
#fix issue associated with longshore winds/divide by zero
ifix = p['wind_file'][:, 2] == 270.
p['wind_file'][ifix, 2] = 270.01
p['wind_file'][:,2] = 270.0 - p['wind_file'][:,2]
else:
logger.log_and_raise('Unknown convention: %s'
% p['wind_convention'], exc=ValueError)
    # initialize wind shear model (note: the z0 according to Duran is much smaller;
    # with a larger z0 no barchan shape develops)
z0 = calculate_z0(p, s)
if p['process_shear']:
if p['ny'] > 0:
s['shear'] = aeolis.shear.WindShear(s['x'], s['y'], s['zb'],
dx=p['dx'], dy=p['dy'],
L=p['L'], l=p['l'], z0=z0,
buffer_width=p['buffer_width'])
else:
s['shear'] = np.zeros(s['x'].shape)
return s
def interpolate(s, p, t):
'''Interpolate wind velocity and direction to current time step
Interpolates the wind time series for velocity and direction to
the current time step. The cosine and sine of the direction angle
are interpolated separately to prevent zero-crossing errors. The
    wind velocity is decomposed into two grid components based on the
orientation of each individual grid cell. In case of a
one-dimensional model only a single positive component is used.
Parameters
----------
s : dict
Spatial grids
p : dict
Model configuration parameters
t : float
Current time
Returns
-------
dict
Spatial grids
'''
if p['process_wind'] and p['wind_file'] is not None:
uw_t = p['wind_file'][:,0]
uw_s = p['wind_file'][:,1]
uw_d = p['wind_file'][:,2] / 180. * np.pi
s['uw'][:,:] = interp_circular(t, uw_t, uw_s)
s['udir'][:,:] = np.arctan2(interp_circular(t, uw_t, np.sin(uw_d)),
interp_circular(t, uw_t, np.cos(uw_d))) * 180. / np.pi
s['uws'] = - s['uw'] * np.sin((-p['alfa'] + s['udir']) / 180. * np.pi) # alfa [deg] is real world grid cell orientation (clockwise)
s['uwn'] = - s['uw'] * np.cos((-p['alfa'] + s['udir']) / 180. * np.pi)
s['uw'] = np.abs(s['uw'])
# Compute wind shear velocity
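    # using the logarithmic wind profile (law of the wall): u* = kappa * U(z) / ln(z / z0)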
kappa = p['kappa']
z = p['z']
z0 = calculate_z0(p, s)
s['ustars'] = s['uws'] * kappa / np.log(z/z0)
s['ustarn'] = s['uwn'] * kappa / np.log(z/z0)
s['ustar'] = np.hypot(s['ustars'], s['ustarn'])
s = velocity_stress(s,p)
s['ustar0'] = s['ustar'].copy()
    s['ustars0'] = s['ustars'].copy()
    s['ustarn0'] = s['ustarn'].copy()
s['tau0'] = s['tau'].copy()
s['taus0'] = s['taus'].copy()
s['taun0'] = s['taun'].copy()
return s
def calculate_z0(p, s):
'''Calculate z0 according to chosen roughness method
The z0 is required for the calculation of the shear velocity. Here, z0
is calculated based on a user-defined method. The constant method defines
the value of z0 as equal to k (z0 = ks). This was implemented to ensure
backward compatibility and does not follow the definition of Nikuradse
(z0 = k / 30). For following the definition of Nikuradse use the method
constant_nikuradse. The mean_grainsize_initial method uses the intial
mean grain size ascribed to the bed (grain_dist and grain_size in the
input file) to calculate the z0. The median_grainsize_adaptive bases the
z0 on the median grain size (D50) in the surface layer in every time step.
The resulting z0 is variable accross the domain (x,y). The
strypsteen_vanrijn method is based on the roughness calculation in their
paper.
Parameters
----------
s : dict
Spatial grids
p : dict
Model configuration parameters
Returns
-------
array
z0
'''
if p['method_roughness'] == 'constant':
z0 = p['k'] # Here, the ks (roughness length) is equal to the z0, this method is implemented to assure backward compatibility. Note, this does not follow the definition of z0 = ks /30 by Nikuradse
if p['method_roughness'] == 'constant_nikuradse':
z0 = p['k'] / 30 # This equaion follows the definition of the bed roughness as introduced by Nikuradse
    if p['method_roughness'] == 'mean_grainsize_initial': # (based on Nikuradse and Bagnold, 1941); only applicable with a uniform grain size and most appropriate for a flat bed
z0 = np.sum(p['grain_size']*p['grain_dist']) / 30.
if p['method_roughness'] == 'mean_grainsize_adaptive': # makes Nikuradse roughness method variable through time and space depending on grain size variations
z0 = calc_mean_grain_size(p, s) / 30.
if p['method_roughness'] == 'median_grainsize_adaptive': # based on Sherman and Greenwood, 1982 - only appropriate for naturally occurring grain size distribution
d50 = calc_grain_size(p, s, 50)
z0 = 2*d50 / 30.
if p['method_roughness'] == 'vanrijn_strypsteen': # based on van Rijn and Strypsteen, 2019; Strypsteen et al., 2021
if len(p['grain_dist']) == 1: # if one grainsize is used the d90 is calculated with the d50
d50 = p['grain_size']
d90 = 2*d50
else:
d50 = calc_grain_size(p, s, 50) #calculate d50 and d90 per cell.
d90 = calc_grain_size(p, s, 90)
ustar_grain_stat = p['kappa'] * (s['uw'] / np.log(30*p['z']/d90))
ustar_th_B = 0.1 * np.sqrt((p['rhog'] - p['rhoa']) / p['rhoa'] * p['g'] * d50) # Note that Aa could be filled in in the spot of 0.1
T = (np.square(ustar_grain_stat) - np.square(ustar_th_B))/np.square(ustar_th_B) # T represents different phases of the transport related to the saltation layer and ripple formation
#T[T < 0] = 0
alpha1 = 15
alpha2 = 1
gamma_r = 1 + 1/T
z0 = (d90 + alpha1 * gamma_r * d50 * np.power(T, alpha2)) / 30
return z0
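# Worked example (assumed values): with grain_size = [250e-6, 355e-6] m and grain_dist = [0.7, 0.3],
# the 'mean_grainsize_initial' method gives z0 = (0.7*250e-6 + 0.3*355e-6) / 30 ≈ 9.4e-6 m.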
def shear(s,p):
# Compute shear velocity field (including separation)
if 'shear' in s.keys() and p['process_shear'] and p['ny'] > 0:
s['shear'](x=s['x'], y=s['y'], z=s['zb'],
taux=s['taus'], tauy=s['taun'],
u0=s['uw'][0,0], udir=s['udir'][0,0],
process_separation = p['process_separation'],
c = p['c_b'],
mu_b = p['mu_b'],
taus0 = s['taus0'][0,0], taun0 = s['taun0'][0,0],
sep_filter_iterations=p['sep_filter_iterations'],
zsep_y_filter=p['zsep_y_filter'])
s['taus'], s['taun'] = s['shear'].get_shear()
s['tau'] = np.hypot(s['taus'], s['taun'])
s = stress_velocity(s,p)
# Returns separation surface
if p['process_separation']:
s['hsep'] = s['shear'].get_separation()
s['zsep'] = s['hsep'] + s['zb']
elif p['process_shear'] and p['ny'] == 0: #NTC - Added in 1D only capabilities
s = compute_shear1d(s, p)
s = stress_velocity(s, p)
if p['process_separation']:
zsep = separation1d(s, p)
s['zsep'] = zsep
s['hsep'] = s['zsep'] - s['zb']
tau_sep = 0.5
slope = 0.2 # according to Durán 2010 (Sauermann 2001: c = 0.25 for 14 degrees)
delta = 1. / (slope * tau_sep)
zsepdelta = np.minimum(np.maximum(1. - delta * s['hsep'], 0.), 1.)
s['taus'] *= zsepdelta
s['taun'] *= zsepdelta
s = stress_velocity(s, p)
# if p['process_nelayer']:
# if p['th_nelayer']:
# ustar = s['ustar'].copy()
# ustars = s['ustars'].copy()
# ustarn = s['ustarn'].copy()
# s['zne'][:,:] = p['ne_file']
# ix = s['zb'] <= s['zne']
# s['ustar'][ix] = np.maximum(0., s['ustar'][ix] - (s['zne'][ix]-s['zb'][ix])* (1/p['layer_thickness']) * s['ustar'][ix])
# ix = ustar != 0.
# s['ustars'][ix] = s['ustar'][ix] * (ustars[ix] / ustar[ix])
# s['ustarn'][ix] = s['ustar'][ix] * (ustarn[ix] / ustar[ix])
return s
def velocity_stress(s, p):
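    # Convert shear velocity to shear stress: tau = rho_a * u*^2, keeping the s/n components
    # proportional to the corresponding shear velocity components.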
s['tau'] = p['rhoa'] * s['ustar'] ** 2
ix = s['ustar'] > 0.
s['taus'][ix] = s['tau'][ix]*s['ustars'][ix]/s['ustar'][ix]
s['taun'][ix] = s['tau'][ix]*s['ustarn'][ix]/s['ustar'][ix]
s['tau'] = np.hypot(s['taus'], s['taun'])
ix = s['ustar'] == 0.
s['taus'][ix] = 0.
s['taun'][ix] = 0.
s['tau'][ix] = 0.
return s
def stress_velocity(s, p):
s['ustar'] = np.sqrt(s['tau'] / p['rhoa'])
ix = s['tau'] > 0.
s['ustars'][ix] = s['ustar'][ix] * s['taus'][ix] / s['tau'][ix]
s['ustarn'][ix] = s['ustar'][ix] * s['taun'][ix] / s['tau'][ix]
ix = s['tau'] == 0.
s['ustar'][ix] = 0.
s['ustars'][ix] = 0.
s['ustarn'][ix] = 0.
return s
def compute_shear1d(s, p):
    '''Compute the wind shear perturbation for a given free-flow wind
    speed on the computational grid. Based on the same implementation as in Duna.'''
tau = s['tau'].copy()
taus = s['taus'].copy()
taun = s['taun'].copy()
ets = np.zeros(s['tau'].shape)
etn = np.zeros(s['tau'].shape)
ix = tau != 0
ets[ix] = taus[ix] / tau[ix]
etn[ix] = taun[ix] / tau[ix]
x = s['x'][0,:]
zb = s['zb'][0,:]
#Bart: check for negative wind direction
if np.sum(taus) < 0:
x = np.flip(x)
zb = np.flip(zb)
dzbdx = np.zeros(x.shape)
tau_over_tau0 = np.zeros(x.shape)
dx = x[1] - x[0]
dx = np.abs(dx)
dzbdx[1:-1] = (zb[2:] - zb[0:-2]) / 2 / dx
nx = x.size - 1
alfa = 3
beta = 1
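    # Analytical shear-stress perturbation: a discrete convolution of the upwind bed slope
    # with a 1/(pi*j) kernel plus a local slope contribution, scaled by alfa and beta.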
for i in range(nx + 1):
integ = 0
startval = i - nx
endval = i - 1
for j in np.arange(startval, endval + 1):
if j != 0:
integ = integ + dzbdx[i - j] / (j * np.pi)
tau_over_tau0[i] = alfa * (integ + beta * dzbdx[i]) + 1
tau_over_tau0[i] = np.maximum(tau_over_tau0[i], 0.1)
    # Note: Duna formulates this in terms of u10, so the expression here differs slightly.
#Bart: check for negative wind direction
if np.sum(taus) < 0:
tau_over_tau0 = np.flip(tau_over_tau0)
s['tau'] = tau * tau_over_tau0
s['taus'] = s['tau'] * ets
s['taun'] = s['tau'] * etn
return s
def separation1d(s, p):
# Initialize grid and bed dimensions
#load relevant input
x = s['x'][0,:]
#x = s['x']
z = s['zb'][0,:]
dx = p['dx']
dy = dx
c = p['c_b']
mu_b = p['mu_b']
nx = np.size(z)
udir = s['udir'][0][0]
#make the grids 2d to utilize same code as in the shear module
ny = 3
#z = np.matlib.repmat(z, ny, 1)
z = np.tile(z, [ny, 1])
if udir < 360:
udir = udir + 360
if udir > 360:
udir = udir - 360
if udir > 180 and udir < 360:
udir = np.abs(udir-270)
dx = dx / np.cos(udir * np.pi / 180)
dy = dx
direction = 1
elif udir == 180:
dx = 0.0001
direction = 1
elif udir == 360:
dx = 0.0001
direction = 1
else:
udir = np.abs(udir-90)
dx = dx / np.cos(udir * np.pi / 180)
dy = dx
direction = 2
x = np.tile(x, [ny, 1])
if direction == 2:
z = np.flip(z, 1)
#y = np.matrix.transpose(np.tile(y, [ny, 1]))
# Initialize arrays
dzx = np.zeros(z.shape)
dzdx0 = np.zeros(z.shape)
dzdx1 = np.zeros(z.shape)
stall = np.zeros(z.shape)
bubble = np.zeros(z.shape)
k = np.array(range(0, nx))
zsep = z.copy() # total separation bubble
zsep0 = np.zeros(z.shape) # zero-order separation bubble surface
    zsep1 = np.zeros(z.shape) # first-order separation bubble surface
zfft = np.zeros((ny, nx), dtype=complex)
# Compute bed slope angle in x-dir
dzx[:, :-1] = np.rad2deg(np.arctan((z[:, 1:] - z[:, :-1]) / dx))
dzx[:, 0] = dzx[:, 1]
dzx[:, -1] = dzx[:, -2]
# Determine location of separation bubbles
    '''A separation bubble exists if the lee-side bed slope angle is larger
    than the maximum angle that wind streamlines can follow behind an
    obstacle (mu_b)'''
stall += np.logical_and(abs(dzx) > mu_b, dzx < 0.)
    stall[:, 1:-1] += np.logical_and(np.logical_and(stall[:, 1:-1] == 0, stall[:, :-2] > 0.), stall[:, 2:] > 0.)
# Define separation bubble
bubble[:, :-1] = np.logical_and(stall[:, :-1] == 0., stall[:, 1:] > 0.)
# Shift bubble back to x0: start of separation bubble
p = 2
bubble[:, :-p] = bubble[:, p:]
bubble[:, :p] = 0
bubble = bubble.astype(int)
# Count separation bubbles
n = np.sum(bubble)
bubble_n = np.asarray(np.where(bubble == True)).T
# Walk through all separation bubbles and determine polynoms
    # Loop index renamed so it does not shadow the wavenumber array 'k' defined above,
    # which is needed by the spectral (zero-order) filter applied below.
    for i_bubble in range(0, n):
        i = bubble_n[i_bubble, 1]
        j = bubble_n[i_bubble, 0]
        ix_neg = (dzx[j, i + 5:] >= 0)  # first non-negative slope, starting 5 cells downstream of the brink
if np.sum(ix_neg) == 0:
zbrink = z[j, i] # z level of brink at z(x0)
else:
zbrink = z[j, i] - z[j, i + 5 + np.where(ix_neg)[0][0]]
# Zero order polynom
dzdx0 = (z[j, i - 1] - z[j, i - 2]) / dx
# if dzdx0 > 0.1:
# dzdx0 = 0.1
a = dzdx0 / c
ls = np.minimum(np.maximum((3. * zbrink / (2. * c) * (1. + a / 4. + a ** 2 / 8.)), 0.1), 200.)
a2 = -3 * zbrink / ls ** 2 - 2 * dzdx0 / ls
a3 = 2 * zbrink / ls ** 3 + dzdx0 / ls ** 2
i_max = min(i + int(ls / dx), int(nx - 1))
xs = x[j, i:i_max] - x[j, i]
zsep0[j, i:i_max] = (a3 * xs ** 3 + a2 * xs ** 2 + dzdx0 * xs + z[j, i])
# Zero order filter
Cut = 1.5
dk = 2.0 * np.pi / (np.max(x))
zfft[j, :] = np.fft.fft(zsep0[j, :])
zfft[j, :] *= np.exp(-(dk * k * dx) ** 2 / (2. * Cut ** 2))
zsep0[j, :] = np.real(np.fft.ifft(zfft[j, :]))
# First order polynom
dzdx1 = (zsep0[j, i - 1] - zsep0[j, i - 2]) / dx
a = dzdx1 / c
ls = np.minimum(np.maximum((3. * z[j, i] / (2. * c) * (1. + a / 4. + a ** 2 / 8.)), 0.1), 200.)
a2 = -3 * z[j, i] / ls ** 2 - 2 * dzdx1 / ls
a3 = 2 * z[j, i] / ls ** 3 + dzdx1 / ls ** 2
i_max1 = min(i + int(ls / dx), int(nx - 1))
xs1 = x[j, i:i_max1] - x[j, i]
        # Combine separation bubble
zsep1[j, i:i_max1] = (a3 * xs1 ** 3 + a2 * xs1 ** 2 + dzdx1 * xs1 + z[j, i])
zsep[j, i:i_max] = np.maximum(zsep1[j, i:i_max], z[j, i:i_max])
# Smooth surface of separation bubbles over y direction
zsep = ndimage.gaussian_filter1d(zsep, sigma=0.2, axis=0)
ilow = zsep < z
zsep[ilow] = z[ilow]
#remove the 2d aspect of results
zsepout = zsep[1,:]
if direction == 2:
zsepout = np.flip(zsepout)
return zsepout | PypiClean |
/DJModels-0.0.6-py3-none-any.whl/djmodels/utils/datastructures.py | import copy
from collections import OrderedDict
class OrderedSet:
"""
A set which keeps the ordering of the inserted items.
Currently backs onto OrderedDict.
"""
def __init__(self, iterable=None):
self.dict = OrderedDict.fromkeys(iterable or ())
def add(self, item):
self.dict[item] = None
def remove(self, item):
del self.dict[item]
def discard(self, item):
try:
self.remove(item)
except KeyError:
pass
def __iter__(self):
return iter(self.dict)
def __contains__(self, item):
return item in self.dict
def __bool__(self):
return bool(self.dict)
def __len__(self):
return len(self.dict)
class MultiValueDictKeyError(KeyError):
pass
class MultiValueDict(dict):
"""
A subclass of dictionary customized to handle multiple values for the
same key.
>>> d = MultiValueDict({'name': ['Adrian', 'Simon'], 'position': ['Developer']})
>>> d['name']
'Simon'
>>> d.getlist('name')
['Adrian', 'Simon']
>>> d.getlist('doesnotexist')
[]
>>> d.getlist('doesnotexist', ['Adrian', 'Simon'])
['Adrian', 'Simon']
>>> d.get('lastname', 'nonexistent')
'nonexistent'
>>> d.setlist('lastname', ['Holovaty', 'Willison'])
This class exists to solve the irritating problem raised by cgi.parse_qs,
which returns a list for every key, even though most Web forms submit
single name-value pairs.
"""
def __init__(self, key_to_list_mapping=()):
super().__init__(key_to_list_mapping)
def __repr__(self):
return "<%s: %s>" % (self.__class__.__name__, super().__repr__())
def __getitem__(self, key):
"""
Return the last data value for this key, or [] if it's an empty list;
raise KeyError if not found.
"""
try:
list_ = super().__getitem__(key)
except KeyError:
raise MultiValueDictKeyError(key)
try:
return list_[-1]
except IndexError:
return []
def __setitem__(self, key, value):
super().__setitem__(key, [value])
def __copy__(self):
return self.__class__([
(k, v[:])
for k, v in self.lists()
])
def __deepcopy__(self, memo):
result = self.__class__()
memo[id(self)] = result
for key, value in dict.items(self):
dict.__setitem__(result, copy.deepcopy(key, memo),
copy.deepcopy(value, memo))
return result
def __getstate__(self):
return {**self.__dict__, '_data': {k: self._getlist(k) for k in self}}
def __setstate__(self, obj_dict):
data = obj_dict.pop('_data', {})
for k, v in data.items():
self.setlist(k, v)
self.__dict__.update(obj_dict)
def get(self, key, default=None):
"""
Return the last data value for the passed key. If key doesn't exist
or value is an empty list, return `default`.
"""
try:
val = self[key]
except KeyError:
return default
if val == []:
return default
return val
def _getlist(self, key, default=None, force_list=False):
"""
Return a list of values for the key.
Used internally to manipulate values list. If force_list is True,
return a new copy of values.
"""
try:
values = super().__getitem__(key)
except KeyError:
if default is None:
return []
return default
else:
if force_list:
values = list(values) if values is not None else None
return values
def getlist(self, key, default=None):
"""
Return the list of values for the key. If key doesn't exist, return a
default value.
"""
return self._getlist(key, default, force_list=True)
def setlist(self, key, list_):
super().__setitem__(key, list_)
def setdefault(self, key, default=None):
if key not in self:
self[key] = default
# Do not return default here because __setitem__() may store
# another value -- QueryDict.__setitem__() does. Look it up.
return self[key]
def setlistdefault(self, key, default_list=None):
if key not in self:
if default_list is None:
default_list = []
self.setlist(key, default_list)
# Do not return default_list here because setlist() may store
# another value -- QueryDict.setlist() does. Look it up.
return self._getlist(key)
def appendlist(self, key, value):
"""Append an item to the internal list associated with key."""
self.setlistdefault(key).append(value)
def items(self):
"""
Yield (key, value) pairs, where value is the last item in the list
associated with the key.
"""
for key in self:
yield key, self[key]
def lists(self):
"""Yield (key, list) pairs."""
return iter(super().items())
def values(self):
"""Yield the last value on every key list."""
for key in self:
yield self[key]
def copy(self):
"""Return a shallow copy of this object."""
return copy.copy(self)
def update(self, *args, **kwargs):
"""Extend rather than replace existing key lists."""
if len(args) > 1:
raise TypeError("update expected at most 1 arguments, got %d" % len(args))
if args:
other_dict = args[0]
if isinstance(other_dict, MultiValueDict):
for key, value_list in other_dict.lists():
self.setlistdefault(key).extend(value_list)
else:
try:
for key, value in other_dict.items():
self.setlistdefault(key).append(value)
except TypeError:
raise ValueError("MultiValueDict.update() takes either a MultiValueDict or dictionary")
for key, value in kwargs.items():
self.setlistdefault(key).append(value)
def dict(self):
"""Return current object as a dict with singular values."""
return {key: self[key] for key in self}
class ImmutableList(tuple):
"""
A tuple-like object that raises useful errors when it is asked to mutate.
Example::
>>> a = ImmutableList(range(5), warning="You cannot mutate this.")
>>> a[3] = '4'
Traceback (most recent call last):
...
AttributeError: You cannot mutate this.
"""
def __new__(cls, *args, warning='ImmutableList object is immutable.', **kwargs):
self = tuple.__new__(cls, *args, **kwargs)
self.warning = warning
return self
def complain(self, *wargs, **kwargs):
if isinstance(self.warning, Exception):
raise self.warning
else:
raise AttributeError(self.warning)
# All list mutation functions complain.
__delitem__ = complain
__delslice__ = complain
__iadd__ = complain
__imul__ = complain
__setitem__ = complain
__setslice__ = complain
append = complain
extend = complain
insert = complain
pop = complain
remove = complain
sort = complain
reverse = complain
class DictWrapper(dict):
"""
Wrap accesses to a dictionary so that certain values (those starting with
the specified prefix) are passed through a function before being returned.
The prefix is removed before looking up the real value.
Used by the SQL construction code to ensure that values are correctly
quoted before being used.
"""
def __init__(self, data, func, prefix):
super().__init__(data)
self.func = func
self.prefix = prefix
def __getitem__(self, key):
"""
Retrieve the real value after stripping the prefix string (if
present). If the prefix is present, pass the value through self.func
before returning, otherwise return the raw value.
"""
use_func = key.startswith(self.prefix)
if use_func:
key = key[len(self.prefix):]
value = super().__getitem__(key)
if use_func:
return self.func(value)
return value | PypiClean |
/EnergyCapSdk-8.2304.4743.tar.gz/EnergyCapSdk-8.2304.4743/energycap/sdk/models/audit_settings_request_py3.py |
from msrest.serialization import Model
class AuditSettingsRequest(Model):
"""AuditSettingsRequest.
All required parameters must be populated in order to send to Azure.
:param bill_contains_line_item_descriptions: Required.
:type bill_contains_line_item_descriptions:
~energycap.sdk.models.BillContainsLineItemDescriptionsSettingRequest
:param bill_contains_line_item_types: Required.
:type bill_contains_line_item_types:
~energycap.sdk.models.BillContainsLineItemTypesSettingRequest
:param total_bill_cost_does_not_match_line_item_types: Required.
:type total_bill_cost_does_not_match_line_item_types:
~energycap.sdk.models.TotalBillCostDoesNotMatchLineItemTypesSettingRequest
:param billing_period_outside_start_end_dates: Required.
:type billing_period_outside_start_end_dates:
~energycap.sdk.models.AuditSettingRequest
:param bill_overlaps_with_other_account_bill: Required.
:type bill_overlaps_with_other_account_bill:
~energycap.sdk.models.AuditSettingRequest
:param gap_between_bill_and_previous_bill_on_account: Required.
:type gap_between_bill_and_previous_bill_on_account:
~energycap.sdk.models.AuditSettingRequest
:param bill_ends_in_future: Required.
:type bill_ends_in_future: ~energycap.sdk.models.AuditSettingRequest
:param account_has_multiple_bills_in_billing_period: Required.
:type account_has_multiple_bills_in_billing_period:
~energycap.sdk.models.AuditSettingRequest
:param statement_date_before_end_date: Required.
:type statement_date_before_end_date:
~energycap.sdk.models.AuditSettingRequest
:param due_date_before_end_date: Required.
:type due_date_before_end_date: ~energycap.sdk.models.AuditSettingRequest
:param bill_significantly_shorter_or_longer_than_previous: Required.
:type bill_significantly_shorter_or_longer_than_previous:
~energycap.sdk.models.BillSignificantlyShorterOrLongerThanPreviousSettingRequest
:param too_many_consecutive_estimated_bills: Required.
:type too_many_consecutive_estimated_bills:
~energycap.sdk.models.TooManyConsecutiveEstimatedBillsSettingRequest
:param due_date_too_long_after_bill_end: Required.
:type due_date_too_long_after_bill_end:
~energycap.sdk.models.DueDateTooLongAfterBillEndSettingRequest
:param statement_date_too_long_after_bill_end: Required.
:type statement_date_too_long_after_bill_end:
~energycap.sdk.models.StatementDateTooLongAfterBillEndSettingRequest
:param invoice_number_is_repeated_on_account: Required.
:type invoice_number_is_repeated_on_account:
~energycap.sdk.models.AuditSettingRequest
:param likely_duplicate_bill_on_account: Required.
:type likely_duplicate_bill_on_account:
~energycap.sdk.models.AuditSettingRequest
:param total_meter_cost_is_percentage_higher_than_past_year: Required.
:type total_meter_cost_is_percentage_higher_than_past_year:
~energycap.sdk.models.AuditSettingRequest
:param total_meter_use_is_percentage_higher_than_past_year: Required.
:type total_meter_use_is_percentage_higher_than_past_year:
~energycap.sdk.models.AuditSettingRequest
:param serial_number_does_not_match_import_file: Required.
:type serial_number_does_not_match_import_file:
~energycap.sdk.models.AuditSettingRequest
:param rate_code_does_not_match_import_file: Required.
:type rate_code_does_not_match_import_file:
~energycap.sdk.models.AuditSettingRequest
:param import_file_start_date_adjusted_to_prevent_gaps: Required.
:type import_file_start_date_adjusted_to_prevent_gaps:
~energycap.sdk.models.AuditSettingRequest
:param account_alert_exists_on_account_in_import_file: Required.
:type account_alert_exists_on_account_in_import_file:
~energycap.sdk.models.AuditSettingRequest
:param abnormal_bill_cost_with_outlier_analysis: Required.
:type abnormal_bill_cost_with_outlier_analysis:
~energycap.sdk.models.AbnormalBillCostWithOutlierAnalysisSettingRequest
:param abnormal_bill_use_with_outlier_analysis: Required.
:type abnormal_bill_use_with_outlier_analysis:
~energycap.sdk.models.AbnormalBillUseWithOutlierAnalysisSettingRequest
:param abnormal_bill_demand_with_outlier_analysis: Required.
:type abnormal_bill_demand_with_outlier_analysis:
~energycap.sdk.models.AbnormalBillDemandWithOutlierAnalysisSettingRequest
"""
_validation = {
'bill_contains_line_item_descriptions': {'required': True},
'bill_contains_line_item_types': {'required': True},
'total_bill_cost_does_not_match_line_item_types': {'required': True},
'billing_period_outside_start_end_dates': {'required': True},
'bill_overlaps_with_other_account_bill': {'required': True},
'gap_between_bill_and_previous_bill_on_account': {'required': True},
'bill_ends_in_future': {'required': True},
'account_has_multiple_bills_in_billing_period': {'required': True},
'statement_date_before_end_date': {'required': True},
'due_date_before_end_date': {'required': True},
'bill_significantly_shorter_or_longer_than_previous': {'required': True},
'too_many_consecutive_estimated_bills': {'required': True},
'due_date_too_long_after_bill_end': {'required': True},
'statement_date_too_long_after_bill_end': {'required': True},
'invoice_number_is_repeated_on_account': {'required': True},
'likely_duplicate_bill_on_account': {'required': True},
'total_meter_cost_is_percentage_higher_than_past_year': {'required': True},
'total_meter_use_is_percentage_higher_than_past_year': {'required': True},
'serial_number_does_not_match_import_file': {'required': True},
'rate_code_does_not_match_import_file': {'required': True},
'import_file_start_date_adjusted_to_prevent_gaps': {'required': True},
'account_alert_exists_on_account_in_import_file': {'required': True},
'abnormal_bill_cost_with_outlier_analysis': {'required': True},
'abnormal_bill_use_with_outlier_analysis': {'required': True},
'abnormal_bill_demand_with_outlier_analysis': {'required': True},
}
_attribute_map = {
'bill_contains_line_item_descriptions': {'key': 'billContainsLineItemDescriptions', 'type': 'BillContainsLineItemDescriptionsSettingRequest'},
'bill_contains_line_item_types': {'key': 'billContainsLineItemTypes', 'type': 'BillContainsLineItemTypesSettingRequest'},
'total_bill_cost_does_not_match_line_item_types': {'key': 'totalBillCostDoesNotMatchLineItemTypes', 'type': 'TotalBillCostDoesNotMatchLineItemTypesSettingRequest'},
'billing_period_outside_start_end_dates': {'key': 'billingPeriodOutsideStartEndDates', 'type': 'AuditSettingRequest'},
'bill_overlaps_with_other_account_bill': {'key': 'billOverlapsWithOtherAccountBill', 'type': 'AuditSettingRequest'},
'gap_between_bill_and_previous_bill_on_account': {'key': 'gapBetweenBillAndPreviousBillOnAccount', 'type': 'AuditSettingRequest'},
'bill_ends_in_future': {'key': 'billEndsInFuture', 'type': 'AuditSettingRequest'},
'account_has_multiple_bills_in_billing_period': {'key': 'accountHasMultipleBillsInBillingPeriod', 'type': 'AuditSettingRequest'},
'statement_date_before_end_date': {'key': 'statementDateBeforeEndDate', 'type': 'AuditSettingRequest'},
'due_date_before_end_date': {'key': 'dueDateBeforeEndDate', 'type': 'AuditSettingRequest'},
'bill_significantly_shorter_or_longer_than_previous': {'key': 'billSignificantlyShorterOrLongerThanPrevious', 'type': 'BillSignificantlyShorterOrLongerThanPreviousSettingRequest'},
'too_many_consecutive_estimated_bills': {'key': 'tooManyConsecutiveEstimatedBills', 'type': 'TooManyConsecutiveEstimatedBillsSettingRequest'},
'due_date_too_long_after_bill_end': {'key': 'dueDateTooLongAfterBillEnd', 'type': 'DueDateTooLongAfterBillEndSettingRequest'},
'statement_date_too_long_after_bill_end': {'key': 'statementDateTooLongAfterBillEnd', 'type': 'StatementDateTooLongAfterBillEndSettingRequest'},
'invoice_number_is_repeated_on_account': {'key': 'invoiceNumberIsRepeatedOnAccount', 'type': 'AuditSettingRequest'},
'likely_duplicate_bill_on_account': {'key': 'likelyDuplicateBillOnAccount', 'type': 'AuditSettingRequest'},
'total_meter_cost_is_percentage_higher_than_past_year': {'key': 'totalMeterCostIsPercentageHigherThanPastYear', 'type': 'AuditSettingRequest'},
'total_meter_use_is_percentage_higher_than_past_year': {'key': 'totalMeterUseIsPercentageHigherThanPastYear', 'type': 'AuditSettingRequest'},
'serial_number_does_not_match_import_file': {'key': 'serialNumberDoesNotMatchImportFile', 'type': 'AuditSettingRequest'},
'rate_code_does_not_match_import_file': {'key': 'rateCodeDoesNotMatchImportFile', 'type': 'AuditSettingRequest'},
'import_file_start_date_adjusted_to_prevent_gaps': {'key': 'importFileStartDateAdjustedToPreventGaps', 'type': 'AuditSettingRequest'},
'account_alert_exists_on_account_in_import_file': {'key': 'accountAlertExistsOnAccountInImportFile', 'type': 'AuditSettingRequest'},
'abnormal_bill_cost_with_outlier_analysis': {'key': 'abnormalBillCostWithOutlierAnalysis', 'type': 'AbnormalBillCostWithOutlierAnalysisSettingRequest'},
'abnormal_bill_use_with_outlier_analysis': {'key': 'abnormalBillUseWithOutlierAnalysis', 'type': 'AbnormalBillUseWithOutlierAnalysisSettingRequest'},
'abnormal_bill_demand_with_outlier_analysis': {'key': 'abnormalBillDemandWithOutlierAnalysis', 'type': 'AbnormalBillDemandWithOutlierAnalysisSettingRequest'},
}
def __init__(self, *, bill_contains_line_item_descriptions, bill_contains_line_item_types, total_bill_cost_does_not_match_line_item_types, billing_period_outside_start_end_dates, bill_overlaps_with_other_account_bill, gap_between_bill_and_previous_bill_on_account, bill_ends_in_future, account_has_multiple_bills_in_billing_period, statement_date_before_end_date, due_date_before_end_date, bill_significantly_shorter_or_longer_than_previous, too_many_consecutive_estimated_bills, due_date_too_long_after_bill_end, statement_date_too_long_after_bill_end, invoice_number_is_repeated_on_account, likely_duplicate_bill_on_account, total_meter_cost_is_percentage_higher_than_past_year, total_meter_use_is_percentage_higher_than_past_year, serial_number_does_not_match_import_file, rate_code_does_not_match_import_file, import_file_start_date_adjusted_to_prevent_gaps, account_alert_exists_on_account_in_import_file, abnormal_bill_cost_with_outlier_analysis, abnormal_bill_use_with_outlier_analysis, abnormal_bill_demand_with_outlier_analysis, **kwargs) -> None:
super(AuditSettingsRequest, self).__init__(**kwargs)
self.bill_contains_line_item_descriptions = bill_contains_line_item_descriptions
self.bill_contains_line_item_types = bill_contains_line_item_types
self.total_bill_cost_does_not_match_line_item_types = total_bill_cost_does_not_match_line_item_types
self.billing_period_outside_start_end_dates = billing_period_outside_start_end_dates
self.bill_overlaps_with_other_account_bill = bill_overlaps_with_other_account_bill
self.gap_between_bill_and_previous_bill_on_account = gap_between_bill_and_previous_bill_on_account
self.bill_ends_in_future = bill_ends_in_future
self.account_has_multiple_bills_in_billing_period = account_has_multiple_bills_in_billing_period
self.statement_date_before_end_date = statement_date_before_end_date
self.due_date_before_end_date = due_date_before_end_date
self.bill_significantly_shorter_or_longer_than_previous = bill_significantly_shorter_or_longer_than_previous
self.too_many_consecutive_estimated_bills = too_many_consecutive_estimated_bills
self.due_date_too_long_after_bill_end = due_date_too_long_after_bill_end
self.statement_date_too_long_after_bill_end = statement_date_too_long_after_bill_end
self.invoice_number_is_repeated_on_account = invoice_number_is_repeated_on_account
self.likely_duplicate_bill_on_account = likely_duplicate_bill_on_account
self.total_meter_cost_is_percentage_higher_than_past_year = total_meter_cost_is_percentage_higher_than_past_year
self.total_meter_use_is_percentage_higher_than_past_year = total_meter_use_is_percentage_higher_than_past_year
self.serial_number_does_not_match_import_file = serial_number_does_not_match_import_file
self.rate_code_does_not_match_import_file = rate_code_does_not_match_import_file
self.import_file_start_date_adjusted_to_prevent_gaps = import_file_start_date_adjusted_to_prevent_gaps
self.account_alert_exists_on_account_in_import_file = account_alert_exists_on_account_in_import_file
self.abnormal_bill_cost_with_outlier_analysis = abnormal_bill_cost_with_outlier_analysis
self.abnormal_bill_use_with_outlier_analysis = abnormal_bill_use_with_outlier_analysis
        self.abnormal_bill_demand_with_outlier_analysis = abnormal_bill_demand_with_outlier_analysis
/DirTreeDigest-1.0.7.tar.gz/DirTreeDigest-1.0.7/dirtreedigest/utils.py
import logging
import os
import re
import sys
import time
from contextlib import contextmanager
from datetime import datetime
from enum import Enum
# Enums to communicate with subprocesses
Cmd = Enum('Cmd', 'INIT PROCESS FREE RESULT QUIT')
def shared_memory_available():
""" Single place to check (handy if it gets backported) """
return sys.version_info >= (3, 8)
@contextmanager
def open_with_error_checking(filename, mode='r'):
""" Open a file for reading with error checking """
try:
fileh = open(filename, mode)
except IOError as err:
yield None, err
else:
try:
yield fileh, None
finally:
fileh.close()
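# Illustrative usage sketch (the file name is hypothetical, not part of this module);
# the context manager yields a (file handle, error) pair, so callers check the error first:
#   with open_with_error_checking('digest.txt') as (fileh, err):
#       if err:
#           print('could not open:', err)
#       else:
#           data = fileh.read()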
def datetime_as_str(dtm=None):
if not dtm:
dtm = datetime.now()
return "{:%Y%m%d_%H%M%S}".format(dtm)
def unixify_path(path):
""" Convert path to Unix format """
return path.replace('\\', '/')
def compile_patterns(patterns, ignorecase=False):
""" Compile exclusion patterns to regular expressions """
re_pats = []
flags = 0
if ignorecase:
flags |= re.IGNORECASE
for pattern in patterns:
pattern = '^' + re.escape(pattern) + '$'
re_pats.append(re.compile(pattern, flags=flags))
return re_pats
def elem_is_matched(root, elem, patterns):
""" Check if element matches any of the exclusion patterns """
elem_mod = unixify_path(os.path.normpath(elem))
rel_elem = get_relative_path(root, elem_mod)
for re_pat in patterns:
if re.match(re_pat, rel_elem):
return True
return False
def split_net_drive(elem):
""" For network shares, split network and path parts """
mval = re.match(r"^(//[^/]+)(.*)$", elem)
if mval:
return (mval.group(1), mval.group(2))
return ('', elem)
def split_win_drive(elem):
""" For Windows drives, split drive spec and path parts """
mval = re.match(r"^([a-zA-Z]:)(.*)$", elem)
if mval:
return (mval.group(1), mval.group(2))
return ('', elem)
def get_relative_path(root, elem):
""" Get the element path relative to a given root path """
matcher = r'^' + re.escape(unixify_path(root)) + r'(.*)$'
retval = elem
mval = re.match(matcher, elem)
if mval:
retval = mval.group(1)
if retval != '/':
retval = retval.strip('/')
return retval
def compare_paths(path1, path2, ignorecase=False):
""" Compare two paths """
path1_mod = unixify_path(os.path.normpath(path1))
path2_mod = unixify_path(os.path.normpath(path2))
if ignorecase:
path1_mod = path1_mod.lower()
path2_mod = path2_mod.lower()
return path1_mod == path2_mod
def unix_time_ms(dtm=None):
""" Get Unix time in msec """
if dtm is None:
dtm = datetime.now()
epoch = datetime.utcfromtimestamp(0)
return int((dtm - epoch).total_seconds() * 1000.0)
def curr_time_secs():
""" Get current high-resolution time """
return time.perf_counter()
def flush_debug_queue(debug_queue, logger):
""" Flush the debug message queue """
while not debug_queue.empty():
retval = debug_queue.get()
log_level = retval[0]
log_message = retval[1]
logger.log(log_level, log_message)
def start_logging(filename, log_level, con_level):
""" Initialize file and console logs """
logfile_handler = logging.FileHandler(filename, 'w', 'utf-8')
log_fmt = logging.Formatter('%(levelname)s:%(name)s:%(message)s')
logfile_handler.setFormatter(log_fmt)
logfile_handler.setLevel(log_level)
console_handler = logging.StreamHandler()
con_fmt = logging.Formatter('%(levelname)s:%(name)s:%(message)s')
console_handler.setFormatter(con_fmt)
console_handler.setLevel(con_level)
logging.basicConfig(
level=logging.NOTSET, # This must be set to something
handlers=[logfile_handler, console_handler])
def outfile_write(fname, fmode, lines):
""" Write a block of data to the output file """
with open(fname, fmode, encoding='utf-8') as fileh:
for line in lines:
            fileh.write('{}\n'.format(line))
/FiPy-3.4.4.tar.gz/FiPy-3.4.4/fipy/viewers/matplotlibViewer/matplotlibStreamViewer.py
from __future__ import unicode_literals
__docformat__ = 'restructuredtext'
from fipy.tools import numerix
from fipy.variables.faceVariable import FaceVariable
from fipy.variables.cellVariable import CellVariable
from fipy.viewers.matplotlibViewer.matplotlib2DViewer import AbstractMatplotlib2DViewer
__all__ = ["MatplotlibStreamViewer"]
from future.utils import text_to_native_str
__all__ = [text_to_native_str(n) for n in __all__]
class MatplotlibStreamViewer(AbstractMatplotlib2DViewer):
"""Displays a stream plot of a 2D rank-1 `CellVariable` or
`FaceVariable` object using Matplotlib_
One issue is that this `Viewer` relies on `scipy.interpolate.griddata`,
    which interpolates on the convex hull of the data. The result is that
streams are plotted across any concavities in the mesh.
Another issue is that it does not seem possible to remove the streams
    without calling `cla()`, which means that a different set of streams cannot be
overlaid.
.. _Matplotlib: http://matplotlib.sourceforge.net/
"""
def __init__(self, vars, title=None, log=False, limits={}, axes=None, figaspect='auto',
density=1, linewidth=None, color=None, cmap=None, norm=None, arrowsize=1,
arrowstyle='-|>', minlength=0.1,
**kwlimits):
"""Creates a `MatplotlibStreamViewer`.
Parameters
----------
vars : ~fipy.variables.cellVariable.CellVariable or ~fipy.variables.faceVariable.FaceVariable
rank-1 `Variable` to display
title : str, optional
displayed at the top of the `Viewer` window
log : bool, optional
            if `True`, the stream magnitude is plotted on a base-10 logarithmic scale
limits : dict, optional
a (deprecated) alternative to limit keyword arguments
xmin, xmax, ymin, ymax, datamin, datamax : float
displayed range of data. Any limit set to
a (default) value of `None` will autoscale.
axes : ~matplotlib.axes.Axes, optional
if not `None`, `vars` will be plotted into this Matplotlib `Axes` object
figaspect : float, optional
desired aspect ratio of figure. If arg is a number, use that aspect
ratio. If arg is `auto`, the aspect ratio will be determined from
the Variable's mesh.
density : float or (float, float), optional
Controls the closeness of streamlines. When ``density = 1``,
the domain is divided into a 30x30 grid. *density* linearly
scales this grid. Each cell in the grid can have, at most, one
traversing streamline. For different densities in each
direction, use a tuple (density_x, density_y).
linewidth : array_like or ~fipy.variables.cellVariable.CellVariable or ~fipy.variables.faceVariable.FaceVariable, optional
The width of the stream lines. With a rank-0 `CellVariable` or
`FaceVariable` the line width can be varied across the grid.
The MeshVariable must have the same type and be defined on
the same `Mesh` as *vars*.
color : str or ~fipy.variables.cellVariable.CellVariable or ~fipy.variables.faceVariable.FaceVariable, optional
The streamline color as a matplotlib color code or a field of
numbers. If given a rank-0 `CellVariable` or `FaceVariable`,
its values are converted to colors using *cmap* and *norm*.
The MeshVariable must have the same type and be defined on the
same `Mesh` as *vars*.
cmap : ~matplotlib.colors.Colormap, optional
Colormap used to plot streamlines and arrows. This is only
used if *color* is a MeshVariable.
norm : ~matplotlib.colors.Normalize, optional
Normalize object used to scale luminance data to 0, 1. If
``None``, stretch (min, max) to (0, 1). Only necessary when
*color* is a MeshVariable.
arrowsize : float, optional
Scaling factor for the arrow size.
arrowstyle : str, optional
Arrow style specification.
See `~matplotlib.patches.FancyArrowPatch`.
minlength : float, optional
Minimum length of streamline in axes coordinates.
"""
kwlimits.update(limits)
AbstractMatplotlib2DViewer.__init__(self, vars=vars, title=title, axes=axes, figaspect=figaspect, **kwlimits)
self.log = log
self.kwargs = dict(density=density, linewidth=linewidth, color=color,
cmap=cmap, norm=norm, arrowsize=arrowsize,
arrowstyle=arrowstyle, minlength=minlength)
self._stream = None
self._plot()
@property
def kwargs(self):
"""keyword arguments to pass to :func:`~matplotlib.axes.Axes.streamplot`."""
return self._kwargs
@kwargs.setter
def kwargs(self, value):
self._kwargs = value
def _getSuitableVars(self, vars):
from fipy.meshes.mesh2D import Mesh2D
from fipy.meshes.uniformGrid2D import UniformGrid2D
vars = [var for var in AbstractMatplotlib2DViewer._getSuitableVars(self, vars) \
if ((isinstance(var.mesh, Mesh2D)
or isinstance(var.mesh, UniformGrid2D))\
and (isinstance(var, FaceVariable) \
or isinstance(var, CellVariable)) and var.rank == 1)]
if len(vars) == 0:
from fipy.viewers import MeshDimensionError
raise MeshDimensionError("The mesh must be a Mesh2D instance")
# this viewer can only display one variable
return [vars[0]]
def _plot(self):
from scipy.interpolate import griddata
var = self.vars[0]
mesh = var.mesh
xmin, ymin = mesh.extents['min']
xmax, ymax = mesh.extents['max']
N = 100
X = numerix.linspace(xmin, xmax, N)
Y = numerix.linspace(ymin, ymax, N)
grid_x, grid_y = numerix.mgrid[xmin:xmax:N*1j, ymin:ymax:N*1j]
if isinstance(var, FaceVariable):
C = mesh.faceCenters
elif isinstance(var, CellVariable):
C = mesh.cellCenters
U = griddata(C.value.T, var.value[0],
(grid_x, grid_y), method='cubic')
V = griddata(C.value.T, var.value[1],
(grid_x, grid_y), method='cubic')
lw = self.kwargs["linewidth"]
if isinstance(lw, (FaceVariable, CellVariable)):
lw = griddata(C.value.T, lw.value,
(grid_x, grid_y), method='cubic')
color = self.kwargs["color"]
if isinstance(color, (FaceVariable, CellVariable)):
color = griddata(C.value.T, color.value,
(grid_x, grid_y), method='cubic', fill_value=color.min())
U = U.T
V = V.T
ang = numerix.arctan2(V, U)
mag = numerix.sqrt(U**2 + V**2)
datamin, datamax = self._autoscale(vars=(mag,),
datamin=self._getLimit('datamin'),
datamax=self._getLimit('datamax'))
mag = numerix.where(mag > datamax, numerix.nan, mag)
mag = numerix.where(mag < datamin, numerix.nan, mag)
if self.log:
mag = numerix.log10(mag)
U = mag * numerix.cos(ang)
V = mag * numerix.sin(ang)
# if self._stream is not None:
# # the following doesn't work, nor does it help to `add_collection` first
# # self._stream.arrows.remove()
# self._stream.lines.remove()
kwargs = self.kwargs.copy()
kwargs["linewidth"] = lw
kwargs["color"] = color
self.axes.cla()
self._stream = self.axes.streamplot(X, Y, U, V, **kwargs)
self.axes.set_xlim(xmin=self._getLimit('xmin'),
xmax=self._getLimit('xmax'))
self.axes.set_ylim(ymin=self._getLimit('ymin'),
ymax=self._getLimit('ymax'))
@classmethod
def _doctest_body(cls):
return (cls._test2Dvector()
+ cls._test2DvectorIrregular())
if __name__ == "__main__":
import fipy.tests.doctestPlus
    fipy.tests.doctestPlus.execButNoTest()
/Nuitka_winsvc-1.7.10-cp310-cp310-win_amd64.whl/nuitka/nodes/NodeMetaClasses.py
from abc import ABCMeta
from nuitka.__past__ import intern
from nuitka.Errors import NuitkaAssumptionError, NuitkaNodeDesignError
from nuitka.PythonVersions import python_version
def _checkBases(name, bases):
# Avoid duplicate base classes.
assert len(bases) == len(set(bases)), (name, bases)
# Insist on mixins being in proper place for inheritance.
last_mixin = None
for base in bases:
base_name = base.__name__
is_mixin = base_name.endswith("Mixin")
if is_mixin and last_mixin is False:
raise NuitkaNodeDesignError(
name, "Mixins must come first in base classes.", bases
)
last_mixin = is_mixin
if base is not object and "__slots__" not in base.__dict__:
raise NuitkaNodeDesignError(name, "All bases must have __slots__.", base)
class NodeCheckMetaClass(ABCMeta):
kinds = {}
def __new__(mcs, name, bases, dictionary): # pylint: disable=I0021,arguments-differ
_checkBases(name, bases)
if "__slots__" not in dictionary:
dictionary["__slots__"] = ()
if "named_children" in dictionary:
assert type(dictionary["named_children"]) is tuple
dictionary["__slots__"] += tuple(
intern("subnode_" + named_child.split("|", 1)[0])
for named_child in dictionary["named_children"]
)
if "nice_children" in dictionary:
assert type(dictionary["nice_children"]) is tuple
assert len(dictionary["nice_children"]) == len(dictionary["named_children"])
dictionary["nice_children_dict"] = dict(
(intern(named_child.split("|", 1)[0]), nice_name)
for (named_child, nice_name) in zip(
dictionary["named_children"], dictionary["nice_children"]
)
)
if "node_attributes" in dictionary:
dictionary["__slots__"] += dictionary["node_attributes"]
assert len(dictionary["__slots__"]) == len(
set(dictionary["__slots__"])
), dictionary["__slots__"]
if "python_version_spec" in dictionary:
condition = "%s %s" % (
hex(python_version),
dictionary["python_version_spec"],
)
# We trust our node class files, pylint: disable=eval-used
if not eval(condition):
def __init__(self, *args, **kwargs):
raise NuitkaAssumptionError(name, "assumption violated", condition)
dictionary["__init__"] = __init__
# Not a method:
if "checker" in dictionary:
dictionary["checker"] = staticmethod(dictionary["checker"])
return ABCMeta.__new__(mcs, name, bases, dictionary)
def __init__(cls, name, bases, dictionary):
if not name.endswith(("Base", "Mixin")):
if "kind" not in dictionary:
raise NuitkaNodeDesignError(name, "Must provide class variable 'kind'")
kind = dictionary["kind"]
assert type(kind) is str, name
            if kind in NodeCheckMetaClass.kinds and "replaces" not in dictionary:
raise NuitkaNodeDesignError(
name, "Duplicate nodes for kind '%s'" % kind
)
NodeCheckMetaClass.kinds[kind] = cls
NodeCheckMetaClass.kinds[name] = cls
kind_to_name_part = "".join([x.capitalize() for x in kind.split("_")])
assert name.endswith(kind_to_name_part), (name, kind_to_name_part)
# Automatically add checker methods for everything to the common
# base class
checker_method = "is" + kind_to_name_part
# TODO: How about making these two functions, one to statically
# return True and False, and put one in the base class, and one
# in the new class, would be slightly faster.
def checkKind(self):
return self.kind == kind
# Add automatic checker methods to the node base class.
from .NodeBases import NodeBase
if not hasattr(NodeBase, checker_method):
setattr(NodeBase, checker_method, checkKind)
ABCMeta.__init__(cls, name, bases, dictionary)
# For every node type, there is a test, and then some more members,
# For Python2/3 compatible source, we create a base class that has the metaclass
# used and doesn't require making a syntax choice.
NodeMetaClassBase = NodeCheckMetaClass(
"NodeMetaClassBase", (object,), {"__slots__": ()}
)
/Nuitka_fixed-1.1.2-cp310-cp310-win_amd64.whl/nuitka/build/inline_copy/lib/scons-2.3.2/SCons/Tool/link.py
__revision__ = "src/engine/SCons/Tool/link.py 2014/07/05 09:42:21 garyo"
import re
import SCons.Defaults
import SCons.Tool
import SCons.Util
import SCons.Warnings
cplusplus = __import__('c++', globals(), locals(), [])
def isfortran(env, source): return False
def isD(env,source): return False
issued_mixed_link_warning = False
def smart_link(source, target, env, for_signature):
has_cplusplus = cplusplus.iscplusplus(source)
has_fortran = isfortran(env, source)
has_d = isD(env, source)
if has_cplusplus and has_fortran and not has_d:
global issued_mixed_link_warning
if not issued_mixed_link_warning:
msg = "Using $CXX to link Fortran and C++ code together.\n\t" + \
"This may generate a buggy executable if the '%s'\n\t" + \
"compiler does not know how to deal with Fortran runtimes."
SCons.Warnings.warn(SCons.Warnings.FortranCxxMixWarning,
msg % env.subst('$CXX'))
issued_mixed_link_warning = True
return '$CXX'
elif has_d:
env['LINKCOM'] = env['DLINKCOM']
env['SHLINKCOM'] = env['SHDLINKCOM']
return '$DC'
elif has_fortran:
return '$FORTRAN'
elif has_cplusplus:
return '$CXX'
return '$CC'
def shlib_emitter(target, source, env):
Verbose = False
platform = env.subst('$PLATFORM')
for tgt in target:
tgt.attributes.shared = 1
try:
# target[0] comes in as libtest.so. Add the version extensions
version = env.subst('$SHLIBVERSION')
if version:
version_names = shlib_emitter_names(target, source, env)
# change the name of the target to include the version number
target[0].name = version_names[0]
for name in version_names:
env.SideEffect(name, target[0])
env.Clean(target[0], name)
if Verbose:
print "shlib_emitter: add side effect - ",name
except KeyError:
version = None
return (target, source)
def shlib_emitter_names(target, source, env):
"""Return list of file names that are side effects for a versioned library build. The first name in the list is the new name for the target"""
Verbose = False
platform = env.subst('$PLATFORM')
version_names = []
try:
# target[0] comes in as libtest.so. Add the version extensions
version = env.subst('$SHLIBVERSION')
if version.count(".") != 2:
# We need a version of the form x.y.z to proceed
raise ValueError
if version:
if platform == 'posix':
versionparts = version.split('.')
name = target[0].name
# generate library name with the version number
version_name = target[0].name + '.' + version
if Verbose:
print "shlib_emitter_names: target is ", version_name
print "shlib_emitter_names: side effect: ", name
# add version_name to list of names to be a Side effect
version_names.append(version_name)
if Verbose:
print "shlib_emitter_names: versionparts ",versionparts
for ver in versionparts[0:-1]:
name = name + '.' + ver
if Verbose:
print "shlib_emitter_names: side effect: ", name
# add name to list of names to be a Side effect
version_names.append(name)
elif platform == 'darwin':
shlib_suffix = env.subst('$SHLIBSUFFIX')
name = target[0].name
# generate library name with the version number
suffix_re = re.escape(shlib_suffix)
version_name = re.sub(suffix_re, '.' + version + shlib_suffix, name)
if Verbose:
print "shlib_emitter_names: target is ", version_name
print "shlib_emitter_names: side effect: ", name
# add version_name to list of names to be a Side effect
version_names.append(version_name)
elif platform == 'cygwin':
shlib_suffix = env.subst('$SHLIBSUFFIX')
name = target[0].name
# generate library name with the version number
suffix_re = re.escape(shlib_suffix)
version_name = re.sub(suffix_re, '-' + re.sub('\.', '-', version) + shlib_suffix, name)
if Verbose:
print "shlib_emitter_names: target is ", version_name
print "shlib_emitter_names: side effect: ", name
# add version_name to list of names to be a Side effect
version_names.append(version_name)
except KeyError:
version = None
return version_names
def generate(env):
"""Add Builders and construction variables for gnulink to an Environment."""
SCons.Tool.createSharedLibBuilder(env)
SCons.Tool.createProgBuilder(env)
env['SHLINK'] = '$LINK'
env['SHLINKFLAGS'] = SCons.Util.CLVar('$LINKFLAGS -shared')
env['SHLINKCOM'] = '$SHLINK -o $TARGET $SHLINKFLAGS $__RPATH $SOURCES $_LIBDIRFLAGS $_LIBFLAGS'
# don't set up the emitter, cause AppendUnique will generate a list
# starting with None :-(
env.Append(SHLIBEMITTER = [shlib_emitter])
env['SMARTLINK'] = smart_link
env['LINK'] = "$SMARTLINK"
env['LINKFLAGS'] = SCons.Util.CLVar('')
# __RPATH is only set to something ($_RPATH typically) on platforms that support it.
env['LINKCOM'] = '$LINK -o $TARGET $LINKFLAGS $__RPATH $SOURCES $_LIBDIRFLAGS $_LIBFLAGS'
env['LIBDIRPREFIX']='-L'
env['LIBDIRSUFFIX']=''
env['_LIBFLAGS']='${_stripixes(LIBLINKPREFIX, LIBS, LIBLINKSUFFIX, LIBPREFIXES, LIBSUFFIXES, __env__)}'
env['LIBLINKPREFIX']='-l'
env['LIBLINKSUFFIX']=''
if env['PLATFORM'] == 'hpux':
env['SHLIBSUFFIX'] = '.sl'
elif env['PLATFORM'] == 'aix':
env['SHLIBSUFFIX'] = '.a'
# For most platforms, a loadable module is the same as a shared
# library. Platforms which are different can override these, but
# setting them the same means that LoadableModule works everywhere.
SCons.Tool.createLoadableModuleBuilder(env)
env['LDMODULE'] = '$SHLINK'
# don't set up the emitter, cause AppendUnique will generate a list
# starting with None :-(
env.Append(LDMODULEEMITTER='$SHLIBEMITTER')
env['LDMODULEPREFIX'] = '$SHLIBPREFIX'
env['LDMODULESUFFIX'] = '$SHLIBSUFFIX'
env['LDMODULEFLAGS'] = '$SHLINKFLAGS'
env['LDMODULECOM'] = '$LDMODULE -o $TARGET $LDMODULEFLAGS $__RPATH $SOURCES $_LIBDIRFLAGS $_LIBFLAGS'
def exists(env):
# This module isn't really a Tool on its own, it's common logic for
# other linkers.
return None
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4:
/Electrum-CHI-3.3.8.tar.gz/Electrum-CHI-3.3.8/packages/chardet/escsm.py
from .enums import MachineState
HZ_CLS = (
1,0,0,0,0,0,0,0, # 00 - 07
0,0,0,0,0,0,0,0, # 08 - 0f
0,0,0,0,0,0,0,0, # 10 - 17
0,0,0,1,0,0,0,0, # 18 - 1f
0,0,0,0,0,0,0,0, # 20 - 27
0,0,0,0,0,0,0,0, # 28 - 2f
0,0,0,0,0,0,0,0, # 30 - 37
0,0,0,0,0,0,0,0, # 38 - 3f
0,0,0,0,0,0,0,0, # 40 - 47
0,0,0,0,0,0,0,0, # 48 - 4f
0,0,0,0,0,0,0,0, # 50 - 57
0,0,0,0,0,0,0,0, # 58 - 5f
0,0,0,0,0,0,0,0, # 60 - 67
0,0,0,0,0,0,0,0, # 68 - 6f
0,0,0,0,0,0,0,0, # 70 - 77
0,0,0,4,0,5,2,0, # 78 - 7f
1,1,1,1,1,1,1,1, # 80 - 87
1,1,1,1,1,1,1,1, # 88 - 8f
1,1,1,1,1,1,1,1, # 90 - 97
1,1,1,1,1,1,1,1, # 98 - 9f
1,1,1,1,1,1,1,1, # a0 - a7
1,1,1,1,1,1,1,1, # a8 - af
1,1,1,1,1,1,1,1, # b0 - b7
1,1,1,1,1,1,1,1, # b8 - bf
1,1,1,1,1,1,1,1, # c0 - c7
1,1,1,1,1,1,1,1, # c8 - cf
1,1,1,1,1,1,1,1, # d0 - d7
1,1,1,1,1,1,1,1, # d8 - df
1,1,1,1,1,1,1,1, # e0 - e7
1,1,1,1,1,1,1,1, # e8 - ef
1,1,1,1,1,1,1,1, # f0 - f7
1,1,1,1,1,1,1,1, # f8 - ff
)
HZ_ST = (
MachineState.START,MachineState.ERROR, 3,MachineState.START,MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,# 00-07
MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,# 08-0f
MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START, 4,MachineState.ERROR,# 10-17
5,MachineState.ERROR, 6,MachineState.ERROR, 5, 5, 4,MachineState.ERROR,# 18-1f
4,MachineState.ERROR, 4, 4, 4,MachineState.ERROR, 4,MachineState.ERROR,# 20-27
4,MachineState.ITS_ME,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,# 28-2f
)
HZ_CHAR_LEN_TABLE = (0, 0, 0, 0, 0, 0)
HZ_SM_MODEL = {'class_table': HZ_CLS,
'class_factor': 6,
'state_table': HZ_ST,
'char_len_table': HZ_CHAR_LEN_TABLE,
'name': "HZ-GB-2312",
'language': 'Chinese'}
ISO2022CN_CLS = (
2,0,0,0,0,0,0,0, # 00 - 07
0,0,0,0,0,0,0,0, # 08 - 0f
0,0,0,0,0,0,0,0, # 10 - 17
0,0,0,1,0,0,0,0, # 18 - 1f
0,0,0,0,0,0,0,0, # 20 - 27
0,3,0,0,0,0,0,0, # 28 - 2f
0,0,0,0,0,0,0,0, # 30 - 37
0,0,0,0,0,0,0,0, # 38 - 3f
0,0,0,4,0,0,0,0, # 40 - 47
0,0,0,0,0,0,0,0, # 48 - 4f
0,0,0,0,0,0,0,0, # 50 - 57
0,0,0,0,0,0,0,0, # 58 - 5f
0,0,0,0,0,0,0,0, # 60 - 67
0,0,0,0,0,0,0,0, # 68 - 6f
0,0,0,0,0,0,0,0, # 70 - 77
0,0,0,0,0,0,0,0, # 78 - 7f
2,2,2,2,2,2,2,2, # 80 - 87
2,2,2,2,2,2,2,2, # 88 - 8f
2,2,2,2,2,2,2,2, # 90 - 97
2,2,2,2,2,2,2,2, # 98 - 9f
2,2,2,2,2,2,2,2, # a0 - a7
2,2,2,2,2,2,2,2, # a8 - af
2,2,2,2,2,2,2,2, # b0 - b7
2,2,2,2,2,2,2,2, # b8 - bf
2,2,2,2,2,2,2,2, # c0 - c7
2,2,2,2,2,2,2,2, # c8 - cf
2,2,2,2,2,2,2,2, # d0 - d7
2,2,2,2,2,2,2,2, # d8 - df
2,2,2,2,2,2,2,2, # e0 - e7
2,2,2,2,2,2,2,2, # e8 - ef
2,2,2,2,2,2,2,2, # f0 - f7
2,2,2,2,2,2,2,2, # f8 - ff
)
ISO2022CN_ST = (
MachineState.START, 3,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,# 00-07
MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,# 08-0f
MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,# 10-17
MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 4,MachineState.ERROR,# 18-1f
MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,# 20-27
5, 6,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,# 28-2f
MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,# 30-37
MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,MachineState.START,# 38-3f
)
ISO2022CN_CHAR_LEN_TABLE = (0, 0, 0, 0, 0, 0, 0, 0, 0)
ISO2022CN_SM_MODEL = {'class_table': ISO2022CN_CLS,
'class_factor': 9,
'state_table': ISO2022CN_ST,
'char_len_table': ISO2022CN_CHAR_LEN_TABLE,
'name': "ISO-2022-CN",
'language': 'Chinese'}
ISO2022JP_CLS = (
2,0,0,0,0,0,0,0, # 00 - 07
0,0,0,0,0,0,2,2, # 08 - 0f
0,0,0,0,0,0,0,0, # 10 - 17
0,0,0,1,0,0,0,0, # 18 - 1f
0,0,0,0,7,0,0,0, # 20 - 27
3,0,0,0,0,0,0,0, # 28 - 2f
0,0,0,0,0,0,0,0, # 30 - 37
0,0,0,0,0,0,0,0, # 38 - 3f
6,0,4,0,8,0,0,0, # 40 - 47
0,9,5,0,0,0,0,0, # 48 - 4f
0,0,0,0,0,0,0,0, # 50 - 57
0,0,0,0,0,0,0,0, # 58 - 5f
0,0,0,0,0,0,0,0, # 60 - 67
0,0,0,0,0,0,0,0, # 68 - 6f
0,0,0,0,0,0,0,0, # 70 - 77
0,0,0,0,0,0,0,0, # 78 - 7f
2,2,2,2,2,2,2,2, # 80 - 87
2,2,2,2,2,2,2,2, # 88 - 8f
2,2,2,2,2,2,2,2, # 90 - 97
2,2,2,2,2,2,2,2, # 98 - 9f
2,2,2,2,2,2,2,2, # a0 - a7
2,2,2,2,2,2,2,2, # a8 - af
2,2,2,2,2,2,2,2, # b0 - b7
2,2,2,2,2,2,2,2, # b8 - bf
2,2,2,2,2,2,2,2, # c0 - c7
2,2,2,2,2,2,2,2, # c8 - cf
2,2,2,2,2,2,2,2, # d0 - d7
2,2,2,2,2,2,2,2, # d8 - df
2,2,2,2,2,2,2,2, # e0 - e7
2,2,2,2,2,2,2,2, # e8 - ef
2,2,2,2,2,2,2,2, # f0 - f7
2,2,2,2,2,2,2,2, # f8 - ff
)
ISO2022JP_ST = (
MachineState.START, 3,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,# 00-07
MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,# 08-0f
MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,# 10-17
MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,# 18-1f
MachineState.ERROR, 5,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 4,MachineState.ERROR,MachineState.ERROR,# 20-27
MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 6,MachineState.ITS_ME,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,# 28-2f
MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,# 30-37
MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,# 38-3f
MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,MachineState.START,MachineState.START,# 40-47
)
ISO2022JP_CHAR_LEN_TABLE = (0, 0, 0, 0, 0, 0, 0, 0, 0, 0)
ISO2022JP_SM_MODEL = {'class_table': ISO2022JP_CLS,
'class_factor': 10,
'state_table': ISO2022JP_ST,
'char_len_table': ISO2022JP_CHAR_LEN_TABLE,
'name': "ISO-2022-JP",
'language': 'Japanese'}
ISO2022KR_CLS = (
2,0,0,0,0,0,0,0, # 00 - 07
0,0,0,0,0,0,0,0, # 08 - 0f
0,0,0,0,0,0,0,0, # 10 - 17
0,0,0,1,0,0,0,0, # 18 - 1f
0,0,0,0,3,0,0,0, # 20 - 27
0,4,0,0,0,0,0,0, # 28 - 2f
0,0,0,0,0,0,0,0, # 30 - 37
0,0,0,0,0,0,0,0, # 38 - 3f
0,0,0,5,0,0,0,0, # 40 - 47
0,0,0,0,0,0,0,0, # 48 - 4f
0,0,0,0,0,0,0,0, # 50 - 57
0,0,0,0,0,0,0,0, # 58 - 5f
0,0,0,0,0,0,0,0, # 60 - 67
0,0,0,0,0,0,0,0, # 68 - 6f
0,0,0,0,0,0,0,0, # 70 - 77
0,0,0,0,0,0,0,0, # 78 - 7f
2,2,2,2,2,2,2,2, # 80 - 87
2,2,2,2,2,2,2,2, # 88 - 8f
2,2,2,2,2,2,2,2, # 90 - 97
2,2,2,2,2,2,2,2, # 98 - 9f
2,2,2,2,2,2,2,2, # a0 - a7
2,2,2,2,2,2,2,2, # a8 - af
2,2,2,2,2,2,2,2, # b0 - b7
2,2,2,2,2,2,2,2, # b8 - bf
2,2,2,2,2,2,2,2, # c0 - c7
2,2,2,2,2,2,2,2, # c8 - cf
2,2,2,2,2,2,2,2, # d0 - d7
2,2,2,2,2,2,2,2, # d8 - df
2,2,2,2,2,2,2,2, # e0 - e7
2,2,2,2,2,2,2,2, # e8 - ef
2,2,2,2,2,2,2,2, # f0 - f7
2,2,2,2,2,2,2,2, # f8 - ff
)
ISO2022KR_ST = (
MachineState.START, 3,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,# 00-07
MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,# 08-0f
MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 4,MachineState.ERROR,MachineState.ERROR,# 10-17
MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 5,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,# 18-1f
MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.START,MachineState.START,MachineState.START,MachineState.START,# 20-27
)
ISO2022KR_CHAR_LEN_TABLE = (0, 0, 0, 0, 0, 0)
ISO2022KR_SM_MODEL = {'class_table': ISO2022KR_CLS,
'class_factor': 6,
'state_table': ISO2022KR_ST,
'char_len_table': ISO2022KR_CHAR_LEN_TABLE,
'name': "ISO-2022-KR",
                      'language': 'Korean'}
/Django_patch-2.2.19-py3-none-any.whl/django/conf/global_settings.py
def gettext_noop(s):
return s
####################
# CORE #
####################
DEBUG = False
# Whether the framework should propagate raw exceptions rather than catching
# them. This is useful under some testing situations and should never be used
# on a live site.
DEBUG_PROPAGATE_EXCEPTIONS = False
# People who get code error notifications.
# In the format [('Full Name', '[email protected]'), ('Full Name', '[email protected]')]
ADMINS = []
# List of IP addresses, as strings, that:
# * See debug comments, when DEBUG is true
# * Receive x-headers
INTERNAL_IPS = []
# Hosts/domain names that are valid for this site.
# "*" matches anything, ".example.com" matches example.com and all subdomains
ALLOWED_HOSTS = []
# Local time zone for this installation. All choices can be found here:
# https://en.wikipedia.org/wiki/List_of_tz_zones_by_name (although not all
# systems may support all possibilities). When USE_TZ is True, this is
# interpreted as the default user time zone.
TIME_ZONE = 'America/Chicago'
# If you set this to True, Django will use timezone-aware datetimes.
USE_TZ = False
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'en-us'
# Languages we provide translations for, out of the box.
LANGUAGES = [
('af', gettext_noop('Afrikaans')),
('ar', gettext_noop('Arabic')),
('ast', gettext_noop('Asturian')),
('az', gettext_noop('Azerbaijani')),
('bg', gettext_noop('Bulgarian')),
('be', gettext_noop('Belarusian')),
('bn', gettext_noop('Bengali')),
('br', gettext_noop('Breton')),
('bs', gettext_noop('Bosnian')),
('ca', gettext_noop('Catalan')),
('cs', gettext_noop('Czech')),
('cy', gettext_noop('Welsh')),
('da', gettext_noop('Danish')),
('de', gettext_noop('German')),
('dsb', gettext_noop('Lower Sorbian')),
('el', gettext_noop('Greek')),
('en', gettext_noop('English')),
('en-au', gettext_noop('Australian English')),
('en-gb', gettext_noop('British English')),
('eo', gettext_noop('Esperanto')),
('es', gettext_noop('Spanish')),
('es-ar', gettext_noop('Argentinian Spanish')),
('es-co', gettext_noop('Colombian Spanish')),
('es-mx', gettext_noop('Mexican Spanish')),
('es-ni', gettext_noop('Nicaraguan Spanish')),
('es-ve', gettext_noop('Venezuelan Spanish')),
('et', gettext_noop('Estonian')),
('eu', gettext_noop('Basque')),
('fa', gettext_noop('Persian')),
('fi', gettext_noop('Finnish')),
('fr', gettext_noop('French')),
('fy', gettext_noop('Frisian')),
('ga', gettext_noop('Irish')),
('gd', gettext_noop('Scottish Gaelic')),
('gl', gettext_noop('Galician')),
('he', gettext_noop('Hebrew')),
('hi', gettext_noop('Hindi')),
('hr', gettext_noop('Croatian')),
('hsb', gettext_noop('Upper Sorbian')),
('hu', gettext_noop('Hungarian')),
('hy', gettext_noop('Armenian')),
('ia', gettext_noop('Interlingua')),
('id', gettext_noop('Indonesian')),
('io', gettext_noop('Ido')),
('is', gettext_noop('Icelandic')),
('it', gettext_noop('Italian')),
('ja', gettext_noop('Japanese')),
('ka', gettext_noop('Georgian')),
('kab', gettext_noop('Kabyle')),
('kk', gettext_noop('Kazakh')),
('km', gettext_noop('Khmer')),
('kn', gettext_noop('Kannada')),
('ko', gettext_noop('Korean')),
('lb', gettext_noop('Luxembourgish')),
('lt', gettext_noop('Lithuanian')),
('lv', gettext_noop('Latvian')),
('mk', gettext_noop('Macedonian')),
('ml', gettext_noop('Malayalam')),
('mn', gettext_noop('Mongolian')),
('mr', gettext_noop('Marathi')),
('my', gettext_noop('Burmese')),
('nb', gettext_noop('Norwegian Bokmål')),
('ne', gettext_noop('Nepali')),
('nl', gettext_noop('Dutch')),
('nn', gettext_noop('Norwegian Nynorsk')),
('os', gettext_noop('Ossetic')),
('pa', gettext_noop('Punjabi')),
('pl', gettext_noop('Polish')),
('pt', gettext_noop('Portuguese')),
('pt-br', gettext_noop('Brazilian Portuguese')),
('ro', gettext_noop('Romanian')),
('ru', gettext_noop('Russian')),
('sk', gettext_noop('Slovak')),
('sl', gettext_noop('Slovenian')),
('sq', gettext_noop('Albanian')),
('sr', gettext_noop('Serbian')),
('sr-latn', gettext_noop('Serbian Latin')),
('sv', gettext_noop('Swedish')),
('sw', gettext_noop('Swahili')),
('ta', gettext_noop('Tamil')),
('te', gettext_noop('Telugu')),
('th', gettext_noop('Thai')),
('tr', gettext_noop('Turkish')),
('tt', gettext_noop('Tatar')),
('udm', gettext_noop('Udmurt')),
('uk', gettext_noop('Ukrainian')),
('ur', gettext_noop('Urdu')),
('vi', gettext_noop('Vietnamese')),
('zh-hans', gettext_noop('Simplified Chinese')),
('zh-hant', gettext_noop('Traditional Chinese')),
]
# Languages using BiDi (right-to-left) layout
LANGUAGES_BIDI = ["he", "ar", "fa", "ur"]
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True
LOCALE_PATHS = []
# Settings for language cookie
LANGUAGE_COOKIE_NAME = 'django_language'
LANGUAGE_COOKIE_AGE = None
LANGUAGE_COOKIE_DOMAIN = None
LANGUAGE_COOKIE_PATH = '/'
# If you set this to True, Django will format dates, numbers and calendars
# according to user current locale.
USE_L10N = False
# Not-necessarily-technical managers of the site. They get broken link
# notifications and other various emails.
MANAGERS = ADMINS
# Default content type and charset to use for all HttpResponse objects, if a
# MIME type isn't manually specified. These are used to construct the
# Content-Type header.
DEFAULT_CONTENT_TYPE = 'text/html'
DEFAULT_CHARSET = 'utf-8'
# Encoding of files read from disk (template and initial SQL files).
FILE_CHARSET = 'utf-8'
# Email address that error messages come from.
SERVER_EMAIL = 'root@localhost'
# Database connection info. If left empty, will default to the dummy backend.
DATABASES = {}
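# A minimal example (illustrative only; any supported backend works the same way):
# DATABASES = {
#     'default': {
#         'ENGINE': 'django.db.backends.sqlite3',
#         'NAME': 'db.sqlite3',
#     }
# }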
# Classes used to implement DB routing behavior.
DATABASE_ROUTERS = []
# The email backend to use. For possible shortcuts see django.core.mail.
# The default is to use the SMTP backend.
# Third-party backends can be specified by providing a Python path
# to a module that defines an EmailBackend class.
EMAIL_BACKEND = 'django.core.mail.backends.smtp.EmailBackend'
# Host for sending email.
EMAIL_HOST = 'localhost'
# Port for sending email.
EMAIL_PORT = 25
# Whether to send SMTP 'Date' header in the local time zone or in UTC.
EMAIL_USE_LOCALTIME = False
# Optional SMTP authentication information for EMAIL_HOST.
EMAIL_HOST_USER = ''
EMAIL_HOST_PASSWORD = ''
EMAIL_USE_TLS = False
EMAIL_USE_SSL = False
EMAIL_SSL_CERTFILE = None
EMAIL_SSL_KEYFILE = None
EMAIL_TIMEOUT = None
# List of strings representing installed apps.
INSTALLED_APPS = []
TEMPLATES = []
# Default form rendering class.
FORM_RENDERER = 'django.forms.renderers.DjangoTemplates'
# Default email address to use for various automated correspondence from
# the site managers.
DEFAULT_FROM_EMAIL = 'webmaster@localhost'
# Subject-line prefix for email messages send with django.core.mail.mail_admins
# or ...mail_managers. Make sure to include the trailing space.
EMAIL_SUBJECT_PREFIX = '[Django] '
# Whether to append trailing slashes to URLs.
APPEND_SLASH = True
# Whether to prepend the "www." subdomain to URLs that don't have it.
PREPEND_WWW = False
# Override the server-derived value of SCRIPT_NAME
FORCE_SCRIPT_NAME = None
# List of compiled regular expression objects representing User-Agent strings
# that are not allowed to visit any page, systemwide. Use this for bad
# robots/crawlers. Here are a few examples:
# import re
# DISALLOWED_USER_AGENTS = [
# re.compile(r'^NaverBot.*'),
# re.compile(r'^EmailSiphon.*'),
# re.compile(r'^SiteSucker.*'),
# re.compile(r'^sohu-search'),
# ]
DISALLOWED_USER_AGENTS = []
ABSOLUTE_URL_OVERRIDES = {}
# List of compiled regular expression objects representing URLs that need not
# be reported by BrokenLinkEmailsMiddleware. Here are a few examples:
# import re
# IGNORABLE_404_URLS = [
# re.compile(r'^/apple-touch-icon.*\.png$'),
# re.compile(r'^/favicon.ico$'),
# re.compile(r'^/robots.txt$'),
# re.compile(r'^/phpmyadmin/'),
# re.compile(r'\.(cgi|php|pl)$'),
# ]
IGNORABLE_404_URLS = []
# A secret key for this particular Django installation. Used in secret-key
# hashing algorithms. Set this in your settings, or Django will complain
# loudly.
SECRET_KEY = ''
# Default file storage mechanism that holds media.
DEFAULT_FILE_STORAGE = 'django.core.files.storage.FileSystemStorage'
# Absolute filesystem path to the directory that will hold user-uploaded files.
# Example: "/var/www/example.com/media/"
MEDIA_ROOT = ''
# URL that handles the media served from MEDIA_ROOT.
# Examples: "http://example.com/media/", "http://media.example.com/"
MEDIA_URL = ''
# Absolute path to the directory static files should be collected to.
# Example: "/var/www/example.com/static/"
STATIC_ROOT = None
# URL that handles the static files served from STATIC_ROOT.
# Example: "http://example.com/static/", "http://static.example.com/"
STATIC_URL = None
# List of upload handler classes to be applied in order.
FILE_UPLOAD_HANDLERS = [
'django.core.files.uploadhandler.MemoryFileUploadHandler',
'django.core.files.uploadhandler.TemporaryFileUploadHandler',
]
# Maximum size, in bytes, of a request before it will be streamed to the
# file system instead of into memory.
FILE_UPLOAD_MAX_MEMORY_SIZE = 2621440 # i.e. 2.5 MB
# Maximum size in bytes of request data (excluding file uploads) that will be
# read before a SuspiciousOperation (RequestDataTooBig) is raised.
DATA_UPLOAD_MAX_MEMORY_SIZE = 2621440 # i.e. 2.5 MB
# Maximum number of GET/POST parameters that will be read before a
# SuspiciousOperation (TooManyFieldsSent) is raised.
DATA_UPLOAD_MAX_NUMBER_FIELDS = 1000
# Directory in which upload streamed files will be temporarily saved. A value of
# `None` will make Django use the operating system's default temporary directory
# (i.e. "/tmp" on *nix systems).
FILE_UPLOAD_TEMP_DIR = None
# The numeric mode to set newly-uploaded files to. The value should be a mode
# you'd pass directly to os.chmod; see https://docs.python.org/library/os.html#files-and-directories.
FILE_UPLOAD_PERMISSIONS = None
# The numeric mode to assign to newly-created directories, when uploading files.
# The value should be a mode as you'd pass to os.chmod;
# see https://docs.python.org/library/os.html#files-and-directories.
FILE_UPLOAD_DIRECTORY_PERMISSIONS = None
# Python module path where user will place custom format definition.
# The directory where this setting is pointing should contain subdirectories
# named as the locales, containing a formats.py file
# (i.e. "myproject.locale" for myproject/locale/en/formats.py etc. use)
FORMAT_MODULE_PATH = None
# Default formatting for date objects. See all available format strings here:
# https://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
DATE_FORMAT = 'N j, Y'
# Default formatting for datetime objects. See all available format strings here:
# https://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
DATETIME_FORMAT = 'N j, Y, P'
# Default formatting for time objects. See all available format strings here:
# https://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
TIME_FORMAT = 'P'
# Default formatting for date objects when only the year and month are relevant.
# See all available format strings here:
# https://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
YEAR_MONTH_FORMAT = 'F Y'
# Default formatting for date objects when only the month and day are relevant.
# See all available format strings here:
# https://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
MONTH_DAY_FORMAT = 'F j'
# Default short formatting for date objects. See all available format strings here:
# https://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
SHORT_DATE_FORMAT = 'm/d/Y'
# Default short formatting for datetime objects.
# See all available format strings here:
# https://docs.djangoproject.com/en/dev/ref/templates/builtins/#date
SHORT_DATETIME_FORMAT = 'm/d/Y P'
# Default formats to be used when parsing dates from input boxes, in order
# See all available format string here:
# https://docs.python.org/library/datetime.html#strftime-behavior
# * Note that these format strings are different from the ones to display dates
DATE_INPUT_FORMATS = [
'%Y-%m-%d', '%m/%d/%Y', '%m/%d/%y', # '2006-10-25', '10/25/2006', '10/25/06'
'%b %d %Y', '%b %d, %Y', # 'Oct 25 2006', 'Oct 25, 2006'
'%d %b %Y', '%d %b, %Y', # '25 Oct 2006', '25 Oct, 2006'
'%B %d %Y', '%B %d, %Y', # 'October 25 2006', 'October 25, 2006'
'%d %B %Y', '%d %B, %Y', # '25 October 2006', '25 October, 2006'
]
# Default formats to be used when parsing times from input boxes, in order
# See all available format string here:
# https://docs.python.org/library/datetime.html#strftime-behavior
# * Note that these format strings are different from the ones to display dates
TIME_INPUT_FORMATS = [
'%H:%M:%S', # '14:30:59'
'%H:%M:%S.%f', # '14:30:59.000200'
'%H:%M', # '14:30'
]
# Default formats to be used when parsing dates and times from input boxes,
# in order
# See all available format string here:
# https://docs.python.org/library/datetime.html#strftime-behavior
# * Note that these format strings are different from the ones to display dates
DATETIME_INPUT_FORMATS = [
'%Y-%m-%d %H:%M:%S', # '2006-10-25 14:30:59'
'%Y-%m-%d %H:%M:%S.%f', # '2006-10-25 14:30:59.000200'
'%Y-%m-%d %H:%M', # '2006-10-25 14:30'
'%Y-%m-%d', # '2006-10-25'
'%m/%d/%Y %H:%M:%S', # '10/25/2006 14:30:59'
'%m/%d/%Y %H:%M:%S.%f', # '10/25/2006 14:30:59.000200'
'%m/%d/%Y %H:%M', # '10/25/2006 14:30'
'%m/%d/%Y', # '10/25/2006'
'%m/%d/%y %H:%M:%S', # '10/25/06 14:30:59'
'%m/%d/%y %H:%M:%S.%f', # '10/25/06 14:30:59.000200'
'%m/%d/%y %H:%M', # '10/25/06 14:30'
'%m/%d/%y', # '10/25/06'
]
# First day of week, to be used on calendars
# 0 means Sunday, 1 means Monday...
FIRST_DAY_OF_WEEK = 0
# Decimal separator symbol
DECIMAL_SEPARATOR = '.'
# Boolean that sets whether to add thousand separator when formatting numbers
USE_THOUSAND_SEPARATOR = False
# Number of digits that will be together, when splitting them by
# THOUSAND_SEPARATOR. 0 means no grouping, 3 means splitting by thousands...
NUMBER_GROUPING = 0
# Thousand separator symbol
THOUSAND_SEPARATOR = ','
# The tablespaces to use for each model when not specified otherwise.
DEFAULT_TABLESPACE = ''
DEFAULT_INDEX_TABLESPACE = ''
# Default X-Frame-Options header value
X_FRAME_OPTIONS = 'SAMEORIGIN'
USE_X_FORWARDED_HOST = False
USE_X_FORWARDED_PORT = False
# The Python dotted path to the WSGI application that Django's internal server
# (runserver) will use. If `None`, the return value of
# 'django.core.wsgi.get_wsgi_application' is used, thus preserving the same
# behavior as previous versions of Django. Otherwise this should point to an
# actual WSGI application object.
WSGI_APPLICATION = None
# If your Django app is behind a proxy that sets a header to specify secure
# connections, AND that proxy ensures that user-submitted headers with the
# same name are ignored (so that people can't spoof it), set this value to
# a tuple of (header_name, header_value). For any requests that come in with
# that header/value, request.is_secure() will return True.
# WARNING! Only set this if you fully understand what you're doing. Otherwise,
# you may be opening yourself up to a security risk.
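# Example (illustrative):
# SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https')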
SECURE_PROXY_SSL_HEADER = None
##############
# MIDDLEWARE #
##############
# List of middleware to use. Order is important; in the request phase, these
# middleware will be applied in the order given, and in the response
# phase the middleware will be applied in reverse order.
MIDDLEWARE = []
############
# SESSIONS #
############
# Cache to store session data if using the cache session backend.
SESSION_CACHE_ALIAS = 'default'
# Cookie name. This can be whatever you want.
SESSION_COOKIE_NAME = 'sessionid'
# Age of cookie, in seconds (default: 2 weeks).
SESSION_COOKIE_AGE = 60 * 60 * 24 * 7 * 2
# A string like "example.com", or None for standard domain cookie.
SESSION_COOKIE_DOMAIN = None
# Whether the session cookie should be secure (https:// only).
SESSION_COOKIE_SECURE = False
# The path of the session cookie.
SESSION_COOKIE_PATH = '/'
# Whether to use the HttpOnly flag.
SESSION_COOKIE_HTTPONLY = True
# Whether to set the flag restricting cookie leaks on cross-site requests.
# This can be 'Lax', 'Strict', or None to disable the flag.
SESSION_COOKIE_SAMESITE = 'Lax'
# Whether to save the session data on every request.
SESSION_SAVE_EVERY_REQUEST = False
# Whether a user's session cookie expires when the Web browser is closed.
SESSION_EXPIRE_AT_BROWSER_CLOSE = False
# The module to store session data
SESSION_ENGINE = 'django.contrib.sessions.backends.db'
# Directory to store session files if using the file session module. If None,
# the backend will use a sensible default.
SESSION_FILE_PATH = None
# class to serialize session data
SESSION_SERIALIZER = 'django.contrib.sessions.serializers.JSONSerializer'
#########
# CACHE #
#########
# The cache backends to use.
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.locmem.LocMemCache',
}
}
CACHE_MIDDLEWARE_KEY_PREFIX = ''
CACHE_MIDDLEWARE_SECONDS = 600
CACHE_MIDDLEWARE_ALIAS = 'default'
##################
# AUTHENTICATION #
##################
AUTH_USER_MODEL = 'auth.User'
AUTHENTICATION_BACKENDS = ['django.contrib.auth.backends.ModelBackend']
LOGIN_URL = '/accounts/login/'
LOGIN_REDIRECT_URL = '/accounts/profile/'
LOGOUT_REDIRECT_URL = None
# The number of days a password reset link is valid for
PASSWORD_RESET_TIMEOUT_DAYS = 3
# the first hasher in this list is the preferred algorithm. any
# password using different algorithms will be converted automatically
# upon login
PASSWORD_HASHERS = [
'django.contrib.auth.hashers.PBKDF2PasswordHasher',
'django.contrib.auth.hashers.PBKDF2SHA1PasswordHasher',
'django.contrib.auth.hashers.Argon2PasswordHasher',
'django.contrib.auth.hashers.BCryptSHA256PasswordHasher',
]
AUTH_PASSWORD_VALIDATORS = []
###########
# SIGNING #
###########
SIGNING_BACKEND = 'django.core.signing.TimestampSigner'
########
# CSRF #
########
# Dotted path to callable to be used as view when a request is
# rejected by the CSRF middleware.
CSRF_FAILURE_VIEW = 'django.views.csrf.csrf_failure'
# Settings for CSRF cookie.
CSRF_COOKIE_NAME = 'csrftoken'
CSRF_COOKIE_AGE = 60 * 60 * 24 * 7 * 52
CSRF_COOKIE_DOMAIN = None
CSRF_COOKIE_PATH = '/'
CSRF_COOKIE_SECURE = False
CSRF_COOKIE_HTTPONLY = False
CSRF_COOKIE_SAMESITE = 'Lax'
CSRF_HEADER_NAME = 'HTTP_X_CSRFTOKEN'
CSRF_TRUSTED_ORIGINS = []
CSRF_USE_SESSIONS = False
############
# MESSAGES #
############
# Class to use as messages backend
MESSAGE_STORAGE = 'django.contrib.messages.storage.fallback.FallbackStorage'
# Default values of MESSAGE_LEVEL and MESSAGE_TAGS are defined within
# django.contrib.messages to avoid imports in this settings file.
###########
# LOGGING #
###########
# The callable to use to configure logging
LOGGING_CONFIG = 'logging.config.dictConfig'
# Custom logging configuration.
LOGGING = {}
# Default exception reporter filter class used in case none has been
# specifically assigned to the HttpRequest instance.
DEFAULT_EXCEPTION_REPORTER_FILTER = 'django.views.debug.SafeExceptionReporterFilter'
###########
# TESTING #
###########
# The name of the class to use to run the test suite
TEST_RUNNER = 'django.test.runner.DiscoverRunner'
# Apps that don't need to be serialized at test database creation time
# (only apps with migrations are to start with)
TEST_NON_SERIALIZED_APPS = []
############
# FIXTURES #
############
# The list of directories to search for fixtures
FIXTURE_DIRS = []
###############
# STATICFILES #
###############
# A list of locations of additional static files
STATICFILES_DIRS = []
# The default file storage backend used during the build process
STATICFILES_STORAGE = 'django.contrib.staticfiles.storage.StaticFilesStorage'
# List of finder classes that know how to find static files in
# various locations.
STATICFILES_FINDERS = [
'django.contrib.staticfiles.finders.FileSystemFinder',
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
# 'django.contrib.staticfiles.finders.DefaultStorageFinder',
]
##############
# MIGRATIONS #
##############
# Migration module overrides for apps, by app label.
MIGRATION_MODULES = {}
#################
# SYSTEM CHECKS #
#################
# List of all issues generated by system checks that should be silenced. Light
# issues like warnings, infos or debugs will not generate a message. Silencing
# serious issues like errors and criticals does not result in hiding the
# message, but Django will not stop you from e.g. running server.
SILENCED_SYSTEM_CHECKS = []
#######################
# SECURITY MIDDLEWARE #
#######################
SECURE_BROWSER_XSS_FILTER = False
SECURE_CONTENT_TYPE_NOSNIFF = False
SECURE_HSTS_INCLUDE_SUBDOMAINS = False
SECURE_HSTS_PRELOAD = False
SECURE_HSTS_SECONDS = 0
SECURE_REDIRECT_EXEMPT = []
SECURE_SSL_HOST = None
SECURE_SSL_REDIRECT = False
/Flask-Turbo-Boost-0.2.8.tar.gz/Flask-Turbo-Boost-0.2.8/flask_turbo_boost/api_project/application/__init__.py
import sys
import os
# Insert project root path to sys.path
project_path = os.path.abspath(os.path.join(os.path.dirname(__file__), '..'))
if project_path not in sys.path:
sys.path.insert(0, project_path)
import time
import logging
from flask import Flask, request, url_for, g, render_template, session
from flask_wtf.csrf import CSRFProtect
from flask_debugtoolbar import DebugToolbarExtension
from werkzeug.wsgi import SharedDataMiddleware
from werkzeug.contrib.fixers import ProxyFix
from six import iteritems
from flask_security.core import current_user, AnonymousUser
from config import load_config
# convert python's encoding to utf8
try:
from imp import reload
reload(sys)
sys.setdefaultencoding('utf8')
except (AttributeError, NameError):
pass
def create_app():
"""Create Flask app."""
config = load_config()
app = Flask(__name__)
app.config.from_object(config)
# Proxy fix
app.wsgi_app = ProxyFix(app.wsgi_app)
# ensure instance path exists
try:
os.makedirs(app.instance_path)
except OSError:
pass
# Register components
register_logs(app)
register_db(app)
register_security(app)
register_routes(app)
register_error_handle(app)
register_hooks(app)
register_scripts(app)
register_shell_context(app)
return app
def register_logs(app):
from .utils.sentry import sentry
if app.testing:
app.logger.setLevel(logging.DEBUG)
return
if app.debug:
# DebugToolbarExtension(app)
app.logger.setLevel(logging.DEBUG)
if os.environ.get('MODE') == 'PRODUCTION':
app.logger.addHandler(logging.StreamHandler())
app.logger.setLevel(logging.ERROR)
# if set gunicorn
gunicorn_logger = logging.getLogger('gunicorn.error')
if gunicorn_logger:
app.logger.handlers = gunicorn_logger.handlers
app.logger.setLevel(gunicorn_logger.level)
# sentry for production
if app.config.get('SENTRY_DSN'):
app.logger.info('SENTRY active')
sentry.init_app(app, dsn=app.config.get('SENTRY_DSN'),
logging=True, level=logging.ERROR)
else:
# enable sentry for development
if app.config.get('SENTRY_DSN'):
app.logger.info('SENTRY is enable')
sentry.init_app(app, dsn=app.config.get('SENTRY_DSN'))
def register_security(app):
from flask_security import SQLAlchemyUserDatastore, Security
from .models import db, User, Role
user_datastore = SQLAlchemyUserDatastore(db, User, Role)
security = Security(app, user_datastore)
from flask import session
@app.before_request
def before_request():
g.user = current_user
if g.user is not None and g.user.has_role('admin'):
g._before_request_time = time.time()
@app.after_request
def after_request(response):
if hasattr(g, '_before_request_time'):
delta = time.time() - g._before_request_time
response.headers['X-Render-Time'] = delta * 1000
return response
def register_db(app):
"""Register models."""
from .models import db
db.init_app(app)
def register_routes(app):
"""Register routes."""
from .controllers import api_v1
from flask.blueprints import Blueprint
for module in _import_submodules_from_package(api_v1):
bp = getattr(module, 'bp')
if bp and isinstance(bp, Blueprint):
app.register_blueprint(bp)
def register_error_handle(app):
"""Register HTTP error pages."""
@app.errorhandler(403)
def page_403(error):
return render_template('site/403/403.html'), 403
@app.errorhandler(404)
def page_404(error):
return render_template('site/404/404.html'), 404
@app.errorhandler(500)
def page_500(error):
return render_template('site/500/500.html'), 500
def register_hooks(app):
pass
def register_scripts(app):
# init migration script
from flask_migrate import Migrate
from .models import db
Migrate(app, db)
from scripts.seed import seed_cli
app.cli.add_command(seed_cli)
def register_shell_context(app):
@app.shell_context_processor
def make_shell_context():
from .models import db
import application.models as m
return dict(app=app, db=db, m=m)
def _get_template_name(template_reference):
"""Get current template name."""
return template_reference._TemplateReference__context.name
def _import_submodules_from_package(package):
import pkgutil
modules = []
for importer, modname, ispkg in pkgutil.iter_modules(package.__path__,
prefix=package.__name__ + "."):
modules.append(__import__(modname, fromlist="dummy"))
return modules
# API Register Helpers -----------------------------------------------------------------
def register_allow_origin(app):
@app.after_request
def after_request(response):
response.headers['Access-Control-Allow-Origin'] = '*'
response.headers['Access-Control-Allow-Credentials'] = True
response.headers['Access-Control-Allow-Methods'] = 'PUT, DELETE, GET, POST, OPTIONS'
response.headers['Access-Control-Allow-Headers'] = 'Access-Token,DNT,User-Agent,X-Requested-With,If-Modified-Since,Cache-Control,Content-Type,Range'
response.headers['Access-Control-Expose-Headers'] = 'Content-Length,Content-Range'
return response
def register_jsonencoder(app):
    from flask.json import JSONEncoder
    from datetime import datetime, date, time
    from decimal import Decimal
    # timezone() is called in CustomJSONEncoder.default below but was never
    # imported; pytz is assumed here as the provider of named timezones
    from pytz import timezone
    import enum
class CustomJSONEncoder(JSONEncoder):
def default(self, obj):
try:
if isinstance(obj, datetime):
# if obj.utcoffset() is not None:
# obj = obj - obj.utcoffset()
return obj.astimezone(timezone('Asia/Bangkok')).isoformat()
if isinstance(obj, date):
return str(obj.isoformat())
if isinstance(obj, Decimal):
return str(obj)
if isinstance(obj, enum.Enum):
return str(obj.value)
if isinstance(obj, time):
return str(obj)
iterable = iter(obj)
except TypeError:
pass
return JSONEncoder.default(self, obj)
app.json_encoder = CustomJSONEncoder
def create_celery(app=None):
    # Celery is referenced below but was never imported in this module
    from celery import Celery
    app = app or create_app()
celery = Celery(
app.import_name,
backend=app.config.get('RESULT_BACKEND'),
broker=app.config.get('BROKER_URL'),
timezone='Asia/Bangkok')
celery.conf.update(task_always_eager=False)
class ContextTask(celery.Task):
def __call__(self, *args, **kwargs):
with app.app_context():
return self.run(*args, **kwargs)
celery.Task = ContextTask
return celery | PypiClean |
/L-System-Visualizer-1.0.7.tar.gz/L-System-Visualizer-1.0.7/lsystem/core/my_ui.py | import matplotlib.pyplot as plt
import numpy as np
from PyQt5.QtWidgets import QStackedWidget, QVBoxLayout, QPushButton, QWidget, QGridLayout, QLabel, QFileDialog
from PyQt5 import QtWidgets, QtCore
from PyQt5.QtCore import Qt
from lsystem.core.lsystem_utils import (
load_saved_lsystems,
get_saved_lsystem,
generate_lsystem_2d,
generate_lsystem_3d,
remove_saved_lsystem,
)
from PyQt5.QtGui import QIcon
from lsystem.core.input_check import input_check
from lsystem.core.lsystem_2d_widget import LSystem2DWidget
from lsystem.core.lsystem_3d_widget import LSystem3DWidget
from lsystem.core.fract_menu import FractalDimension, fractal_dim_calc
from lsystem.boxcounting3d.calc import calc_fractal_dim3D
import copy
import os
class CustomLineEdit(QtWidgets.QLineEdit):
"""Class that enables input in a textbox by subclassing QLineEdit"""
clicked = QtCore.pyqtSignal()
def __init__(self):
"""initializes variables"""
super().__init__()
self.valid = True
self.error_message = "X"
# Needs to be CamelCase
def mousePressEvent(self, QMouseEvent):
"""Triggers clicked.emit()"""
self.clicked.emit()
def reset_color(self):
"""Resets the text color of the textbox"""
self.setStyleSheet("color: black;")
def clear_box(self):
"""Clears input from the textbox"""
self.setText("")
self.setStyleSheet("color: black;")
def reset_box(self):
"""Resets the textbox"""
self.reset_color()
self.clear_box()
class UIWidget(QWidget):
""" Class that holds all of the widgets for viewing """
def __init__(self):
""" Initializes class and variables """
super(UIWidget, self).__init__()
self.axiom = QLabel("Axiom")
self.angle = QLabel("Angles(degrees)")
self.iters = QLabel("Iterations")
self.axiom_edit = CustomLineEdit()
self.angle_edit = CustomLineEdit()
self.iters_edit = CustomLineEdit()
self.text_boxes = [self.axiom_edit, self.angle_edit, self.iters_edit]
self.prod_plus = QPushButton("+", self)
self.lsys_button = QPushButton("Generate L System", self)
self.boxcount_button = QPushButton("Fractal Dim", self)
self.widget = QWidget()
self.scroll_area = QtWidgets.QScrollArea()
self.layout_examples = QVBoxLayout(self.widget)
self.layout_examples.setAlignment(Qt.AlignTop)
self.prods = 1
self.prod_rules_edit = []
self.examples = []
self.minuses = None
self.made_angle = False
self.prod_percent = []
self.amount = 0
self.index = 0
self.frac_points = []
self.made_line = False
self.prod_rules = []
self.verts = [] # This will store the vertices from generate_lsystem
self.saved_lsystems = load_saved_lsystems()
self.two_d = LSystem2DWidget()
self.three_d = LSystem3DWidget()
self.fractal_menu = FractalDimension(self)
self.dims = QStackedWidget()
self.dims.addWidget(self.two_d)
self.dims.addWidget(self.three_d)
self.dims.setCurrentWidget(self.two_d)
self.graphix = self.two_d
self.init_UI()
self.alphabet = [
"F",
"f",
"G",
"g",
"H",
"h",
"^",
"&",
"-",
"+",
"[",
"]",
"|",
"(",
")",
">",
"<",
" ",
]
self.ctrl_char = [
"A",
"B",
"C",
"D",
"E",
"I",
"J",
"K",
"L",
"M",
"N",
"O",
"P",
"Q",
"R",
"S",
"T",
"U",
"V",
"W",
"X",
"Y",
"Z",
]
def init_UI(self):
""" Creates and adds all widgets in the viewport and sets the layout """
# renames the window
self.setWindowTitle("L-Systems Generator")
self.layout = QGridLayout()
self.init_buttons()
self.init_text_boxes()
self.add_widgets()
self.setLayout(self.layout)
self.setGeometry(500, 500, 500, 500)
def init_text_boxes(self):
"""Creates textboxes for the UI """
self.prod_rules.append(QLabel("Production Rule " + str(self.prods)))
# creates the text box for each label
self.axiom_edit.returnPressed.connect(self.lsys_button.click)
self.axiom_edit.clicked.connect(lambda: self.axiom_edit.reset_color())
self.prod_rules_edit.append(CustomLineEdit())
self.prod_rules_edit[0].clicked.connect(
lambda: self.prod_rules_edit[0].reset_color()
)
self.text_boxes.append(self.prod_rules_edit[-1])
self.prod_rules_edit[0].returnPressed.connect(self.lsys_button.click)
self.prod_rules_edit[0].textChanged.connect(lambda: self.show_popup())
self.prod_percent.append(CustomLineEdit())
self.prod_percent[0].setFixedWidth(50)
self.prod_percent[0].setText("100%")
self.text_boxes.append(self.prod_percent[-1])
self.angle_edit.returnPressed.connect(self.lsys_button.click)
self.angle_edit.clicked.connect(lambda: self.angle_edit.reset_color())
self.iters_edit.returnPressed.connect(self.lsys_button.click)
self.iters_edit.clicked.connect(lambda: self.iters_edit.reset_color())
self.prod_plus.clicked.connect(self.more_prods)
def init_buttons(self):
"""Creates buttons for the UI"""
# makes the lsys generator button
self.lsys_button.clicked.connect(self.on_lsys_button_clicked)
self.lsys_button.setAutoDefault(True)
self.boxcount_button.clicked.connect(self.on_boxcount_button_clicked)
self.boxcount_button.setAutoDefault(True)
self.scroll_area.setVerticalScrollBarPolicy(Qt.ScrollBarAlwaysOn)
self.scroll_area.setHorizontalScrollBarPolicy(Qt.ScrollBarAlwaysOff)
self.scroll_area.setWidgetResizable(True)
self.scroll_area.setFixedWidth(150)
self.scroll_area.setWidget(self.widget)
self.precons = ['SierpinksiTriangle', 'KochCurve', 'KochSnowflake',
'KochIsland', 'PeanoCurve', 'DragonCurve', 'HilbertCurve',
'TreeExample', 'IslandsandLakes']
for i, key in enumerate(self.saved_lsystems["two-d"]):
self.examples.append(QPushButton())
if i < len(self.precons):
self.examples[i].setIcon(QIcon('{}/lsystem/assets/images/{}.png'.format(os.getcwd(), self.precons[i])))
self.examples[i].setIconSize(QtCore.QSize(120, 100))
else:
self.examples[i].setText(key)
self.examples[i].setContextMenuPolicy(QtCore.Qt.CustomContextMenu)
self.examples[i].customContextMenuRequested.connect(lambda state, x=key: self.del_example(str(x), "two-d"))
self.examples[i].clicked.connect(
lambda state, x=key: self.gen_example(str(x))
)
self.layout_examples.addWidget(self.examples[i])
def reload_presets(self):
"""pulls saved lsystems from file"""
self.saved_lsystems = load_saved_lsystems()
self.set_presets()
    def set_presets(self):
        """Shows preset L-Systems for the appropriate dimension"""
if self.is_2d():
for widget in self.examples:
self.layout_examples.removeWidget(widget)
widget.deleteLater()
widget=None
self.examples = []
for i, key in enumerate(self.saved_lsystems["two-d"]):
self.examples.append(QPushButton())
if i < len(self.precons):
self.examples[i].setIcon(QIcon('{}/lsystem/assets/images/{}.png'.format(os.getcwd(), self.precons[i])))
self.examples[i].setIconSize(QtCore.QSize(120, 100))
else:
self.examples[i].setContextMenuPolicy(QtCore.Qt.CustomContextMenu)
self.examples[i].customContextMenuRequested.connect(lambda state, x=key: self.del_example(str(x),"two-d"))
self.examples[i].setText(key)
self.examples[i].clicked.connect(
lambda state, x=key: self.gen_example(str(x))
)
self.layout_examples.addWidget(self.examples[i])
elif not self.is_2d():
for widget in self.examples:
self.layout_examples.removeWidget(widget)
widget.deleteLater()
widget=None
self.examples = []
for i, key in enumerate(self.saved_lsystems["three-d"]):
self.examples.append(QPushButton())
self.examples[i].setText(key)
self.examples[i].clicked.connect(
lambda state, x=key: self.gen_example(str(x))
)
self.layout_examples.addWidget(self.examples[i])
                if(i>1): # TODO - This is temporary until we have a way of separating 3d precons from 2d precons
self.examples[i].setContextMenuPolicy(QtCore.Qt.CustomContextMenu)
self.examples[i].customContextMenuRequested.connect(lambda state, x=key: self.del_example(str(x),"three-d"))
@QtCore.pyqtSlot()
def on_lsys_button_clicked(self):
"""Generates the L-System"""
self.gen_sys()
def boxcount_2d(self):
"""Calculates the dimensionality of a 2D L-System"""
self.fractal_menu.show()
start_size = 8
num_sizes = 7
self.x_arr = []
self.y_arr = []
fract_avg = []
end_size = start_size * (2 ** num_sizes)
temp_verts = copy.deepcopy(self.verts)
fractal_dim = fractal_dim_calc(temp_verts, end_size, num_sizes)
for i in range(num_sizes):
self.x_arr.append(np.log2((start_size)))
self.y_arr.append(fractal_dim[i])
fract_avg.append(np.polyfit(self.x_arr, self.y_arr, 1)[0])
print("(box width = 1/", start_size, ") FRACTAL AVG: ", fract_avg[-1])
start_size = start_size * 2
# y_arr = np.asarray(y_arr)
# x_arr = np.asarray(x_arr)
print("Made it this far")
figi, ax = plt.subplots()
line, = ax.plot(self.x_arr, self.y_arr, "bo", picker=5)
ax.set_title(
"Fractal dimension = {}".format(np.polyfit(self.x_arr, self.y_arr, 1)[0])
) # np.average(fract_avg)))
figi.canvas.mpl_connect('pick_event', self.onpick1)
#figi.show()
plt.show()
print("AVERAGE: ", np.average(fract_avg))
def boxcount_3d(self):
"""Calculates the dimensionality of a 3D L-System"""
mesh = self.graphix.mesh
if(mesh is not None):
calc_fractal_dim3D(mesh)
def is_2d(self):
"""Returns true if 2D is loaded, false if 3D is loaded."""
if (self.dims.currentWidget().__class__.__name__ == 'LSystem3DWidget'):
return False
return True
def on_boxcount_button_clicked(self):
"""Determines which type of dimension checking is done"""
if(self.is_2d()):
self.boxcount_2d()
else:
self.boxcount_3d()
def onpick1(self, event):
print("I am an EVENT1")
#plt.close(fig=None)
ind = event.ind[0]+1
for i in range(ind):
if(len(self.x_arr)==2):
break
# always delete index 0 because array shifts left after delete
self.x_arr = np.delete(self.x_arr, 0, axis=None)
self.y_arr = np.delete(self.y_arr, 0, axis=None)
print("The x array is: ", self.x_arr)
fig1, ax = plt.subplots()
ax.plot(self.x_arr, self.y_arr, "bo", picker=5)
ax.set_title(
"Fractal dimension = {}".format(np.polyfit(self.x_arr, self.y_arr, 1)[0])
) # np.average(fract_avg)))
fig1.canvas.mpl_connect('pick_event', self.onpick2)
fig1.show()
return True
def onpick2(self, event):
print("I am an EVENT2")
#plt.close(fig=None)
ind = event.ind[0]
num_to_remove = len(self.x_arr)-ind
for i in range(num_to_remove):
if(len(self.x_arr)==2):
break
# always delete index -1
self.x_arr = np.delete(self.x_arr, -1, axis=None)
self.y_arr = np.delete(self.y_arr, -1, axis=None)
fig2, ax = plt.subplots()
ax.plot(self.x_arr, self.y_arr, "bo", picker=5)
ax.set_title(
"Fractal dimension = {}".format(np.polyfit(self.x_arr, self.y_arr, 1)[0])
) # np.average(fract_avg)))
fig2.show()
return True
def add_widgets(self):
"""Adds widgets to window"""
self.layout.addWidget(self.axiom, 1, 0)
self.layout.addWidget(self.axiom_edit, 1, 1, 1, 10)
self.layout.addWidget(self.prod_rules[0], 2, 0, 1, 1)
self.layout.addWidget(self.prod_rules_edit[0], 2, 1, 1, 9)
self.layout.addWidget(self.prod_percent[0], 2, 9)
self.layout.addWidget(self.prod_plus, 2, 10, 1, 1)
self.layout.addWidget(self.angle, 10, 0)
self.layout.addWidget(self.angle_edit, 10, 1, 1, 10)
self.layout.addWidget(self.iters, 13, 0)
self.layout.addWidget(self.iters_edit, 13, 1, 1, 10)
self.layout.addWidget(self.scroll_area, 14, 0, 1, 1)
self.layout.addWidget(self.boxcount_button, 16, 0, 1, 1)
self.layout.addWidget(self.dims, 14, 1, 5, -1)
self.layout.addWidget(self.lsys_button, 20, 0, 1, -1)
def show_popup(self):
"""Adds and removes extra textboxes as needed"""
if self.is_2d():
self.reset_text_box_color()
prod_rule = ""
rules = ""
self.amount = 0
for prod in self.prod_rules_edit:
prod_rule += prod.text()
temp = prod.text()
temp = temp.replace(" ", "")
temp = temp[:].split(":")[0]
rules += temp
rules += " "
all_prod_rule = prod_rule + self.axiom_edit.text()
if (")" in all_prod_rule or "(" in all_prod_rule) and self.made_angle is False:
self.turn_angle = QLabel("Turning Angle")
self.turn_angle_edit = CustomLineEdit()
self.text_boxes.append(self.turn_angle_edit)
self.turn_angle_edit.returnPressed.connect(self.lsys_button.click)
self.turn_angle_edit.clicked.connect(
lambda: self.turn_angle_edit.reset_color()
)
self.layout.addWidget(self.turn_angle, 11, 0)
self.layout.addWidget(self.turn_angle_edit, 11, 1, 1, 10)
self.made_angle = True
if (
self.made_angle is True
and not "(" in all_prod_rule
and not ")" in all_prod_rule
and self.made_angle is True
):
self.text_boxes.remove(self.turn_angle_edit)
self.layout.removeWidget(self.turn_angle_edit)
self.layout.removeWidget(self.turn_angle)
self.turn_angle.deleteLater()
self.turn_angle_edit.deleteLater()
self.turn_angle_edit = None
self.turn_angle = None
self.made_angle = False
if (">" in all_prod_rule or "<" in all_prod_rule) and self.made_line is False:
self.line_scale = QLabel("Line Scale")
self.line_scale_edit = CustomLineEdit()
self.text_boxes.append(self.line_scale_edit)
self.line_scale_edit.returnPressed.connect(self.lsys_button.click)
self.line_scale_edit.clicked.connect(
lambda: self.line_scale_edit.reset_color()
)
self.layout.addWidget(self.line_scale, 12, 0)
self.layout.addWidget(self.line_scale_edit, 12, 1, 1, 10)
self.made_line = True
if (
self.made_line is True
and not "<" in all_prod_rule
and not ">" in all_prod_rule
and self.made_line is True
):
self.text_boxes.remove(self.line_scale_edit)
self.layout.removeWidget(self.line_scale_edit)
self.layout.removeWidget(self.line_scale)
self.line_scale.deleteLater()
self.line_scale_edit.deleteLater()
self.line_scale_edit = None
self.line_scale = None
self.made_line = False
# Probably doesn't need self as a param, can just be static.
def gen_rule_dict(self, prod_rules):
"""
Generates a rule dictionary from an array of production rules taken from the UI.
formats production rules as
{Symbol1: [[probability,replacement],...], Symbol2: [[probability,replacement]... ], ...}
"""
rules = {}
for rule in prod_rules:
rule = rule.text()
rule = rule.replace(" ", "")
prod = rule.split(":")
rules[prod[0]] = []
for i, rule in enumerate(prod_rules):
rule = rule.text()
rule = rule.replace(" ", "")
prod = rule.split(":")
rules[prod[0]].append([float(self.prod_percent[i].text().split("%")[0])/100, prod[1]])
return rules
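    # Added illustrative example of the returned structure: one rule box
    # containing "F: F+F-F" with its percentage box at "100%" yields
    # {"F": [[1.0, "F+F-F"]]}; two boxes "F: F+F" at "50%" and "F: F-F" at
    # "50%" yield {"F": [[0.5, "F+F"], [0.5, "F-F"]]}.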
def close_event(self):
print("[ INFO ] Exiting...")
exit()
    def more_prods(self):
        """ Adds textboxes for additional production rules, maximum 8."""
if self.prods < 8:
self.prods = self.prods + 1
self.prod_rules.append(QLabel("Production Rule " + str(self.prods)))
self.prod_rules_edit.append(CustomLineEdit())
self.prod_percent.append(CustomLineEdit())
self.text_boxes.append(self.prod_rules_edit[-1])
self.text_boxes.append(self.prod_percent[-1])
self.prod_percent[-1].setFixedWidth(50)
self.prod_rules_edit[self.prods - 1].textChanged.connect(
lambda: self.show_popup()
)
self.prod_rules_edit[-1].returnPressed.connect(self.lsys_button.click)
self.prod_rules_edit[-1].clicked.connect(
lambda: self.prod_rules_edit[-1].reset_color()
)
self.layout.addWidget(self.prod_rules[self.prods - 1], self.prods + 1, 0)
self.layout.addWidget(
self.prod_rules_edit[self.prods - 1], self.prods + 1, 1, 1, 9
)
self.layout.addWidget(self.prod_percent[self.prods - 1], self.prods + 1, 9)
if self.minuses is not None:
                # remove last minuses
self.layout.removeWidget(self.minuses)
self.minuses.deleteLater()
self.minuses = None
self.minuses = QPushButton("-", self)
self.minuses.clicked.connect(self.less_prods)
self.layout.addWidget(self.minuses, self.prods + 1, 10, 1, 1)
self.prod_percent[-1].setText("100%")
def less_prods(self):
""" Removes textboxes for production rules when less are needed, minimum 1."""
if self.prods > 1:
self.text_boxes.remove(self.prod_rules_edit[-1])
self.text_boxes.remove(self.prod_percent[-1])
# remove last widget prodrules
self.layout.removeWidget(self.prod_rules[-1])
self.prod_rules[-1].deleteLater()
self.prod_rules.pop()
# remove last widget prodrulesEdit
self.layout.removeWidget(self.prod_rules_edit[-1])
self.prod_rules_edit[-1].deleteLater()
self.prod_rules_edit.pop()
# remove last percentage
self.layout.removeWidget(self.prod_percent[-1])
self.prod_percent[-1].deleteLater()
self.prod_percent.pop()
# remove last percentage
# for i in self.index:
# for j in i:
# if j == self.prods-1:
# print("WE NEED TO DELETE")
# print(i)
# print("HELLO")
# self.layout.removeWidget(self.prodPercent[-1])
# self.prodPercent[-1].deleteLater()
# self.prodPercent.pop()
# self.amount = self.amount - 1
# print(len(self.prodPercent))
# remove last minuses
self.layout.removeWidget(self.minuses)
self.minuses.deleteLater()
self.minuses = None
self.prods = self.prods - 1
if self.prods > 1:
self.minuses = QPushButton("-", self)
self.minuses.clicked.connect(self.less_prods)
self.layout.addWidget(self.minuses, self.prods + 1, 10, 1, 1)
def reset_input_boxes(self):
"""Resets textboxes to initial configuration, does not clear the fractal from the widget."""
while self.prods >1:
self.less_prods()
self.prod_rules_edit[-1].setText("")
self.axiom_edit.setText("")
self.prod_percent[0].setText("100%")
self.angle_edit.setText("")
self.iters_edit.setText("")
def gen_sys(self):
"""Generates the L-System described by the production rules"""
if input_check(self):
axiom_input = self.axiom_edit.text()
# prodInput = [self.prodrlesEdit.text()] #changed to array
angle_input = self.angle_edit.text()
if self.made_angle:
turn_angle_input = self.turn_angle_edit.text()
else:
turn_angle_input = 0
if self.made_line:
line_scale_input = self.line_scale_edit.text()
else:
line_scale_input = 1
iters_input = self.iters_edit.text()
# Format input for use
rules = self.gen_rule_dict(self.prod_rules_edit)
# Generate rule grammar dictionary.
grammar = {
"rules": rules,
"axiom": axiom_input,
"iterations": int(iters_input),
"angle": float(angle_input),
"turnAngle": float(turn_angle_input),
"lineScale": float(line_scale_input),
}
if (self.dims.currentWidget().__class__.__name__ == 'LSystem3DWidget'):
self.mesh = generate_lsystem_3d(grammar)
if self.mesh ==-1:
                    print("[ ERROR ] Invalid input, no vertices generated.")
else:
self.graphix.clear()
self.graphix.add_mesh(self.mesh)
else:
self.verts = generate_lsystem_2d(grammar)
if self.verts ==-1:
                    print("[ ERROR ] Invalid input, no vertices generated.")
else:
# Sets verts on graphics widget and draws
self.graphix.clear()
self.graphix.set_graph(self.verts)
# for i in range(1,len(self.verts)):
# self.graphix.set_graph(self.verts[i],1) #split = true
self.graphix.update()
self.graphix.reset_camera()
def gen_example(self, example):
"""Loads preset L-Systems"""
self.axiom_edit.reset_box()
for prod in self.prod_rules_edit:
prod.reset_box()
self.angle_edit.reset_box()
self.iters_edit.reset_box()
if(self.is_2d()):
grammar = get_saved_lsystem(example, self.saved_lsystems["two-d"])
else:
grammar = get_saved_lsystem(example, self.saved_lsystems["three-d"])
self.axiom_edit.setText(grammar["axiom"])
num_rules = 0
for key in grammar["rules"]:
if isinstance(grammar["rules"][key], list):
num_rules += len(grammar["rules"][key])
else:
num_rules += 1
while self.prods < num_rules:
self.more_prods()
while self.prods > num_rules:
self.less_prods()
which_rule = 0
for i, key in enumerate(grammar["rules"]):
value = grammar["rules"][key]
if isinstance(value, str):
self.prod_rules_edit[which_rule].setText(key + ": " + value)
which_rule+=1
else:
for val in value:
print(val)
self.prod_rules_edit[which_rule].setText(key + ": " + val[0])
self.prod_percent[which_rule].setText(val[1])
which_rule+=1
self.angle_edit.setText(str(grammar["angle"]))
if self.made_angle:
self.turn_angle_edit.setText(str(grammar["turn_angle"]))
if self.made_line:
self.line_scale_edit.setText(str(grammar["line_scale"]))
self.iters_edit.setText(str(grammar["iterations"]))
self.gen_sys()
def del_example(self, example, dim):
remove_saved_lsystem(example, dim)
print(example, " was deleted from disk")
self.reload_presets()
def reset_text_box_color(self):
"""resets the color of all textboxes"""
for box in self.text_boxes:
box.reset_color()
def reset_zoom(self):
"""resets the zoom level in the display widget"""
self.two_d.reset_zoom()
        self.three_d.reset_zoom() # built-in function, don't rename to snake_case
def screenshot(self, parent_pos):
"""Takes a screenshot of the display window"""
#rel pos is the upper left pointof the widget relative to window
rel_pos = self.dims.pos()
#shift the y down by the total height - height of the widget (height of text boxes)
#1.1 factor added arbitrarily
rel_pos.setY((self.height()-self.dims.height())*1.1)
pos = rel_pos+parent_pos
qfd = QFileDialog()
filter = "Images (*.png *.xpm *.jpg)"
filename, type = QFileDialog.getSaveFileName(self, "", "", filter)
if filename:
self.two_d.screenshot(filename,pos) | PypiClean |
/Medical_media_sridhar-0.1.3.tar.gz/Medical_media_sridhar-0.1.3/main/lib/Validation_funtion.py | class Validation_funtion:
def check_bmi(self,weight:int,height:int):
weight=int(weight)
height=int(height)
bmi=((weight)/(height/100)**2)
if bmi <= 18.5:
print("Oops! You are underweight.")
elif bmi <= 24.9:
print("Awesome! You are healthy.")
elif bmi <= 29.9:
print("Eee! You are overweight.")
else:
print("Seesh! You are obese.")
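    # Added worked example: check_bmi computes BMI = weight / (height in m)**2,
    # so weight=70 (kg) and height=175 (cm) give 70 / 1.75**2, i.e. about 22.9,
    # which falls in the 18.5-24.9 band and prints "Awesome! You are healthy."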
def heartbeat(self,age,gender,heartbeat):
age=int(age)
heartbeat=int(heartbeat)
if(gender == 'male'):
if(age>18 and age<25):
if(heartbeat>62 and heartbeat <73):
print("Your Pulse Rate Status is Good")
else:
                    print("Your Pulse Rate Status is Bad")
elif(age>26 and age<35):
if(heartbeat>62 and heartbeat <73):
print("Your Pulse Rate Status is Good")
else:
                    print("Your Pulse Rate Status is Bad")
elif(age>36 and age<45):
if(heartbeat>63 and heartbeat <75):
print("Your Pulse Rate Status is Good")
else:
                    print("Your Pulse Rate Status is Bad")
elif(age>46 and age<55):
if(heartbeat>64 and heartbeat <76):
print("Your Pulse Rate Status is Good")
else:
                    print("Your Pulse Rate Status is Bad")
elif(age>56 and age<65):
if(heartbeat>62 and heartbeat <75):
print("Your Pulse Rate Status is Good")
else:
                    print("Your Pulse Rate Status is Bad")
elif(age>=57):
if(heartbeat>62 and heartbeat <73):
print("Your Pulse Rate Status is Good")
else:
                    print("Your Pulse Rate Status is Bad")
if(gender == 'female'):
if(age>18 and age<25):
if(heartbeat>64 and heartbeat <80):
print("Your Pulse Rate Status is Good")
else:
                    print("Your Pulse Rate Status is Bad")
elif(age>26 and age<35):
if(heartbeat>62 and heartbeat <73):
print("Your Pulse Rate Status is Good")
else:
                    print("Your Pulse Rate Status is Bad")
elif(age>36 and age<45):
if(heartbeat>63 and heartbeat <75):
print("Your Pulse Rate Status is Good")
else:
                    print("Your Pulse Rate Status is Bad")
elif(age>46 and age<55):
if(heartbeat>64 and heartbeat <76):
print("Your Pulse Rate Status is Good")
else:
                    print("Your Pulse Rate Status is Bad")
elif(age>56 and age<65):
if(heartbeat>62 and heartbeat <75):
print("Your Pulse Rate Status is Good")
else:
                    print("Your Pulse Rate Status is Bad")
elif(age>=57):
if(heartbeat>64 and heartbeat <81):
print("Your Pulse Rate Status is Good")
else:
                    print("Your Pulse Rate Status is Bad")
def bloodpressure(self,age,measuring_type,bloodpressure):
age=int(age)
bloodpressure=int(bloodpressure)
if(measuring_type == 'systolic'):
if(age>1 and age<5):
if(bloodpressure>80 and bloodpressure <115):
print("Your Blood Pressure Rate Status is Good")
else:
print("Your Blood Pressure Rate Status is Bad")
elif(age>6 and age<13):
if(bloodpressure>80 and bloodpressure <120):
print("Your Blood Pressure Rate Status is Good")
else:
print("Your Blood Pressure Rate Status is Bad")
elif(age>14 and age<18):
if(bloodpressure>90 and bloodpressure <120):
print("Your Blood Pressure Rate Status is Good")
else:
print("Your Blood Pressure Rate Status is Bad")
elif(age>19 and age<40):
if(bloodpressure>95 and bloodpressure <135):
print("Your Blood Pressure Rate Status is Good")
else:
print("Your Blood Pressure Rate Status is Bad")
elif(age>41 and age<60):
if(bloodpressure>110 and bloodpressure <145):
print("Your Blood Pressure Rate Status is Good")
else:
print("Your Blood Pressure Rate Status is Bad")
elif(age>=61):
if(bloodpressure>95 and bloodpressure <145):
print("Your Blood Pressure Rate Status is Good")
else:
print("Your Blood Pressure Rate Status is Bad")
if(measuring_type == 'diastolic'):
if(age>1 and age<5):
if(bloodpressure>55 and bloodpressure <80):
print("Your Blood Pressure Rate Status is Good")
else:
print("Your Blood Pressure Rate Status is Bad")
elif(age>6 and age<13):
if(bloodpressure>45 and bloodpressure <80):
print("Your Blood Pressure Rate Status is Good")
else:
print("Your Blood Pressure Rate Status is Bad")
elif(age>14 and age<18):
if(bloodpressure>50 and bloodpressure <80):
print("Your Blood Pressure Rate Status is Good")
else:
print("Your Blood Pressure Rate Status is Bad")
elif(age>19 and age<40):
if(bloodpressure>60 and bloodpressure <80):
print("Your Blood Pressure Rate Status is Good")
else:
print("Your Blood Pressure Rate Status is Bad")
elif(age>41 and age<60):
if(bloodpressure>70 and bloodpressure <90):
print("Your Blood Pressure Rate Status is Good")
else:
print("Your Blood Pressure Rate Status is Bad")
elif(age>=61):
if(bloodpressure>70 and bloodpressure <90):
print("Your Blood Pressure Rate Status is Good")
else:
print("Your Blood Pressure Rate Status is Bad") | PypiClean |
/ACSConv-0.1.1.tar.gz/ACSConv-0.1.1/acsconv/models/resnet.py | import torch.nn as nn
import math
import torch
import torch.utils.model_zoo as model_zoo
import torch.nn.functional as F
from ..operators import ACSConv
model_urls = {
'resnet18': 'https://download.pytorch.org/models/resnet18-5c106cde.pth',
'resnet34': 'https://download.pytorch.org/models/resnet34-333f7ec4.pth',
'resnet50': 'https://download.pytorch.org/models/resnet50-19c8e357.pth',
'resnet101': 'https://download.pytorch.org/models/resnet101-5d3b4d8f.pth',
'resnet152': 'https://download.pytorch.org/models/resnet152-b121ed2d.pth',
}
def resnet18(pretrained=False, **kwargs):
"""Constructs a ResNet-18 model.
Args:
pretrained (bool): If True, returns a model pre-trained on ImageNet
"""
model = ResNet(BasicBlock, [2, 2, 2, 2], **kwargs)
if pretrained:
state_dict = model_zoo.load_url(model_urls['resnet18'])
for key in list(state_dict.keys()):
if 'fc' in key:
del state_dict[key]
model.load_state_dict(state_dict,strict=False)
print('resnet18 loaded imagenet pretrained weights')
else:
print('resnet18 without imagenet pretrained weights')
return model
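# Added usage sketch (assumption: with ACSConv layers and BatchNorm3d the
# backbone consumes 5D volumes shaped (N, C, D, H, W)):
#
#     model = resnet18(pretrained=False)
#     x = torch.randn(1, 3, 32, 32, 32)
#     out, mid, early = model(x)   # features after layer4, layer2 and the stem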
def conv3x3(in_planes, out_planes, stride=1):
"""3x3 convolution with padding"""
return ACSConv(in_planes, out_planes, kernel_size=3, stride=stride,
padding=1, bias=False)
class BasicBlock(nn.Module):
expansion = 1
def __init__(self, inplanes, planes, stride=1, downsample=None):
super(BasicBlock, self).__init__()
self.conv1 = conv3x3(inplanes, planes, stride)
self.bn1 = nn.BatchNorm3d(planes)
self.relu = nn.ReLU(inplace=True)
self.conv2 = conv3x3(planes, planes)
self.bn2 = nn.BatchNorm3d(planes)
self.downsample = downsample
self.stride = stride
def forward(self, x):
residual = x
out = self.conv1(x)
out = self.bn1(out)
out = self.relu(out)
out = self.conv2(out)
out = self.bn2(out)
if self.downsample is not None:
residual = self.downsample(x)
out += residual
out = self.relu(out)
return out
class Bottleneck(nn.Module):
expansion = 4
def __init__(self, inplanes, planes, stride=1, downsample=None):
super(Bottleneck, self).__init__()
self.conv1 = ACSConv(inplanes, planes, kernel_size=1, bias=False)
self.bn1 = nn.BatchNorm3d(planes)
self.conv2 = ACSConv(planes, planes, kernel_size=3, stride=stride,
padding=1, bias=False)
self.bn2 = nn.BatchNorm3d(planes)
self.conv3 = ACSConv(planes, planes * 4, kernel_size=1, bias=False)
self.bn3 = nn.BatchNorm3d(planes * 4)
self.relu = nn.ReLU(inplace=True)
self.downsample = downsample
self.stride = stride
def forward(self, x):
residual = x
out = self.conv1(x)
out = self.bn1(out)
out = self.relu(out)
out = self.conv2(out)
out = self.bn2(out)
out = self.relu(out)
out = self.conv3(out)
out = self.bn3(out)
if self.downsample is not None:
residual = self.downsample(x)
out += residual
out = self.relu(out)
return out
class ResNet(nn.Module):
def __init__(self, block, layers):
self.inplanes = 64
super(ResNet, self).__init__()
self.conv1 = ACSConv(3, 64, kernel_size=7, stride=1, padding=3,
bias=False)
self.bn1 = nn.BatchNorm3d(64)
self.relu = nn.ReLU(inplace=True)
self.layer1 = self._make_layer(block, 64, layers[0])
self.layer2 = self._make_layer(block, 128, layers[1], stride=2)
self.layer3 = self._make_layer(block, 256, layers[2])
self.layer4 = self._make_layer(block, 512, layers[3], stride=2)
for m in self.modules():
if isinstance(m, ACSConv):
n = m.kernel_size[0] * m.kernel_size[1] * m.out_channels
m.weight.data.normal_(0, math.sqrt(2. / n))
elif isinstance(m, nn.BatchNorm3d):
m.weight.data.fill_(1)
m.bias.data.zero_()
def _make_layer(self, block, planes, blocks, stride=1):
downsample = None
if stride != 1 or self.inplanes != planes * block.expansion:
downsample = nn.Sequential(
ACSConv(self.inplanes, planes * block.expansion,
kernel_size=1, stride=stride, bias=False),
nn.BatchNorm3d(planes * block.expansion),
)
layers = []
layers.append(block(self.inplanes, planes, stride, downsample))
self.inplanes = planes * block.expansion
for i in range(1, blocks):
layers.append(block(self.inplanes, planes))
return nn.Sequential(*layers)
def forward(self, x):
x = self.conv1(x)
x = self.bn1(x)
x = self.relu(x)
x2 = x.clone()
x = self.layer1(x)
x = self.layer2(x)
x1 = x.clone()
x = self.layer3(x)
x = self.layer4(x)
return x, x1, x2
class FCNHead(nn.Sequential):
def __init__(self, in_channels, channels):
inter_channels = in_channels // 4
layers = [
ACSConv(in_channels, inter_channels, 3, padding=1, bias=False),
nn.BatchNorm3d(inter_channels),
nn.ReLU(),
ACSConv(inter_channels, channels, 1)
]
super(FCNHead, self).__init__(*layers)
class FCNResNet(nn.Module):
def __init__(self, pretrained, num_classes, backbone='resnet18'):
super().__init__()
self.backbone = globals()[backbone](pretrained=pretrained)
self.conv1 = ACSConv((128+512), 512, kernel_size=1, stride=1, padding=0, bias=False)
self.conv2 = ACSConv(64+512, 512, kernel_size=1, stride=1, padding=0, bias=False)
self.classifier = FCNHead(in_channels=512, channels=num_classes)
def forward(self, x):
features, features1, features2 = self.backbone(x)
features_cat1 = torch.cat([features1, F.interpolate(features, scale_factor=2, mode='trilinear')], dim=1)
features_cat1 = self.conv1(features_cat1)
features_cat2 = torch.cat([features2, F.interpolate(features_cat1, scale_factor=2, mode='trilinear')], dim=1)
features_cat2 = self.conv2(features_cat2)
features = features_cat2
out = self.classifier(features)
return out
class ClsResNet(nn.Module):
def __init__(self, pretrained, num_classes, backbone='resnet18'):
super().__init__()
self.backbone = globals()[backbone](pretrained=pretrained)
self.fc = nn.Linear(512, num_classes, bias=True)
def forward(self, x):
features = self.backbone(x)[0]
features = F.adaptive_avg_pool3d(features, output_size=1).view(features.shape[0], -1)
out = self.fc(features)
return out | PypiClean |
/MLStudio-0.1.15.tar.gz/MLStudio-0.1.15/mlstudio/visual/model_evaluation.py | """Model Evaluation Plots. """
import math
import numpy as np
import pandas as pd
import plotly
import plotly.express as px
import plotly.graph_objects as go
import plotly.offline as py
from sklearn.model_selection import ParameterGrid, learning_curve
from sklearn.model_selection import validation_curve
from .base import ModelVisualatrix
from mlstudio.supervised.machine_learning.linear_regression import LinearRegression
from mlstudio.supervised.machine_learning.ols_regression import OLSRegression
from mlstudio.utils.format import proper
# ---------------------------------------------------------------------------- #
# PREDICTION ERROR #
# ---------------------------------------------------------------------------- #
class PredictionError(ModelVisualatrix):
"""Plots actual target values against predicted values.
Parameters
----------
fig : Plotly Figure or FigureWidget
The plotting object.
estimator : MLStudio estimator object.
The object that implements the 'fit' and 'predict' methods.
kwargs : dict
Keyword arguments that are passed to the base class and influence
the visualization. Optional keyword arguments include:
========= ==========================================
Property Description
-------- ------------------------------------------
height specify the height of the figure
width specify the width of the figure
title specify the title of the figure
template specify the template for the figure.
========= ==========================================
"""
def __init__(self, estimator, fig=None, **kwargs):
super(PredictionError, self).__init__(estimator=estimator,
fig=fig, **kwargs)
self.title = self.title or str(estimator.description + "<br>Prediction Error")
def fit(self, X, y):
"""Generates the prediction error plot.
Parameters
----------
X : array-like, shape (n_samples, n_features)
Training vector, where n_samples is the number of samples and
n_features is the number of features.
y : array-like, shape (n_samples) or (n_samples, n_features), optional
Target relative to X for classification or regression;
None for unsupervised learning.
"""
# Compute predicted vs actual.
self.estimator.fit(X,y)
y_pred = self.estimator.predict(X)
# Compute best fit line predicted vs actual
y = y.reshape(-1,1)
est = OLSRegression()
est.fit(y, y_pred)
# Format data for identity and best fit lines
y = y.ravel()
best_fit_x = np.arange(min(y), max(y))
best_fit_y = est.intercept_ + est.coef_ * best_fit_x
identity_x = best_fit_x
identity_y = best_fit_x
# Scatterplot of predicted vs actual
scatter = go.Scatter(
x=y, y=y_pred,
mode='markers',
marker=dict(color='#005b96'),
line_color='rgba(255,255,255,0.5)',
opacity=0.75,
showlegend=False
)
# Plot best fit line
best_fit = go.Scatter(
name='Best Fit',
x=best_fit_x, y=best_fit_y,
mode='lines',
line=dict(color='#005b96'),
showlegend=True
)
identity = go.Scatter(
name='Identity',
x=identity_x, y=identity_y,
mode='lines',
line=dict(color='#b3cde0'),
showlegend=True
)
# Load from bottom up
data = [scatter, best_fit, identity]
# Update layout with designated template
layout = go.Layout(
xaxis=dict(title='y'),
yaxis=dict(title=r'$\hat{y}$'),
title=self.title,title_x=0.5,
template=self.template
)
self.fig = go.Figure(data=data, layout=layout) | PypiClean |
/MDP-3.6.tar.gz/MDP-3.6/mdp/nodes/misc_nodes.py | from __future__ import division
from future import standard_library
standard_library.install_aliases()
from builtins import str
from builtins import range
from past.utils import old_div
from builtins import object
__docformat__ = "restructuredtext en"
import mdp
from mdp import numx, utils, Node, NodeException, PreserveDimNode
import pickle as pickle
import pickle as real_pickle
class IdentityNode(PreserveDimNode):
"""Execute returns the input data and the node is not trainable.
This node can be instantiated and is for example useful in
complex network layouts.
"""
def _get_supported_dtypes(self):
"""Return the data types supported by this node.
:return: The list of numpy.dtypes that this node supports.
:rtype: list
"""
return (mdp.utils.get_dtypes('AllFloat') +
mdp.utils.get_dtypes('AllInteger') +
mdp.utils.get_dtypes('Character'))
@staticmethod
def is_trainable():
return False
class OneDimensionalHitParade(object):
"""
Class to produce hit-parades (i.e., a list of the locally largest
and smallest values) out of a one-dimensional time-series.
"""
def __init__(self, n, d, real_dtype="d", integer_dtype="l"):
"""Initializes an object of type 'OneDimensionalHitParade'.
:param n: Number of maxima and minima to remember.
:type n: int
:param d: Minimum gap between two hits.
:type d: int
:param real_dtype: Datatype of sequence items
:type real_dtype: numpy.dtype or str
:param integer_dtype: Datatype of sequence indices
:type integer_dtype: numpy.dtype or str
"""
self.n = int(n)
self.d = int(d)
self.iM = numx.zeros((n, ), dtype=integer_dtype)
self.im = numx.zeros((n, ), dtype=integer_dtype)
real_dtype = numx.dtype(real_dtype)
if real_dtype in mdp.utils.get_dtypes('AllInteger'):
max_num = numx.iinfo(real_dtype).max
min_num = numx.iinfo(real_dtype).min
else:
max_num = numx.finfo(real_dtype).max
min_num = numx.finfo(real_dtype).min
self.M = numx.array([min_num]*n, dtype=real_dtype)
self.m = numx.array([max_num]*n, dtype=real_dtype)
self.lM = 0
self.lm = 0
def update(self, inp):
"""
:param inp: A time series, defined by a tuple of numpy.ndarrays
with the first element containing the values and the second
containing the corresponding index. An example can be given by::
>>> indices = numpy.array([0,1,2])
>>> values = numpy.array([100,101,100])
>>> avalidtuple = (indices,values)
:type inp: tuple
"""
(x, ix) = inp
rows = len(x)
d = self.d
M = self.M
m = self.m
iM = self.iM
im = self.im
lM = self.lM
lm = self.lm
for i in range(rows):
k1 = M.argmin()
k2 = m.argmax()
if x[i] > M[k1]:
if ix[i]-iM[lM] <= d and x[i] > M[lM]:
M[lM] = x[i]
iM[lM] = ix[i]
elif ix[i]-iM[lM] > d:
M[k1] = x[i]
iM[k1] = ix[i]
lM = k1
if x[i] < m[k2]:
if ix[i]-im[lm] <= d and x[i] < m[lm]:
m[lm] = x[i]
im[lm] = ix[i]
elif ix[i]-im[lm] > d:
m[k2] = x[i]
im[k2] = ix[i]
lm = k2
self.M = M
self.m = m
self.iM = iM
self.im = im
self.lM = lM
self.lm = lm
def get_maxima(self):
"""
Return the tuple defining the maxima fulfilling specified criteria.
:return: A tuple containing maxima and their corresponding indices
as numpy.ndarrays (see example in definition of the method
``OneDimensionalHitParade.update``). The maxima are sorted in
descending order.
:rtype: tuple
"""
iM = self.iM
M = self.M
sort = M.argsort()
return M[sort[::-1]], iM[sort[::-1]]
def get_minima(self):
"""
Return the tuple defining the minima fulfilling specified criteria.
:return: A tuple containing minima and their corresponding indices
as numpy.ndarrays (see example in definition of the
``OneDimensionalHitParade.update()`` function). The minima are sorted
in descending order.
:rtype: tuple
"""
im = self.im
m = self.m
sort = m.argsort()
return m[sort], im[sort]
class HitParadeNode(PreserveDimNode):
"""Collect the first ``n`` local maxima and minima of the training signal
which are separated by a minimum gap ``d``.
This is an analysis node, i.e. the data is analyzed during training
and the results are stored internally. Use the
``get_maxima`` and ``get_minima`` methods to access them.
"""
def __init__(self, n, d=1, input_dim=None, output_dim=None, dtype=None):
"""Initializes an object of type 'HitParadeNode'.
:param n: Number of maxima and minima to remember.
:type n: int
:param d: Minimum gap between two hits.
:type d: int
:param input_dim: The input dimensionality.
:type input_dim: int
:param output_dim: The output dimensionality.
:type output_dim: int
:param dtype: The datatype.
:type dtype: numpy.dtype or str
"""
super(HitParadeNode, self).__init__(input_dim=input_dim,
output_dim=output_dim,
dtype=dtype)
self.n = int(n)
self.d = int(d)
self.itype = 'int64'
self.hit = None
self.tlen = 0
def _set_input_dim(self, n):
self._input_dim = n
self.output_dim = n
def _get_supported_dtypes(self):
"""Return the data types supported by this node.
:return: The list of numpy.dtypes that this node supports.
:rtype: list
"""
return (mdp.utils.get_dtypes('Float') +
mdp.utils.get_dtypes('AllInteger'))
def _train(self, x):
hit = self.hit
old_tlen = self.tlen
if hit is None:
hit = [OneDimensionalHitParade(self.n, self.d, self.dtype,
self.itype)
for c in range(self.input_dim)]
tlen = old_tlen + x.shape[0]
indices = numx.arange(old_tlen, tlen)
for c in range(self.input_dim):
hit[c].update((x[:, c], indices))
self.hit = hit
self.tlen = tlen
def get_maxima(self):
"""
Return the tuple defining the maxima fulfilling specified criteria.
If the training phase has not been completed yet, call stop_training.
:return: A tuple containing maxima and their corresponding indices
as numpy.ndarrays (see example in definition of the method
``OneDimensionalHitParade.update``). The maxima are sorted in
descending order.
:rtype: tuple
"""
self._if_training_stop_training()
cols = self.input_dim
n = self.n
hit = self.hit
iM = numx.zeros((n, cols), dtype=self.itype)
M = numx.ones((n, cols), dtype=self.dtype)
for c in range(cols):
M[:, c], iM[:, c] = hit[c].get_maxima()
return M, iM
def get_minima(self):
"""
Return the tuple defining the minima fulfilling specified criteria.
If the training phase has not been completed yet, call stop_training.
:return: A tuple containing minima and their corresponding indices
as numpy.ndarrays (see example in definition of the
``OneDimensionalHitParade.update()`` function). The minima are sorted
in descending order.
:rtype: tuple
"""
self._if_training_stop_training()
cols = self.input_dim
n = self.n
hit = self.hit
im = numx.zeros((n, cols), dtype=self.itype)
m = numx.ones((n, cols), dtype=self.dtype)
for c in range(cols):
m[:, c], im[:, c] = hit[c].get_minima()
return m, im
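# Added usage sketch (assumes the node is used as exported through
# ``mdp.nodes``; shapes follow get_maxima/get_minima above):
#
#     >>> x = mdp.numx_rand.random((300, 2))
#     >>> node = HitParadeNode(n=3, d=5)
#     >>> node.train(x)
#     >>> maxima, max_indices = node.get_maxima()   # both arrays have shape (3, 2)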
class TimeFramesNode(Node):
"""Copy delayed version of the input signal on the space dimensions.
For example, for ``time_frames=3`` and ``gap=2``::
[ X(1) Y(1) [ X(1) Y(1) X(3) Y(3) X(5) Y(5)
X(2) Y(2) X(2) Y(2) X(4) Y(4) X(6) Y(6)
X(3) Y(3) --> X(3) Y(3) X(5) Y(5) X(7) Y(7)
X(4) Y(4) X(4) Y(4) X(6) Y(6) X(8) Y(8)
X(5) Y(5) ... ... ... ... ... ... ]
X(6) Y(6)
X(7) Y(7)
X(8) Y(8)
... ... ]
    It is not always possible to invert this transformation (the
    transformation is not surjective). However, the ``pseudo_inverse``
    method does the correct thing when it is indeed possible.
"""
def __init__(self, time_frames, gap=1,
input_dim=None, dtype=None):
"""Initializes an object of type 'TimeFramesNode'.
:param time_frames: Number of delayed copies.
:type time_frames: int
:param gap: Time delay between the copies.
:type gap: int
:param input_dim: The input dimensionality.
:type input_dim: int
:param dtype: The datatype.
:type dtype: numpy.dtype or str
"""
self.time_frames = time_frames
super(TimeFramesNode, self).__init__(input_dim=input_dim,
output_dim=None,
dtype=dtype)
self.gap = gap
def _get_supported_dtypes(self):
"""Return the data types supported by this node.
:return: The list of numpy.dtypes that this node supports.
:rtype: list
"""
return (mdp.utils.get_dtypes('AllFloat') +
mdp.utils.get_dtypes('AllInteger') +
mdp.utils.get_dtypes('Character'))
@staticmethod
def is_trainable():
return False
@staticmethod
def is_invertible():
return False
def _set_input_dim(self, n):
self._input_dim = n
self._output_dim = n*self.time_frames
def _set_output_dim(self, n):
msg = 'Output dim can not be explicitly set!'
raise NodeException(msg)
def _execute(self, x):
gap = self.gap
tf = x.shape[0] - (self.time_frames-1)*gap
rows = self.input_dim
cols = self.output_dim
y = numx.zeros((tf, cols), dtype=self.dtype)
for frame in range(self.time_frames):
y[:, frame*rows:(frame+1)*rows] = x[gap*frame:gap*frame+tf, :]
return y
def pseudo_inverse(self, y):
"""This function returns a pseudo-inverse of the execute frame.
y == execute(x) is only ``True`` if y belongs to the domain of execute
and has been computed with a sufficently large x.
If gap > 1 some of the last rows will be filled with zeros.
:param y: The execute frame.
:type y: numpy.ndarray
:return: A pseudo-inverse of the given frame.
:rtype: numpy.ndarray
"""
self._if_training_stop_training()
# set the output dimension if necessary
if not self.output_dim:
# if the input_dim is not defined, raise an exception
if not self.input_dim:
                errstr = ("Number of input dimensions undefined. Inversion "
                          "not possible.")
raise NodeException(errstr)
            self.output_dim = self.input_dim
# control the dimension of y
self._check_output(y)
# cast
y = self._refcast(y)
gap = self.gap
exp_length = y.shape[0]
cols = self.input_dim
rest = (self.time_frames-1)*gap
rows = exp_length + rest
x = numx.zeros((rows, cols), dtype=self.dtype)
x[:exp_length, :] = y[:, :cols]
count = 1
# Note that if gap > 1 some of the last rows will be filled with zeros!
block_sz = min(gap, exp_length)
for row in range(max(exp_length, gap), rows, gap):
x[row:row+block_sz, :] = y[-block_sz:, count*cols:(count+1)*cols]
count += 1
return x
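# Added usage sketch for the embedding shown in the TimeFramesNode docstring
# (direct consequence of _execute above: the output has time_frames*input_dim
# columns and (time_frames-1)*gap fewer rows than the input):
#
#     >>> x = mdp.numx_rand.random((100, 2))
#     >>> node = TimeFramesNode(time_frames=3, gap=2)
#     >>> node.execute(x).shape
#     (96, 6)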
class TimeDelayNode(TimeFramesNode):
"""
Copy delayed version of the input signal on the space dimensions.
For example, for ``time_frames=3`` and ``gap=2``::
[ X(1) Y(1) [ X(1) Y(1) 0 0 0 0
X(2) Y(2) X(2) Y(2) 0 0 0 0
X(3) Y(3) --> X(3) Y(3) X(1) Y(1) 0 0
X(4) Y(4) X(4) Y(4) X(2) Y(2) 0 0
X(5) Y(5) X(5) Y(5) X(3) Y(3) X(1) Y(1)
X(6) Y(6) ... ... ... ... ... ... ]
X(7) Y(7)
X(8) Y(8)
... ... ]
This node provides similar functionality as the ``TimeFramesNode``, only
that it performs a time embedding into the past rather than into the future.
See ``TimeDelaySlidingWindowNode`` for a sliding window delay node for
application in a non-batch manner.
Original code contributed by Sebastian Hoefer.
Dec 31, 2010
"""
def __init__(self, time_frames, gap=1, input_dim=None, dtype=None):
"""Initializes an object of type 'TimeDelayNode'.
:param time_frames: Number of delayed copies.
:type time_frames: int
:param gap: Time delay between the copies.
:type gap: int
:param input_dim: The input dimensionality.
:type input_dim: int
:param dtype: The datatype.
:type dtype: numpy.dtype
"""
super(TimeDelayNode, self).__init__(time_frames, gap,
input_dim, dtype)
def _execute(self, x):
gap = self.gap
rows = x.shape[0]
cols = self.output_dim
n = self.input_dim
y = numx.zeros((rows, cols), dtype=self.dtype)
for frame in range(self.time_frames):
y[gap*frame:, frame*n:(frame+1)*n] = x[:rows-gap*frame, :]
return y
def pseudo_inverse(self, y):
"""
:raises mdp.NodeException: When called. This method overrides
the corresponding method of the ``TimeFramesNode*.
"""
raise NotImplementedError
class TimeDelaySlidingWindowNode(TimeDelayNode):
"""
``TimeDelaySlidingWindowNode`` is an alternative to ``TimeDelayNode``
which should be used for online learning/execution. Whereas the
``TimeDelayNode`` works in a batch manner, for online application
a sliding window is necessary which yields only one row per call.
Applied to the same data the collection of all returned rows of the
``TimeDelaySlidingWindowNode`` is equivalent to the result of the
``TimeDelayNode``.
Original code contributed by Sebastian Hoefer.
Dec 31, 2010
"""
def __init__(self, time_frames, gap=1, input_dim=None, dtype=None):
"""Initializes an object of type 'TimeDelaySlidingWindowNode'.
:param time_frames: Number of delayed copies.
:type time_frames: int
:param gap: Time delay between the copies.
:type gap: int
:param input_dim: The input dimensionality.
:type input_dim: int
:param dtype: The datatype.
:type dtype: numpy.dtype or str
"""
self.time_frames = time_frames
self.gap = gap
super(TimeDelaySlidingWindowNode, self).__init__(time_frames, gap,
input_dim, dtype)
self.sliding_wnd = None
self.cur_idx = 0
self.slide = False
def _init_sliding_window(self):
rows = self.gap+1
cols = self.input_dim*self.time_frames
self.sliding_wnd = numx.zeros((rows, cols), dtype=self.dtype)
def _execute(self, x):
assert x.shape[0] == 1
if self.sliding_wnd is None:
self._init_sliding_window()
gap = self.gap
rows = self.sliding_wnd.shape[0]
cols = self.output_dim
n = self.input_dim
new_row = numx.zeros(cols, dtype=self.dtype)
new_row[:n] = x
# Slide
if self.slide:
self.sliding_wnd[:-1, :] = self.sliding_wnd[1:, :]
# Delay
if self.cur_idx-gap >= 0:
new_row[n:] = self.sliding_wnd[self.cur_idx-gap, :-n]
# Add new row to matrix
self.sliding_wnd[self.cur_idx, :] = new_row
if self.cur_idx < rows-1:
self.cur_idx = self.cur_idx+1
else:
self.slide = True
return new_row[numx.newaxis,:]
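# Added usage sketch: unlike TimeDelayNode, the sliding-window variant is fed
# one row at a time (see the assert in _execute above):
#
#     >>> x = mdp.numx_rand.random((50, 2))
#     >>> node = TimeDelaySlidingWindowNode(time_frames=3, gap=2)
#     >>> for row in x:
#     ...     embedded_row = node.execute(row[numx.newaxis, :])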
class EtaComputerNode(Node):
"""Compute the eta values of the normalized training data.
The delta value of a signal is a measure of its temporal
variation, and is defined as the mean of the derivative squared,
i.e. ``delta(x) = mean(dx/dt(t)^2)``. ``delta(x)`` is zero if
``x`` is a constant signal, and increases if the temporal variation
of the signal is bigger.
The eta value is a more intuitive measure of temporal variation,
defined as::
eta(x) = T/(2*pi) * sqrt(delta(x))
If ``x`` is a signal of length ``T`` which consists of a sine function
that accomplishes exactly ``N`` oscillations, then ``eta(x)=N``.
``EtaComputerNode`` normalizes the training data to have unit
variance, such that it is possible to compare the temporal
variation of two signals independently from their scaling.
.. note::
- If a data chunk is tlen data points long, this node is
going to consider only the first tlen-1 points together with their
derivatives. This means in particular that the variance of the
signal is not computed on all data points. This behavior is
compatible with that of ``SFANode``.
- This is an analysis node, i.e. the data is analyzed during training
and the results are stored internally. Use the method
``get_eta`` to access them.
|
.. admonition:: Reference
*Wiskott, L. and Sejnowski, T.J. (2002)*.
Slow Feature Analysis: Unsupervised Learning of Invariances,
Neural Computation, 14(4):715-770.
"""
def __init__(self, input_dim=None, dtype=None):
"""Initializes an object of type 'EtaComputerNode'.
:param input_dim: The input dimensionality.
:type input_dim: int
:param dtype: The datatype.
:type dtype: numpy.dtype or str
"""
super(EtaComputerNode, self).__init__(input_dim, None, dtype)
self._initialized = 0
def _set_input_dim(self, n):
self._input_dim = n
self.output_dim = n
def _init_internals(self):
input_dim = self.input_dim
self._mean = numx.zeros((input_dim,), dtype='d')
self._var = numx.zeros((input_dim,), dtype='d')
self._tlen = 0
self._diff2 = numx.zeros((input_dim,), dtype='d')
self._initialized = 1
def _train(self, data):
# here SignalNode.train makes an automatic refcast
if not self._initialized:
self._init_internals()
rdata = data[:-1]
self._mean += rdata.sum(axis=0)
self._var += (rdata*rdata).sum(axis=0)
self._tlen += rdata.shape[0]
td_data = utils.timediff(data)
self._diff2 += (td_data*td_data).sum(axis=0)
def _stop_training(self):
var_tlen = self._tlen-1
# unbiased
var = old_div((self._var - self._mean*self._mean/self._tlen),var_tlen)
# biased
#var = (self._var - self._mean*self._mean/self._tlen)/self._tlen
# old formula: wrong! is neither biased nor unbiased
#var = (self._var/var_tlen) - (self._mean/self._tlen)**2
self._var = var
delta = old_div((old_div(self._diff2,self._tlen)),var)
self._delta = delta
self._eta = old_div(numx.sqrt(delta),(2*numx.pi))
def get_eta(self, t=1):
"""Return the eta values of the data received during the training
phase.
If the training phase has not been completed yet, call
stop_training.
:param t: Sampling frequency in Hz.
The original definition in (Wiskott and Sejnowski, 2002)
is obtained for ``t=self._tlen``, while for ``t=1`` (default),
this corresponds to the beta-value defined in
(Berkes and Wiskott, 2005).
:type t: int
:return: The eta values of the data received.
:rtype: numpy.ndarray
"""
self._if_training_stop_training()
return self._refcast(self._eta*t)
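# Added worked example following the docstring above (the printed value is
# approximate, not an exact guarantee): a sine completing 5 oscillations
# should give an eta value close to 5 when t equals the signal length.
#
#     >>> t = mdp.numx.linspace(0, 2*mdp.numx.pi, 1000)
#     >>> x = mdp.numx.sin(5*t).reshape(-1, 1)
#     >>> node = EtaComputerNode()
#     >>> node.train(x)
#     >>> node.get_eta(t=1000)   # approximately [ 5.]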
class NoiseNode(PreserveDimNode):
"""Inject multiplicative or additive noise into the input data.
Original code contributed by Mathias Franzius.
"""
def __init__(self, noise_func=mdp.numx_rand.normal, noise_args=(0, 1),
noise_type='additive',
input_dim=None, output_dim=None, dtype=None):
"""Initializes an object of type 'NoiseNode'.
:param noise_func: A function that generates noise. It must
take a ``size`` keyword argument and return
a random array of that size. Default is normal noise.
:type noise_func: function
:param noise_args: Tuple of additional arguments passed to `noise_func`.
Default is (0,1) for (mean, standard deviation)
of the normal distribution.
:type noise_args: tuple
:param noise_type: Either ``'additive'`` or ``'multiplicative'``.
:type noise_type: str
:param input_dim: The input dimensionality.
:type input_dim: int
:param output_dim: The output dimensionality.
:type output_dim: int
:param dtype: The datatype.
:type dtype: numpy.dtype or str
"""
super(NoiseNode, self).__init__(input_dim=input_dim,
output_dim=output_dim,
dtype=dtype)
self.noise_func = noise_func
self.noise_args = noise_args
valid_noise_types = ['additive', 'multiplicative']
if noise_type not in valid_noise_types:
err_str = '%s is not a valid noise type' % str(noise_type)
raise NodeException(err_str)
else:
self.noise_type = noise_type
def _get_supported_dtypes(self):
"""Return the data types supported by this node.
:return: The list of numpy.dtypes that this node supports.
:rtype: list
"""
return (mdp.utils.get_dtypes('Float') +
mdp.utils.get_dtypes('AllInteger'))
@staticmethod
def is_trainable():
return False
@staticmethod
def is_invertible():
return False
def _execute(self, x):
noise_mat = self._refcast(self.noise_func(*self.noise_args,
**{'size': x.shape}))
if self.noise_type == 'additive':
return x+noise_mat
elif self.noise_type == 'multiplicative':
return x*(1.+noise_mat)
def save(self, filename, protocol = -1):
"""Save a pickled serialization of the node to 'filename'.
If 'filename' is None, return a string.
Note: the pickled Node is not guaranteed to be upward or
backward compatible.
:param filename: The name of the file to save to.
:type filename: str
        :param protocol: Pickle protocol to pass on to ``pickle.dump``. If
            protocol != 0 the file is opened in binary mode. Default is -1
            (the highest available protocol).
        :type protocol: int
"""
if filename is None:
# cPickle seems to create an error, probably due to the
# self.noise_func attribute.
return real_pickle.dumps(self, protocol)
else:
# if protocol != 0 open the file in binary mode
mode = 'w' if protocol == 0 else 'wb'
with open(filename, mode) as flh:
real_pickle.dump(self, flh, protocol)
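# Added usage sketch: additive Gaussian noise with mean 0 and standard
# deviation 0.1 (noise_func/noise_args semantics as documented above):
#
#     >>> node = NoiseNode(noise_args=(0, 0.1), noise_type='additive')
#     >>> y = node.execute(mdp.numx.zeros((5, 3)))   # here y is just the noise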
class NormalNoiseNode(PreserveDimNode):
"""Special version of ``NoiseNode`` for Gaussian additive noise.
Unlike ``NoiseNode`` it does not store a noise function reference but simply
uses ``numx_rand.normal``.
"""
def __init__(self, noise_args=(0, 1),
input_dim=None, output_dim=None, dtype=None):
"""Initializes an object of type 'NormalNoiseNode'.
:param noise_args: Tuple of (mean, standard deviation) for the normal
distribution, default is (0,1).
:type noise_args: tuple
:param input_dim: The input dimensionality.
:type input_dim: int
:param output_dim: The output dimensionality.
:type output_dim: int
:param dtype: The datatype.
:type dtype: numpy.dtype or str
"""
super(NormalNoiseNode, self).__init__(input_dim=input_dim,
output_dim=output_dim,
dtype=dtype)
self.noise_args = noise_args
@staticmethod
def is_trainable():
return False
@staticmethod
def is_invertible():
return False
def _execute(self, x):
noise = self._refcast(mdp.numx_rand.normal(size=x.shape) *
self.noise_args[1]
+ self.noise_args[0])
return x + noise
class CutoffNode(PreserveDimNode):
"""Node to cut off values at specified bounds.
Works similar to ``numpy.clip``, but also works when only a lower or upper
bound is specified.
"""
def __init__(self, lower_bound=None, upper_bound=None,
input_dim=None, output_dim=None, dtype=None):
"""Initializes an object of type 'CutoffNode'.
:param lower_bound: Data values below this are cut to the
``lower_bound`` value. If ``lower_bound`` is ``None`` no cutoff
is performed. ``None`` is the default.
:type lower_bound: numeric
:param upper_bound: Works like ``lower_bound``.
``None`` is the default.
:type upper_bound: numeric
:param input_dim: The input dimensionality.
:type input_dim: int
:param output_dim: The output dimensionality.
:type output_dim: int
:param dtype: The datatype.
:type dtype: numpy.dtype or str
"""
super(CutoffNode, self).__init__(input_dim=input_dim,
output_dim=output_dim,
dtype=dtype)
self.lower_bound = lower_bound
self.upper_bound = upper_bound
@staticmethod
def is_trainable():
return False
@staticmethod
def is_invertible():
return False
def _get_supported_dtypes(self):
return (mdp.utils.get_dtypes('Float') +
mdp.utils.get_dtypes('AllInteger'))
def _execute(self, x):
"""Return the clipped data.
:param x: Data to clip.
:type x: numpy.ndarray
:return: The clipped data.
:rtype: numpy.ndarray
"""
# n.clip() does not work, since it does not accept None for one bound
if self.lower_bound is not None:
x = numx.where(x >= self.lower_bound, x, self.lower_bound)
if self.upper_bound is not None:
x = numx.where(x <= self.upper_bound, x, self.upper_bound)
return x
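def _cutoff_node_example():
    # Editor's sketch, not part of the original source: clip data to [0, 1].
    # Either bound may also be left as None to clip on one side only.
    node = CutoffNode(lower_bound=0.0, upper_bound=1.0)
    x = numx.array([[-0.5, 0.3, 1.7]])
    return node.execute(x)  # -> [[0.0, 0.3, 1.0]]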
class HistogramNode(PreserveDimNode):
"""Node which stores a history of the data during its training phase.
The data history is stored in ``self.data_hist`` and can also be deleted to
free memory. Alternatively it can be automatically pickled to disk.
Note that data is only stored during training.
"""
def __init__(self, hist_fraction=1.0, hist_filename=None,
input_dim=None, output_dim=None, dtype=None):
"""Initializes an object of type 'HistogramNode'.
:param hist_fraction: Defines the fraction of the data that is stored
randomly. Default is 1.0.
:type hist_fraction: float
:param hist_filename: Filename for the file to which the data history will
be pickled after training. The data is pickled when stop_training
is called and data_hist is then cleared (to free memory).
If filename is None (default value) then data_hist is not cleared
and can be directly used after training.
:type hist_filename: str
:param input_dim: The input dimensionality.
:type input_dim: int
:param output_dim: The output dimensionality.
:type output_dim: int
:param dtype: The datatype.
:type dtype: numpy.dtype or str
"""
super(HistogramNode, self).__init__(input_dim=input_dim,
output_dim=output_dim,
dtype=dtype)
self._hist_filename = hist_filename
self.hist_fraction = hist_fraction
self.data_hist = None # stores the data history
def _get_supported_dtypes(self):
"""Return the data types supported by this node.
:return: The list of numpy.dtypes that this node supports.
:rtype: list
"""
return (mdp.utils.get_dtypes('AllFloat') +
mdp.utils.get_dtypes('AllInteger') +
mdp.utils.get_dtypes('Character'))
def _train(self, x):
"""Store the history data.
:param x: The history data.
:type x: numpy.ndarray
"""
if self.hist_fraction < 1.0:
x = x[numx.random.random(len(x)) < self.hist_fraction]
if self.data_hist is not None:
self.data_hist = numx.concatenate([self.data_hist, x])
else:
self.data_hist = x
def _stop_training(self):
"""Pickle the histogram data to file and clear it if required."""
super(HistogramNode, self)._stop_training()
if self._hist_filename:
pickle_file = open(self._hist_filename, "wb")
try:
pickle.dump(self.data_hist, pickle_file, protocol=-1)
finally:
pickle_file.close( )
self.data_hist = None
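def _histogram_node_example():
    # Editor's sketch, not part of the original source: keep a random ~50% of
    # the training data in memory and inspect it after training.
    node = HistogramNode(hist_fraction=0.5)
    node.train(mdp.numx_rand.random((1000, 4)))
    node.stop_training()   # no hist_filename given, so data_hist is kept
    return node.data_hist  # roughly 500 of the 1000 training rows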
class AdaptiveCutoffNode(HistogramNode):
"""Node which uses the data history during training to learn cutoff values.
As opposed to the simple ``CutoffNode``, a different cutoff value is learned
for each data coordinate. For example if an upper cutoff fraction of
0.05 is specified, then the upper cutoff bound is set so that the upper
5% of the training data would have been clipped (in each dimension).
The cutoff bounds are then applied during execution.
This node also works as a ``HistogramNode``, so the histogram data is stored.
When ``stop_training`` is called the cutoff values for each coordinate are
calculated based on the collected histogram data.
"""
def __init__(self, lower_cutoff_fraction=None, upper_cutoff_fraction=None,
hist_fraction=1.0, hist_filename=None,
input_dim=None, output_dim=None, dtype=None):
"""Initializes an object of type 'AdaptiveCutoffNode'.
:param lower_cutoff_fraction: Fraction of data that will be cut off
after the training phase (assuming the data distribution does not
change). If set to ``None`` (default value) no cutoff is performed.
:type lower_cutoff_fraction: float
:param upper_cutoff_fraction: Works like `lower_cutoff_fraction`.
:type upper_cutoff_fraction: float
:param hist_fraction: Defines the fraction of the data that is stored
for the histogram.
:type hist_fraction: float
:param hist_filename: Filename for the file to which the data history
will be pickled after training. The data is pickled when
`stop_training` is called and ``data_hist`` is then
cleared (to free memory). If filename is ``None``
(default value) then ``data_hist`` is not cleared and can
be directly used after training.
:type hist_filename: str
:param input_dim: The input dimensionality.
:type input_dim: int
:param output_dim: The output dimensionality.
:type output_dim: int
:param dtype: The datatype.
:type dtype: numpy.dtype or str
"""
super(AdaptiveCutoffNode, self).__init__(hist_fraction=hist_fraction,
hist_filename=hist_filename,
input_dim=input_dim,
output_dim=output_dim,
dtype=dtype)
self.lower_cutoff_fraction = lower_cutoff_fraction
self.upper_cutoff_fraction = upper_cutoff_fraction
self.lower_bounds = None
self.upper_bounds = None
def _get_supported_dtypes(self):
"""Return the data types supported by this node.
:return: The list of numpy.dtypes that this node supports.
:rtype: list
"""
return (mdp.utils.get_dtypes('Float') +
mdp.utils.get_dtypes('AllInteger'))
def _stop_training(self):
"""Calculate the cutoff bounds based on collected histogram data."""
if self.lower_cutoff_fraction or self.upper_cutoff_fraction:
sorted_data = self.data_hist.copy()
sorted_data.sort(axis=0)
if self.lower_cutoff_fraction:
index = self.lower_cutoff_fraction * len(sorted_data)
self.lower_bounds = sorted_data[int(index)]
if self.upper_cutoff_fraction:
index = (len(sorted_data) -
self.upper_cutoff_fraction * len(sorted_data))
self.upper_bounds = sorted_data[int(index)]
super(AdaptiveCutoffNode, self)._stop_training()
def _execute(self, x):
"""Return the clipped data.
:param x: Data to clip.
:type x: numpy.ndarray
:return: The clipped data.
:rtype: numpy.ndarray
"""
if self.lower_bounds is not None:
x = numx.where(x >= self.lower_bounds, x, self.lower_bounds)
if self.upper_bounds is not None:
x = numx.where(x <= self.upper_bounds, x, self.upper_bounds)
return x | PypiClean |
/CPAT-3.0.4.tar.gz/CPAT-3.0.4/lib/cpmodule/FrameKmer.py | #import built-in modules
import math
from collections import Counter
import re
import itertools
from cpmodule import ireader
def word_generator(seq,word_size,step_size,frame=0):
'''
Generate DNA word from sequence using word_size and step_size. Frame is 0, 1 or 2.
'''
for i in range(frame,len(seq),step_size):
word = seq[i:i+word_size]
if len(word) == word_size:
yield word
def seq_generator(fastafile):
'''
    Yield [name, sequence] pairs from a FASTA/FASTQ file. DNA sequences may only contain A,C,G,T,N; lines with other characters are skipped.
'''
tmpseq=''
name=''
DNA_pat = re.compile(r'^[ACGTN]+$')
for line in ireader.reader(fastafile):
line=line.strip().upper()
if line.startswith(('#',' ','\n')):continue
if line.startswith(('>','@')):
if tmpseq:
yield [name,tmpseq]
tmpseq=''
name = line.split()[0][1:]
elif DNA_pat.match(line):
tmpseq += line
yield [name,tmpseq]
def all_possible_kmer(l):
'''
    Yield all possible kmers of length l over the alphabet A,C,G,T,N.
'''
for i in itertools.product(['A','C','G','T','N'],repeat=l):
yield ''.join(i)
def kmer_freq_file (fastafile,word_size,step_size=1,frame=0,min_count=0):
'''
Calculate kmer frequency from fasta file
'''
seq_num = 0
ret_dict={}
for n,s in seq_generator(fastafile):
seq_num += 1
if seq_num == 1:
count_table = Counter(word_generator(s,word_size=word_size,step_size=step_size,frame=frame))
else:
count_table.update( word_generator(s,word_size=word_size,step_size=step_size,frame=frame) )
#return count_table
for kmer in all_possible_kmer(word_size):
if kmer not in count_table: count_table[kmer]=0
if count_table[kmer] >= min_count:
#print kmer + '\t' + str(count_table[kmer])
if 'N' in kmer:continue
ret_dict[kmer] = count_table[kmer]
return ret_dict
def kmer_freq_seq (seq,word_size,step_size=1,frame=0,min_count=0):
'''
    Calculate kmer frequencies from a DNA sequence and print them (one kmer per line).
'''
count_table = Counter(word_generator(seq,word_size=word_size,step_size=step_size,frame=frame))
for kmer in all_possible_kmer(word_size):
if kmer not in count_table: count_table[kmer]=0
if count_table[kmer] >= min_count:
print(kmer + '\t' + str(count_table[kmer]))
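def _coding_potential_example():
    # Editor's sketch, not part of the original source; the FASTA file names are
    # hypothetical placeholders. Builds hexamer tables from a coding and a
    # noncoding FASTA file with kmer_freq_file, then scores a query sequence
    # with kmer_ratio (defined below).
    coding = kmer_freq_file('coding.fa', word_size=6, step_size=3, frame=0)
    noncoding = kmer_freq_file('noncoding.fa', word_size=6, step_size=1, frame=0)
    score = kmer_ratio('ATGGCTGACTGACTGACTGA', 6, 3, coding, noncoding)
    return score  # > 0 suggests coding-like hexamer usage, < 0 noncoding-like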
def kmer_ratio(seq,word_size,step_size,coding,noncoding):
if len(seq) < word_size:
return 0
sum_of_log_ratio_0 = 0.0
frame0_count=0.0
for k in word_generator(seq=seq, word_size = word_size, step_size=step_size,frame=0):
if (k not in coding) or (k not in noncoding):
continue
if coding[k]>0 and noncoding[k] >0:
sum_of_log_ratio_0 += math.log( coding[k] / noncoding[k])
elif coding[k]>0 and noncoding[k] == 0:
sum_of_log_ratio_0 += 1
elif coding[k] == 0 and noncoding[k] == 0:
continue
elif coding[k] == 0 and noncoding[k] >0 :
sum_of_log_ratio_0 -= 1
else:
continue
frame0_count += 1
try:
return sum_of_log_ratio_0/frame0_count
except:
return -1 | PypiClean |
/BigJob2-0.54.post73.tar.gz/BigJob2-0.54.post73/bigjob/job_plugin/gcessh.py |
from bigjob import logger
from apiclient.discovery import build
from oauth2client.file import Storage
from oauth2client.client import OAuth2WebServerFlow
from oauth2client.tools import run
import httplib2
import os
import uuid
import time
import saga
"""
AN OAUTH2 Client Id must be created at the Google API console at:
https://code.google.com/apis/console/
=> API Access
More information with respect to OAUTH: https://developers.google.com/compute/docs/api/how-tos/authorization
"""
OAUTH2_CLIENT_ID='1004462711324-55akehip32m59u6omdfrt9s8u8ehb0hm.apps.googleusercontent.com'
OAUTH2_CLIENT_SECRET='EIMML1W7anu0XijVghws0DY-'
GCE_PROJECT_ID='bigjob-pilot'
"""
Google Compute Engine currently provides a default image with Ubuntu 12.04
To use BJ, a custom image containing gcc and build essentials needs to be
created.
$ apt-get update
$ apt-get install gcc python-all-dev
"""
GCE_IMAGE_URL="https://www.googleapis.com/compute/v1beta12/projects/bigjob-pilot/images/bigjob-image"
class gce_states:
PROVISIONING="PROVISIONING"
STAGING="STAGING"
RUNNING="RUNNING"
class Service(object):
""" Plugin for Google Compute Engine
Manages endpoint in the form of:
gce+ssh://api.google.com
"""
def __init__(self, resource_url, pilot_compute_description):
"""Constructor"""
self.resource_url = resource_url
self.pilot_compute_description = pilot_compute_description
def create_job(self, job_description):
j = Job(job_description, self.resource_url, self.pilot_compute_description)
return j
def __del__(self):
pass
class Job(object):
def __init__(self, job_description, saga_url, pilot_compute_description):
self.job_description = job_description
self.saga_url = saga_url
self.pilot_compute_description = pilot_compute_description
self.image_url = GCE_IMAGE_URL
if self.pilot_compute_description.has_key("vm_id"):
self.image_url = self.pilot_compute_description["vm_id"]
self.machine_type = "https://www.googleapis.com/compute/v1beta12/projects/bigjob-pilot/machine-types/n1-standard-1"
if self.pilot_compute_description.has_key("vm_type"):
self.machine_type = self.pilot_compute_description["vm_type"]
self.location = "https://www.googleapis.com/compute/v1beta12/projects/bigjob-pilot/zones/us-east1-a"
if self.pilot_compute_description.has_key("vm_location"):
self.location = self.pilot_compute_description["vm_location"]
self.id="bigjob-" + str(uuid.uuid1())
self.network_ip=None
# Do OAUTH authentication
storage = Storage('gce.dat')
self.credentials = storage.get()
if self.credentials is None or self.credentials.invalid == True:
flow = OAuth2WebServerFlow(
client_id=OAUTH2_CLIENT_ID,
client_secret=OAUTH2_CLIENT_SECRET,
scope='https://www.googleapis.com/auth/compute',
user_agent='bigjob-client/1.0')
self.credentials = run(flow, storage)
def run(self):
request_dict = {
"kind": "compute#instance",
"disks": [
{
"kind": "compute#instanceDisk",
"type": "PERSISTENT",
"mode": "READ",
"deviceName": "reference-genome",
"source": "https://www.googleapis.com/compute/v1beta12/projects/bigjob-pilot/disks/reference-genome"
}
],
"networkInterfaces": [
{
"kind": "compute#instanceNetworkInterface",
"accessConfigs": [
{
"name": "External NAT",
"type": "ONE_TO_ONE_NAT"
}
],
"network": "https://www.googleapis.com/compute/v1beta12/projects/bigjob-pilot/networks/default"
}
],
"serviceAccounts": [
{
"kind": "compute#serviceAccount",
"email": "default",
"scopes": [
"https://www.googleapis.com/auth/userinfo.email",
"https://www.googleapis.com/auth/compute",
"https://www.googleapis.com/auth/devstorage.full_control"
]
}
],
#"zone": "https://www.googleapis.com/compute/v1beta12/projects/bigjob-pilot/zones/us-east1-a",
"zone": self.location,
#"machineType": "https://www.googleapis.com/compute/v1beta12/projects/bigjob-pilot/machine-types/n1-standard-1",
"machineType": self.machine_type,
"name": self.id,
"image": self.image_url
}
http = httplib2.Http()
http = self.credentials.authorize(http)
gce = build("compute", "v1beta12", http=http)
#result = gce.instances().get(instance="bigjob-pilot", project="bigjob-pilot").execute()
gce.instances().insert(project=GCE_PROJECT_ID, body=request_dict).execute()
time.sleep(15) # wait for startup
#wait for compute instance to become active
self.wait_for_running()
# spawn BJ agent via SSH
compute_instance_details = self.__get_instance_resource()
logger.debug("Compute Instance Details: " + str(compute_instance_details))
self.network_ip = compute_instance_details["networkInterfaces"][0]["accessConfigs"][0]['natIP']
url = "ssh://" + str(self.network_ip)
logger.debug("Connect to: %s"%(url))
# Submit job
ctx = saga.Context("SSH")
#ctx.type = saga.Context.SSH
ctx.user_id = self.pilot_compute_description["vm_ssh_username"]
ctx.user_key = self.pilot_compute_description["vm_ssh_keyfile"]
#js.session.contexts = [ctx]
session = saga.Session()
session.add_context(ctx)
js = saga.job.Service(url, session=session)
job = js.create_job(self.job_description)
print "Submit pilot job to: " + str(url)
TRIAL_MAX=15
trials=0
while trials < TRIAL_MAX:
try:
logger.debug("Attempt: %d, submit pilot job to: %s "%(trials,str(url)))
job.run()
break
except:
trials = trials + 1
time.sleep(10)
if trials == TRIAL_MAX:
raise Exception("Submission of agent failed.")
logger.debug("Job State : %s" % (job.get_state()))
print "Job State : %s" % (job.get_state())
def wait_for_running(self):
while self.get_state()!=gce_states.RUNNING:
time.sleep(5)
def get_state(self):
result=self.__get_instance_resource()
return result["status"]
def cancel(self):
http = httplib2.Http()
http = self.credentials.authorize(http)
gce = build("compute", "v1beta12", http=http)
gce.instances().delete(project=GCE_PROJECT_ID, instance=self.id).execute()
def __get_instance_resource(self):
http = httplib2.Http()
http = self.credentials.authorize(http)
gce = build("compute", "v1beta12", http=http)
result = gce.instances().get(project=GCE_PROJECT_ID, instance=self.id).execute()
return result
if __name__ == "__main__":
gce_service = Service("gce+ssh://api.google.com")
j = gce_service.create_job(job_description)
    j.run()
    print j.get_state() | PypiClean
/DisplaceNet-0.1.tar.gz/DisplaceNet-0.1/engine/object_detection_branch/retina_net/keras_retinanet/models/__init__.py | class Backbone(object):
""" This class stores additional information on backbones.
"""
def __init__(self, backbone):
# a dictionary mapping custom layer names to the correct classes
from .. import layers
from .. import losses
from .. import initializers
self.custom_objects = {
'UpsampleLike' : layers.UpsampleLike,
'PriorProbability' : initializers.PriorProbability,
'RegressBoxes' : layers.RegressBoxes,
'FilterDetections' : layers.FilterDetections,
'Anchors' : layers.Anchors,
'ClipBoxes' : layers.ClipBoxes,
'_smooth_l1' : losses.smooth_l1(),
'_focal' : losses.focal(),
}
self.backbone = backbone
self.validate()
def retinanet(self, *args, **kwargs):
""" Returns a retinanet model using the correct backbone.
"""
raise NotImplementedError('retinanet method not implemented.')
def download_imagenet(self):
""" Downloads ImageNet weights and returns path to weights file.
"""
raise NotImplementedError('download_imagenet method not implemented.')
def validate(self):
""" Checks whether the backbone string is correct.
"""
raise NotImplementedError('validate method not implemented.')
def backbone(backbone_name):
""" Returns a backbone object for the given backbone.
"""
if 'resnet' in backbone_name:
from .resnet import ResNetBackbone as b
elif 'mobilenet' in backbone_name:
from .mobilenet import MobileNetBackbone as b
elif 'vgg' in backbone_name:
from .vgg import VGGBackbone as b
elif 'densenet' in backbone_name:
from .densenet import DenseNetBackbone as b
else:
        raise NotImplementedError('Backbone class for \'{}\' not implemented.'.format(backbone_name))
return b(backbone_name)
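def _load_inference_model_example():
    # Editor's sketch, not part of the original source; the snapshot path is a
    # hypothetical placeholder. Deserializes a trained snapshot with the correct
    # custom objects via load_model (defined below) and converts it into an
    # inference model with NMS filtering.
    return load_model('snapshots/resnet50_model.h5', backbone_name='resnet50',
                      convert=True, nms=True)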
def load_model(filepath, backbone_name='resnet50', convert=False, nms=True):
""" Loads a retinanet model using the correct custom objects.
# Arguments
filepath: one of the following:
- string, path to the saved model, or
- h5py.File object from which to load the model
backbone_name: Backbone with which the model was trained.
convert: Boolean, whether to convert the model to an inference model.
nms: Boolean, whether to add NMS filtering to the converted model. Only valid if convert=True.
# Returns
A keras.models.Model object.
# Raises
ImportError: if h5py is not available.
ValueError: In case of an invalid savefile.
"""
import keras.models
model = keras.models.load_model(filepath, custom_objects=backbone(backbone_name).custom_objects)
if convert:
from .retinanet import retinanet_bbox
model = retinanet_bbox(model=model, nms=nms)
return model | PypiClean |
/DESpy-0.1.15-py3-none-any.whl/simkit/examples/basic.py | from math import nan
from random import Random
from heapq import heappush
from heapq import heappop
from simkit.base import SimEntityBase
from simkit.base import EventList
from simkit.base import Priority
from simkit.base import Entity
rng = Random(12345)
class SingleResourceModel(SimEntityBase):
def __init__(self, interarrival_time_generator, service_time_generator):
SimEntityBase.__init__(self)
self.interarrival_time_generator = interarrival_time_generator
self.service_time_generator = service_time_generator
self.number_in_queue = nan
self.number_available_resources = nan
def reset(self):
SimEntityBase.reset(self)
self.number_in_queue = 0
self.number_available_resources = 1
def run(self):
self.notify_state_change('number_in_queue', self.number_in_queue)
self.notify_state_change('number_available_resources', self.number_available_resources)
self.schedule('enter', 0.0)
def enter(self):
self.number_in_queue += 1
self.notify_state_change('number_in_queue', self.number_in_queue)
if (self.number_available_resources > 0):
self.schedule('start', 0.0)
self.schedule('enter', self.interarrival_time_generator.generate())
def start(self):
self.number_in_queue -= 1
self.notify_state_change('number_in_queue', self.number_in_queue)
self.number_available_resources -= 1
self.notify_state_change('number_available_resources', self.number_available_resources)
self.schedule('leave', self.service_time_generator.generate())
def leave(self):
self.number_available_resources += 1
self.notify_state_change('number_available_resources', self.number_available_resources)
if self.number_in_queue > 0:
self.schedule('start', 0.0)
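def _single_resource_example():
    # Editor's sketch, not part of the original source. The generator class is a
    # hypothetical stand-in for any simkit random-variate generator that exposes
    # a generate() method; actually running the model also requires driving the
    # simkit EventList, which is outside the scope of this sketch.
    class ExponentialGenerator:
        def __init__(self, mean):
            self.mean = mean
        def generate(self):
            return rng.expovariate(1.0 / self.mean)
    return SingleResourceModel(ExponentialGenerator(1.7), ExponentialGenerator(0.9))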
class ClosingTimes(SimEntityBase):
    def __init__(self, interarrival_time_generator, service_time_generator, closing_time):
SimEntityBase.__init__(self)
self.interarrival_time_generator = interarrival_time_generator
self.service_time_generator = service_time_generator
self.closing_time = closing_time
def reset(self):
SimEntityBase.reset(self)
self.number_in_queue = 0
self.number_available_resources = 1
def run(self):
self.notify_state_change('number_in_queue', self.number_in_queue)
self.notify_state_change('number_available_resources', self.number_available_resources)
self.schedule('arrive', 0.0)
def arrive(self):
if EventList.simtime < self.closing_time:
self.schedule('enter', 0.0)
self.schedule('arrive', self.interarrival_time_generator.generate())
def enter(self):
self.number_in_queue += 1
self.notify_state_change('number_in_queue', self.number_in_queue)
if (self.number_available_resources == 1):
self.schedule('start', 0.0)
def start(self):
self.number_in_queue -= 1
self.notify_state_change('number_in_queue', self.number_in_queue)
self.number_available_resources -= 1
self.notify_state_change('number_available_resources', self.number_available_resources)
self.schedule('leave', self.service_time_generator.generate())
def leave(self):
self.number_available_resources += 1
self.notify_state_change('number_available_resources', self.number_available_resources)
if self.number_in_queue > 0:
self.schedule('start', 0.0)
class MultipleResourceModel(SimEntityBase):
def __init__(self, interarrival_time_generator, service_time_generator, number_resources):
SimEntityBase.__init__(self)
self.interarrival_time_generator = interarrival_time_generator
self.service_time_generator = service_time_generator
self.number_resources = number_resources
self.number_in_queue = nan
self.number_available_resources = nan
def reset(self):
SimEntityBase.reset(self)
self.number_in_queue = 0
self.number_available_resources = self.number_resources
def run(self):
self.notify_state_change('number_in_queue', self.number_in_queue)
self.notify_state_change('number_available_resources', self.number_available_resources)
self.schedule('enter', 0.0)
def enter(self):
self.number_in_queue += 1
self.notify_state_change('number_in_queue', self.number_in_queue)
if self.number_available_resources > 0:
self.schedule('start', 0.0)
self.schedule('enter', self.interarrival_time_generator.generate())
def start(self):
self.number_in_queue -= 1
self.notify_state_change('number_in_queue', self.number_in_queue)
self.number_available_resources -= 1
self.notify_state_change('number_available_resources', self.number_available_resources)
self.schedule('leave', self.service_time_generator.generate())
def leave(self):
self.number_available_resources += 1
self.notify_state_change('number_available_resources', self.number_available_resources)
if self.number_in_queue > 0:
self.schedule('start', 0.0)
class BatchedServiceModel(SimEntityBase):
def __init__(self, interarrival_time_generator, service_time_generator, number_resources, batch_size):
SimEntityBase.__init__(self)
self.interarrival_time_generator = interarrival_time_generator
self.service_time_generator = service_time_generator
self.number_resources = number_resources
self.batch_size = batch_size
self.number_in_queue = nan
self.number_available_resources = nan
def reset(self):
SimEntityBase.reset(self)
self.number_in_queue = 0
self.number_available_resources = self.number_resources
def run(self):
self.notify_state_change('number_in_queue', self.number_in_queue)
self.notify_state_change('number_available_resources', self.number_available_resources)
self.schedule('enter', 0.0)
def enter(self):
self.number_in_queue += 1
self.notify_state_change('number_in_queue', self.number_in_queue)
        if self.number_available_resources > 0 and self.number_in_queue >= self.batch_size:
self.schedule('start', 0.0)
self.schedule('enter', self.interarrival_time_generator.generate())
def start(self):
self.number_in_queue -= 1
self.notify_state_change('number_in_queue', self.number_in_queue)
self.number_available_resources -= 1
self.notify_state_change('number_available_resources', self.number_available_resources)
self.schedule('leave', self.service_time_generator.generate())
def leave(self):
self.number_available_resources += 1
self.notify_state_change('number_available_resources', self.number_available_resources)
        if self.number_in_queue >= self.batch_size:
self.schedule('start', 0.0)
class ReworkModel(SimEntityBase):
def __init__(self, interarrival_time_generator, service_time_generator, prob_needs_rework):
SimEntityBase.__init__(self)
self.interarrival_time_generator = interarrival_time_generator
self.service_time_generator = service_time_generator
self.prob_needs_rework = prob_needs_rework
self.number_in_queue = nan
self.number_available_resources = nan
def reset(self):
SimEntityBase.reset(self)
self.number_in_queue = 0
self.number_available_resources = 1
def run(self):
self.notify_state_change('number_in_queue', self.number_in_queue)
self.notify_state_change('number_available_resources', self.number_available_resources)
self.schedule('enter', 0.0)
def enter(self):
self.number_in_queue += 1
self.notify_state_change('number_in_queue', self.number_in_queue)
if self.number_available_resources > 0:
self.schedule('start', 0.0)
self.schedule('enter', self.interarrival_time_generator.generate())
def start(self):
self.number_in_queue -= 1
self.notify_state_change('number_in_queue', self.number_in_queue)
self.number_available_resources -= 1
self.notify_state_change('number_available_resources', self.number_available_resources)
self.schedule('leave', self.service_time_generator.generate())
def leave(self):
self.number_available_resources += 1
self.notify_state_change('number_available_resources', self.number_available_resources)
rw = rng.random()
        if self.number_in_queue > 0 and rw > self.prob_needs_rework:
self.schedule('start', 0.0)
if rw <= self.prob_needs_rework:
self.schedule('rework', 0.0)
def rework(self):
self.number_in_queue += 1;
self.notify_state_change('number_in_queue', self.number_in_queue)
if self.number_available_resources > 0:
self.schedule('start', 0.0)
class TandemQueueWithBlocking(SimEntityBase):
def __init__(self, interarrival_time_generator, number_server1, number_server2, \
service_time1_generator, service_time2_generator, buffer_size):
SimEntityBase.__init__(self)
# Parameters
self.interarrival_time_generator = interarrival_time_generator
self.number_server1 = number_server1
self.number_server2 = number_server2
self.service_time1_generator = service_time1_generator
self.service_time2_generator = service_time2_generator
self.buffer_size = buffer_size
# State variables
self.number_in_queue1 = nan
self.number_in_queue2 = nan
self.number_available_server1 = nan
self.number_available_server2 = nan
self.number_blocked = nan
def reset(self):
SimEntityBase.reset(self)
self.number_in_queue1 = 0
self.number_in_queue2 = 0
self.number_available_server1 = self.number_server1
self.number_available_server2 = self.number_server2
self.number_blocked = 0
def run(self):
self.notify_state_change('number_in_queue1', self.number_in_queue1)
self.notify_state_change('number_in_queue2', self.number_in_queue2)
self.notify_state_change('number_available_server1', self.number_available_server1)
self.notify_state_change('number_available_server2', self.number_available_server2)
self.notify_state_change('number_blocked', self.number_blocked)
self.schedule('enter1', 0.0)
def enter1(self):
self.number_in_queue1 += 1
self.notify_state_change('number_in_queue1', self.number_in_queue1)
if self.number_available_server1 > 0:
self.schedule('start1', 0.0)
self.schedule('enter1', self.interarrival_time_generator.generate())
def start1(self):
self.number_in_queue1 -= 1
self.notify_state_change('number_in_queue1', self.number_in_queue1)
self.number_available_server1 -= 1
self.notify_state_change('number_available_server1', self.number_available_server1)
self.schedule('leave1', self.service_time1_generator.generate())
def leave1(self):
self.number_blocked += 1
self.notify_state_change('number_blocked', self.number_blocked)
if self.number_in_queue2 < self.buffer_size:
self.schedule('enter2', 0.0)
def enter2(self):
self.number_available_server1 += 1
self.notify_state_change('number_available_server1', self.number_available_server1)
self.number_blocked -= 1
self.notify_state_change('number_blocked', self.number_blocked)
self.number_in_queue2 += 1;
self.notify_state_change('number_in_queue2', self.number_in_queue2)
if self.number_available_server2 > 0:
self.schedule('start2', 0.0)
if self.number_in_queue1 > 0:
self.schedule('start1', 0.0)
def start2(self):
self.number_in_queue2 -= 1;
self.notify_state_change('number_in_queue2', self.number_in_queue2)
self.number_available_server2 -= 1
self.notify_state_change('number_available_server2', self.number_available_server2)
self.schedule('leave2', self.service_time2_generator.generate())
if self.number_blocked > 0:
self.schedule('enter2', 0.0)
def leave2(self):
self.number_available_server2 += 1
self.notify_state_change('number_available_server2', self.number_available_server2)
if self.number_in_queue2 > 0:
self.schedule('start2', 0.0)
class MultipleServerQueue(SimEntityBase):
def __init__(self, interarrival_time_generator, number_servers, service_time_generator):
SimEntityBase.__init__(self)
self.interarrival_time_generator = interarrival_time_generator
self.number_servers = number_servers
self.service_time_generator = service_time_generator
self.number_arrivals = nan
self.number_available_servers = nan
self.queue = []
self.number_in_queue = nan
self.delay_in_queue = nan
self.time_in_system = nan
def reset(self):
SimEntityBase.reset(self)
self.number_arrivals = 0
self.number_available_servers = self.number_servers
self.queue = []
self.number_in_queue = 0
def run(self):
self.notify_state_change('number_arrivals', self.number_arrivals)
self.notify_state_change('number_available_servers', self.number_available_servers)
self.notify_state_change('queue', self.queue)
self.notify_state_change('number_in_queue', self.number_in_queue)
self.schedule('enter', 0.0)
def enter(self):
customer = Entity()
customer.stamp_time()
heappush(self.queue, customer);
self.notify_state_change('queue', self.queue)
self.number_in_queue = len(self.queue)
self.notify_state_change('number_in_queue', self.number_in_queue)
self.number_arrivals += 1;
self.notify_state_change('number_arrivals', self.number_arrivals)
if self.number_available_servers > 0:
self.schedule('start', 0.0)
self.schedule('enter', self.interarrival_time_generator.generate())
def start(self):
customer = heappop(self.queue)
self.notify_state_change('queue', self.queue)
self.number_in_queue = len(self.queue)
self.notify_state_change('number_in_queue', self.number_in_queue)
self.delay_in_queue = customer.elapsed_time()
self.notify_state_change('delay_in_queue', self.delay_in_queue)
self.number_available_servers -= 1
self.notify_state_change('number_available_servers', self.number_available_servers)
self.schedule('leave', self.service_time_generator.generate(), customer)
def leave(self, customer):
self.time_in_system = customer.elapsed_time()
self.notify_state_change('time_in_system', self.time_in_system)
self.number_available_servers += 1
self.notify_state_change('number_available_servers', self.number_available_servers)
if len(self.queue) > 0:
self.schedule('start', 0.0) | PypiClean |
/Frida_iOS_Hook-3.4-py3-none-any.whl/lib/listapp.py | import frida
import threading
import sys
def get_usb_iphone():
Type = 'usb'
if int(frida.__version__.split('.')[0]) < 12:
Type = 'tether'
device_manager = frida.get_device_manager()
changed = threading.Event()
def on_changed():
changed.set()
device_manager.on('changed', on_changed)
device = None
while device is None:
devices = [dev for dev in device_manager.enumerate_devices() if dev.type == Type]
if len(devices) == 0:
print('Waiting for USB device...')
changed.wait()
else:
device = devices[0]
device_manager.off('changed', on_changed)
return device
def compare_applications(a, b):
a_is_running = a.pid != 0
b_is_running = b.pid != 0
if a_is_running == b_is_running:
if a.name > b.name:
return 1
elif a.name < b.name:
return -1
else:
return 0
elif a_is_running:
return -1
else:
return 1
def cmp_to_key(mycmp):
"""Convert a cmp= function into a key= function"""
class K:
def __init__(self, obj):
self.obj = obj
def __lt__(self, other):
return mycmp(self.obj, other.obj) < 0
def __gt__(self, other):
return mycmp(self.obj, other.obj) > 0
def __eq__(self, other):
return mycmp(self.obj, other.obj) == 0
def __le__(self, other):
return mycmp(self.obj, other.obj) <= 0
def __ge__(self, other):
return mycmp(self.obj, other.obj) >= 0
def __ne__(self, other):
return mycmp(self.obj, other.obj) != 0
return K
def get_applications(device):
try:
applications = device.enumerate_applications()
except Exception as e:
sys.exit('Failed to enumerate applications: %s' % e)
return applications
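def _print_installed_apps_example():
    # Editor's sketch, not part of the original source: wait for a USB-connected
    # iOS device, then print its installed applications using list_applications
    # (defined below).
    device = get_usb_iphone()
    list_applications(device)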
def list_applications(device):
applications = get_applications(device)
if len(applications) > 0:
pid_column_width = max(map(lambda app: len('{}'.format(app.pid)), applications))
name_column_width = max(map(lambda app: len(app.name), applications))
identifier_column_width = max(map(lambda app: len(app.identifier), applications))
else:
pid_column_width = 0
name_column_width = 0
identifier_column_width = 0
header_format = '%' + str(pid_column_width) + 's ' + '%-' + str(name_column_width) + 's ' + '%-' + str(
identifier_column_width) + 's'
print(header_format % ('PID', 'Name', 'Identifier'))
print('%s %s %s' % (pid_column_width * '-', name_column_width * '-', identifier_column_width * '-'))
line_format = '%' + str(pid_column_width) + 's ' + '%-' + str(name_column_width) + 's ' + '%-' + str(
identifier_column_width) + 's'
for application in sorted(applications, key=cmp_to_key(compare_applications)):
if application.pid == 0:
print(line_format % ('-', application.name, application.identifier))
else:
print(line_format % (application.pid, application.name, application.identifier)) | PypiClean |
/BunkerSync-0.0.1.tar.gz/BunkerSync-0.0.1/src/utils.py | import os
import shutil
import subprocess
import urllib.request
import re
from distutils import dir_util
import stat
import tempfile
def onerror(func, path, exc_info):
"""
Error handler for ``shutil.rmtree``.
If the error is due to an access error (read only file)
it attempts to add write permission and then retries.
If the error is for another reason it re-raises the error.
Usage : ``shutil.rmtree(path, onerror=onerror)``
"""
# Is the error an access error?
if not os.access(path, os.W_OK):
os.chmod(path, stat.S_IWUSR)
func(path)
else:
raise
def rm_dir(path):
"""
Recursively removes a directory and all its contents.
Args:
path (str): The path of the directory to be removed.
Raises:
Exception: If an error occurs during removal and the `ignore_errors` parameter is set to `False`.
"""
if os.path.exists(path):
shutil.rmtree(path, ignore_errors=False, onerror=onerror )
pass
# def branch_browse(url):
# webUrl = urllib.request.urlopen(url)
def branch_exists(remote_repo_url, branch_name):
"""This function checks if a branch exists in a remote Git repository given its URL and branch name.
Args:
remote_repo_url (Path): The URL of the remote Git repository as a string.
branch_name (str): The name of the branch as a string.
Returns:
_type_: _description_
"""
branches = subprocess.run(['git', 'ls-remote' , remote_repo_url ],shell=False, capture_output=True).stdout
wc_l = len(re.findall('refs/heads/'+branch_name+'$', branches.decode('utf-8')))
if wc_l:
return True
else: return False
# def find_last_slash(str):
# """_summary_
# Args:
# str (_type_): _description_
# Returns:
# _type_: _description_
# """
# res = -1
# for i in range(len(str)):
# if str[i] == '/':
# res = i
# return res
# def parse_branch_name(full_ref):
# """_summary_
# Args:
# full_ref (_type_): _description_
# Returns:
# _type_: _description_
# """
# index = find_last_slash(full_ref)
# branch_name = ""
# if index > -1:
# branch_name = full_ref[index + 1:]
# return branch_name
def copy_dir(from_dir, to_dir):
"""Copy a directory from a source path to a destination path.
Args:
from_dir (str): The path of the source directory.
to_dir (str): The path of the destination directory.
"""
shutil.copytree(src=from_dir, dst=to_dir, dirs_exist_ok=True)
pass
def move_dir(src_dir, dst_dir):
"""Move the contents of the `src_dir` to the `dst_dir`.
If the `dst_dir` already exists, its contents will be replaced with the contents of `src_dir`.
Args:
src_dir (str): The source directory to move.
dst_dir (str): The destination directory to move the source directory to.
"""
shutil.copytree(src_dir, dst_dir)
if os.path.exists(src_dir):
shutil.rmtree(src_dir, ignore_errors=False, onerror=onerror )
pass
def push_to_remote(local_repo_path):
"""Pushes changes from a local Git repository to its remote counterpart.
Args:
local_repo_path (str): The path to the local Git repository.
Raises:
CalledProcessError: If a Git command fails.
"""
subprocess.run(['git', '-C',local_repo_path, 'add', '.'], shell=False)
subprocess.run(['git', '-C', local_repo_path, 'commit', '-m', 'Merge from: internal repo' ], shell=False)
subprocess.run(['git', '-C',local_repo_path, 'push' , 'origin'], shell=False)
pass
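def _branch_check_example():
    # Editor's sketch, not part of the original source; the repository URL and
    # branch name are hypothetical placeholders.
    repo_url = 'https://example.com/org/internal-repo.git'
    if branch_exists(repo_url, 'main'):
        print('branch main exists on the remote')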
def get_temp_path():
tmp_dir = tempfile.gettempdir()
return tmp_dir
pass | PypiClean |
/3DeeCellTracker-0.5.2a0-py3-none-any.whl/CellTracker/track.py | import matplotlib.pyplot as plt
import numpy as np
from sklearn.neighbors import NearestNeighbors
def pr_gls_quick(X, Y, corr, BETA=300, max_iteration=20, LAMBDA=0.1, vol=1E8):
"""
Get coherent movements from the initial matching by PR-GLS algorithm
Parameters
----------
X : numpy.ndarray
Y : numpy.ndarray
positions of two point sets
corr : numpy.ndarray
initial matching
BETA : float
max_iteration : int
LAMBDA : float
vol : float
parameters of PR-GLS
Returns
-------
P : numpy.ndarray
updated matching
T_X : numpy.ndarray
transformed positions of X
C : numpy.ndarray
coefficients for transforming positions other than X.
"""
############################################################
# initiate Gram matrix, C, sigma_square, init_match, T_X, P
############################################################
# set parameters
gamma = 0.1
# Gram matrix quick (represents basis functions for transformation)
length_X = np.size(X, axis=0)
X_tile = np.tile(X, (length_X, 1, 1))
Gram_matrix = np.exp(-np.sum(np.square(X_tile - X_tile.transpose(1, 0, 2)), axis=2) / (2 * BETA * BETA))
# Vector C includes weights for each basis function
C = np.zeros((3, length_X))
# sigma square (quick): relates to the variance of differences between
# corresponding points in T_X and Y.
length_Y = np.size(Y, axis=0)
X_tile = np.tile(X, (length_Y, 1, 1))
Y_tile = np.tile(Y, (length_X, 1, 1)).transpose(1, 0, 2)
sigma_square = np.sum(np.sum(np.square(X_tile - Y_tile), axis=2)) / (3 * length_X * length_Y)
# set initial matching
# only the most possible pairs are set with probalility of 0.9
init_match = np.ones((length_Y, length_X)) / length_X
cc_ref_tgt_temp = np.copy(corr)
for ptr_num in range(length_X):
cc_max = cc_ref_tgt_temp.max()
if cc_max < 0.5:
break
cc_max_idx = np.unravel_index(cc_ref_tgt_temp.argmax(), cc_ref_tgt_temp.shape)
init_match[cc_max_idx[0], :] = 0.1 / (length_X - 1)
init_match[cc_max_idx[0], cc_max_idx[1]] = 0.9
cc_ref_tgt_temp[cc_max_idx[0], :] = 0;
cc_ref_tgt_temp[:, cc_max_idx[1]] = 0;
# initiate T_X, which equals to X+v(X).
T_X = X.copy()
############################################################################
# iteratively update T_X, gamma, sigma_square, and P. Plot and save results
############################################################################
for iteration in range(1, max_iteration):
# calculate P (quick)
T_X_tile = np.tile(T_X, (length_Y, 1, 1))
Y_tile = np.tile(Y, (length_X, 1, 1)).transpose(1, 0, 2)
dist_square = np.sum(np.square(T_X_tile - Y_tile), axis=2)
exp_dist_square = np.exp(-dist_square / (2 * sigma_square))
P1 = init_match * exp_dist_square
denominator = np.sum(P1, axis=1) + gamma * (2 * np.pi * sigma_square) ** 1.5 / ((1 - gamma) * vol)
denominator_tile = np.tile(denominator, (length_X, 1)).transpose()
P = P1 / denominator_tile
# solve the linear equations for vector C
diag_P = np.diag(np.reshape(np.dot(np.ones((1, length_Y)), P), (length_X)))
a = np.dot(Gram_matrix, diag_P) + LAMBDA * sigma_square * np.identity(length_X)
b = np.dot(np.matrix.transpose(Y), P) - np.dot(np.matrix.transpose(X), diag_P)
a = np.matrix.transpose(a)
b = np.matrix.transpose(b)
C = np.matrix.transpose(np.linalg.solve(a, b))
# calculate T_X
T_X = np.matrix.transpose(np.matrix.transpose(X) + np.dot(C, Gram_matrix))
# update gamma and sigma square (quick)
M_P = np.sum(P)
gamma = 1 - M_P / length_Y
T_X_tile = np.tile(T_X, (length_Y, 1, 1))
dist_square = np.sum(np.square(T_X_tile - Y_tile), axis=2)
sigma_square = np.sum(P * dist_square) / (3 * M_P)
# avoid using too small values of sigma_square (the sample error should be >=1 pixel)
if sigma_square < 1:
sigma_square = 1
return P, T_X, C
def initial_matching_quick(ffn_model, ref, tgt, k_ptrs):
"""
This function compute initial matching between all pairs of points in reference and target points set.
Parameters
----------
ffn_model : keras.Model
The pretrained FFN model
ref : numpy.ndarray
The positions of the cells in the first volume
tgt : numpy.ndarray
The positions of the cells in the second volume
k_ptrs : int
The number of neighboring points used for FFN
Returns
-------
corr : numpy.ndarray
The correspondence matrix between two point sets
"""
nbors_ref = NearestNeighbors(n_neighbors=k_ptrs + 1).fit(ref)
nbors_tgt = NearestNeighbors(n_neighbors=k_ptrs + 1).fit(tgt)
ref_x_flat_batch = np.zeros((ref.shape[0], k_ptrs * 3 + 1), dtype='float32')
tgt_x_flat_batch = np.zeros((tgt.shape[0], k_ptrs * 3 + 1), dtype='float32')
for ref_i in range(ref.shape[0]):
# Generate 20 (k_ptrs) points near the specific point
# in the ref points set
distance_ref, indices_ref = nbors_ref.kneighbors(ref[ref_i:ref_i + 1, :],
return_distance=True)
mean_dist_ref = np.mean(distance_ref)
ref_x = (ref[indices_ref[0, 1:k_ptrs + 1], :] - ref[indices_ref[0, 0], :]) / mean_dist_ref
ref_x_flat = np.zeros(k_ptrs * 3 + 1)
ref_x_flat[0:k_ptrs * 3] = ref_x.reshape(k_ptrs * 3)
ref_x_flat[k_ptrs * 3] = mean_dist_ref
ref_x_flat_batch[ref_i, :] = ref_x_flat.reshape(1, k_ptrs * 3 + 1)
ref_x_flat_batch_meshgrid = np.tile(ref_x_flat_batch, (tgt.shape[0], 1, 1)).reshape(
(ref.shape[0] * tgt.shape[0], k_ptrs * 3 + 1))
for tgt_i in range(tgt.shape[0]):
distance_tgt, indices_tgt = nbors_tgt.kneighbors(tgt[tgt_i:tgt_i + 1, :],
return_distance=True)
mean_dist_tgt = np.mean(distance_tgt)
tgt_x = (tgt[indices_tgt[0, 1:k_ptrs + 1], :] - tgt[indices_tgt[0, 0], :]) / mean_dist_tgt
tgt_x_flat = np.zeros(k_ptrs * 3 + 1)
tgt_x_flat[0:k_ptrs * 3] = tgt_x.reshape(k_ptrs * 3)
tgt_x_flat[k_ptrs * 3] = mean_dist_tgt
tgt_x_flat_batch[tgt_i, :] = tgt_x_flat.reshape(1, k_ptrs * 3 + 1)
tgt_x_flat_batch_meshgrid = np.tile(tgt_x_flat_batch, (ref.shape[0], 1, 1)).transpose(1, 0, 2).reshape(
(ref.shape[0] * tgt.shape[0], k_ptrs * 3 + 1))
corr = np.reshape(ffn_model.predict([ref_x_flat_batch_meshgrid, tgt_x_flat_batch_meshgrid], batch_size=1024),
(tgt.shape[0], ref.shape[0]))
return corr
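def _matching_pipeline_example(ffn_model, ref, tgt):
    # Editor's sketch, not part of the original source: the usual pairing of the
    # two steps above. ffn_model is assumed to be a pretrained FFN keras model;
    # ref and tgt are (n, 3) arrays of cell positions. The FFN correspondence
    # matrix is then refined into coherent movements by pr_gls_quick.
    corr = initial_matching_quick(ffn_model, ref, tgt, k_ptrs=20)
    P, T_X, C = pr_gls_quick(ref, tgt, corr)
    return P, T_X, C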
def pr_gls(X,Y,corr,BETA=300, max_iteration=20, LAMBDA=0.1,vol=1E8):
"""
(Deprecated from v0.3) The old version of pr_gls_quick(). Much slower.
"""
############################################################
# initiate Gram matrix, C, sigma_square, init_match, T_X, P
############################################################
# set parameters
gamma = 0.1
# Gram matrix (represents basis functions for transformation)
length_X = np.size(X, axis=0)
Gram_matrix = np.zeros((length_X,length_X))
for idx_i in range(length_X):
for idx_j in range(length_X):
Gram_matrix[idx_i,idx_j] = np.exp(-np.sum(np.square(X[idx_i,:]-X[idx_j,:]))/
(2*BETA*BETA))
# Vector C includes weights for each basis function
C = np.zeros((3,length_X))
# sigma square: relates to the variance of differences between
# corresponding points in T_X and Y.
length_Y = np.size(Y,axis=0)
sigma_square=0
for idx_X in range(length_X):
for idx_Y in range(length_Y):
sigma_square = sigma_square + np.sum(np.square(X[idx_X,:]-Y[idx_Y,:]))
sigma_square = sigma_square/(3*length_X*length_Y)
# set initial matching
# only the most possible pairs are set with probalility of 0.9
init_match=np.ones((length_Y,length_X))/length_X
cc_ref_tgt_temp=np.copy(corr)
for ptr_num in range(length_X):
cc_max=cc_ref_tgt_temp.max()
if cc_max<0.5:
break
cc_max_idx=np.unravel_index(cc_ref_tgt_temp.argmax(),cc_ref_tgt_temp.shape)
init_match[cc_max_idx[0],:]=0.1/(length_X-1)
init_match[cc_max_idx[0],cc_max_idx[1]]=0.9
cc_ref_tgt_temp[cc_max_idx[0],:]=0;
cc_ref_tgt_temp[:,cc_max_idx[1]]=0;
# initiate T_X, which equals to X+v(X).
T_X=X.copy()
# initiate P
P=np.zeros((length_Y,length_X))
############################################################################
# iteratively update T_X, gamma, sigma_square, and P. Plot and save results
############################################################################
# loop start
for iteration in range(1,max_iteration):
# calculate P
for idx_Y in range(length_Y):
denominator=0
for idx_X in range(length_X):
P[idx_Y,idx_X]=init_match[idx_Y,idx_X]*np.exp(
-np.sum(np.square(Y[idx_Y,:]-T_X[idx_X,:]))/(2*sigma_square))
denominator=denominator+P[idx_Y,idx_X]
denominator=denominator+gamma*(2*np.pi*sigma_square)**1.5/((1-gamma)*vol)
P[idx_Y,:]=P[idx_Y,:]/denominator
# solve the linear equations for vector C
diag_P=np.diag(np.reshape(np.dot(np.ones((1,length_Y)),P),(length_X)))
a = np.dot(Gram_matrix,diag_P)+LAMBDA*sigma_square*np.identity(length_X)
b=np.dot(np.matrix.transpose(Y),P)-np.dot(np.matrix.transpose(X),diag_P)
a = np.matrix.transpose(a)
b = np.matrix.transpose(b)
C = np.matrix.transpose(np.linalg.solve(a, b))
# calculate T_X
T_X=np.matrix.transpose(np.matrix.transpose(X)+np.dot(C,Gram_matrix))
# update gamma and sigma square
M_P=np.sum(P)
gamma=1-M_P/length_Y
sigma_square=0
for idx_X in range(length_X):
for idx_Y in range(length_Y):
sigma_square=sigma_square+P[idx_Y,idx_X]*np.sum(
np.square(Y[idx_Y,:]-T_X[idx_X,:]))
sigma_square = sigma_square/(3*M_P)
# avoid using too small values of sigma_square (the sample error should be
# >=1 pixel)
if sigma_square<1:
sigma_square=1
# loop end
return [P, T_X, C]
def initial_matching(fnn_model,ref,tgt,k_ptrs):
"""
(Deprecated from v0.3) The old version of initial_matching_quick(). Much slower.
"""
nbors_ref=NearestNeighbors(n_neighbors=k_ptrs+1).fit(ref)
nbors_tgt=NearestNeighbors(n_neighbors=k_ptrs+1).fit(tgt)
corr=np.zeros((tgt.shape[0],ref.shape[0]),dtype='float32')
for ref_i in range(ref.shape[0]):
ref_x_flat_batch=np.zeros((tgt.shape[0],k_ptrs*3+1),dtype='float32')
tgt_x_flat_batch=np.zeros((tgt.shape[0],k_ptrs*3+1),dtype='float32')
# Generate 20 (k_ptrs) points near the specific point
# in the ref points set
distance_ref,indices_ref=nbors_ref.kneighbors(ref[ref_i:ref_i+1,:],
return_distance=True)
mean_dist_ref=np.mean(distance_ref)
ref_x=(ref[indices_ref[0,1:k_ptrs+1],:]-ref[indices_ref[0,0],:])/mean_dist_ref
ref_x_flat=np.zeros(k_ptrs*3+1)
ref_x_flat[0:k_ptrs*3]=ref_x.reshape(k_ptrs*3)
ref_x_flat[k_ptrs*3]=mean_dist_ref
for tgt_i in range(tgt.shape[0]):
distance_tgt,indices_tgt=nbors_tgt.kneighbors(tgt[tgt_i:tgt_i+1,:],
return_distance=True)
mean_dist_tgt=np.mean(distance_tgt)
tgt_x=(tgt[indices_tgt[0,1:k_ptrs+1],:]-tgt[indices_tgt[0,0],:])/mean_dist_tgt
tgt_x_flat=np.zeros(k_ptrs*3+1)
tgt_x_flat[0:k_ptrs*3]=tgt_x.reshape(k_ptrs*3)
tgt_x_flat[k_ptrs*3]=mean_dist_tgt
ref_x_flat_batch[tgt_i,:]=ref_x_flat.reshape(1,k_ptrs*3+1)
tgt_x_flat_batch[tgt_i,:]=tgt_x_flat.reshape(1,k_ptrs*3+1)
corr[:,ref_i]=np.reshape(fnn_model.predict([ref_x_flat_batch, tgt_x_flat_batch],batch_size=32),tgt.shape[0])
return corr
def gaussian_filter(img, z_scaling=10, smooth_sigma=5):
"""
Generate smoothed label image of cells
Parameters
----------
img : numpy.ndarray
Label image
z_scaling : int
Factor of interpolations along z axis, should be <10
smooth_sigma : float
sigma used for making Gaussian blur
Returns
-------
output_img : numpy.ndarray
Generated smoothed label image
mask : numpy.ndarray
Mask image indicating the overlapping of multiple cells (0: background; 1: one cell; >1: multiple cells)
"""
img_interp = np.repeat(img, z_scaling, axis=2)
shape_interp = img_interp.shape
output_img = np.zeros((shape_interp[0] + 10, shape_interp[1] + 10, shape_interp[2] + 10), dtype='int')
mask = output_img.copy()
    from skimage.filters import gaussian
    for label in range(1, np.max(img) + 1):
        print(f"Interpolating... cell:{label}", end="\r")
x_max, x_min, y_max, y_min, z_max, z_min, subregion_pad, voxels = _get_coordinates(label, img_interp)
percentage = 1 - np.divide(voxels, np.size(subregion_pad), dtype='float')
img_smooth = gaussian(subregion_pad, sigma=smooth_sigma, mode='constant')
threshold = np.percentile(img_smooth, percentage * 100)
cell_region_interp = img_smooth > threshold
output_img[x_min:x_max + 11, y_min:y_max + 11, z_min:z_max + 11] += cell_region_interp * label
mask[x_min:x_max + 11, y_min:y_max + 11, z_min:z_max + 11] += cell_region_interp * 1
return output_img, mask
def transform_cells(img3d, vectors3d):
"""
Move individual cells in the label image.
Parameters
----------
img3d : numpy.ndarray
Label image, each cell with different labels.
vectors3d : numpy.ndarray
The movement vectors for each cell, of dtype 'int' (movement from input img to output img)
Returns
-------
output : numpy.ndarray
Transformed label image
mask : numpy.ndarray
Overlap between different labels (if value>1)
"""
shape = np.shape(img3d)
output = np.zeros((shape),dtype=np.dtype(img3d[0,0,0]))
mask = np.zeros((shape),dtype=np.dtype(img3d[0,0,0]))
for label in range(1, img3d.max()+1):
v1 = vectors3d[label-1,0]; v2 = vectors3d[label-1,1]; v3 = vectors3d[label-1,2];
if v1>=0:
idx_1_start=0;idx_1_end=shape[0]-v1
else:
idx_1_start=-v1;idx_1_end=shape[0]
if v2>=0:
idx_2_start=0;idx_2_end=shape[1]-v2
else:
idx_2_start=-v2;idx_2_end=shape[1]
if v3>=0:
idx_3_start=0;idx_3_end=shape[2]-v3
else:
idx_3_start=-v3;idx_3_end=shape[2]
image_temp = img3d[idx_1_start:idx_1_end, idx_2_start:idx_2_end, idx_3_start:idx_3_end]
idx_label = np.where(image_temp==label)
output[idx_label[0]+idx_1_start+v1,idx_label[1]+idx_2_start+v2,idx_label[2]+idx_3_start+v3] = image_temp[idx_label]
mask[idx_label[0]+idx_1_start+v1,idx_label[1]+idx_2_start+v2,
idx_label[2]+idx_3_start+v3] = mask[idx_label[0]+idx_1_start+v1,
idx_label[1]+idx_2_start+v2,idx_label[2]+idx_3_start+v3]+1
return output, mask
def plot_arrow(ax, x1, y1, x2, y2):
"""Draw the arrows from (x1, y1) to (x2,y2)"""
return ax.annotate("",
xy=(x2, y2), xycoords='axes fraction',
xytext=(x1, y1), textcoords='axes fraction',
arrowprops=dict(arrowstyle="wedge", color="C0"))
def plot_tracking_2d(T_ref, ax, draw_point, ref_ptrs, tgt_ptrs, x_axis, y_axis, sizes):
"""Draw the tracking process between two point sets with layer-based coordinates"""
element = []
ax.invert_yaxis()
if draw_point:
element.append(ax.scatter(ref_ptrs[:, x_axis], ref_ptrs[:, y_axis], facecolors='none', edgecolors='r'))
element.append(ax.plot(tgt_ptrs[:, x_axis], tgt_ptrs[:, y_axis], 'bx')[0])
length_X = np.size(ref_ptrs, axis=0)
for ptr_num in range(length_X):
element.append(plot_arrow(ax,
x1=ref_ptrs[ptr_num, x_axis]/sizes[0], y1=1-ref_ptrs[ptr_num, y_axis]/sizes[1],
x2=T_ref[ptr_num, x_axis]/sizes[0], y2=1-T_ref[ptr_num, y_axis]/sizes[1]))
ax.axis('equal')
return element
def plot_tracking_2d_realcoord(T_ref, ax, draw_point, ref_ptrs, tgt_ptrs, x_axis, y_axis):
"""Draw the tracking process between two point sets with real-resolution coordinates"""
ax.invert_yaxis()
element = []
if draw_point:
element.append(ax.scatter(ref_ptrs[:, x_axis], ref_ptrs[:, y_axis], facecolors='none', edgecolors='r'))
element.append(ax.plot(tgt_ptrs[:, x_axis], tgt_ptrs[:, y_axis], 'bx')[0])
length_X = np.size(ref_ptrs, axis=0)
for ptr_num in range(length_X):
element.append(ax.arrow(
x=ref_ptrs[ptr_num, x_axis], y=ref_ptrs[ptr_num, y_axis],
dx=T_ref[ptr_num, x_axis] - ref_ptrs[ptr_num, x_axis],
dy=T_ref[ptr_num, y_axis] - ref_ptrs[ptr_num, y_axis], color="C0", length_includes_head=True,
head_length=4, head_width=3))
ax.axis('equal')
return element
def tracking_plot_xy(ax, ref_ptrs, tgt_ptrs, T_ref, yx_sizes, draw_point=True, layercoord=False):
"""Draw the tracking process between two point sets in x-y plane"""
x_axis=1
y_axis=0
if layercoord:
element = plot_tracking_2d(T_ref, ax, draw_point, ref_ptrs, tgt_ptrs, x_axis, y_axis, yx_sizes)
else:
element = plot_tracking_2d_realcoord(T_ref, ax, draw_point, ref_ptrs, tgt_ptrs, x_axis, y_axis)
return element
def tracking_plot_zx(ax, ref_ptrs, tgt_ptrs, T_ref, yz_sizes, draw_point=True, layercoord=True):
"""Draw the tracking process between two point sets in z-x plane"""
x_axis=1
y_axis=2
if layercoord:
element = plot_tracking_2d(T_ref, ax, draw_point, ref_ptrs, tgt_ptrs, x_axis, y_axis, yz_sizes)
else:
element = plot_tracking_2d_realcoord(T_ref, ax, draw_point, ref_ptrs, tgt_ptrs, x_axis, y_axis)
return element
def FFN_matching_plot(ref_ptrs, tgt_ptrs, initial_match_score):
"""(Deprecated from v0.3) Draw the FFN_matching process"""
length_ref_ptrs = np.size(ref_ptrs, axis=0)
tgt_ptrs_y_bias = tgt_ptrs.copy()
bias = (np.max(tgt_ptrs[:,0])-np.min(tgt_ptrs[:,0]))*2
tgt_ptrs_y_bias[:,0] = tgt_ptrs_y_bias[:,0]+bias
plt.ion()
fig = plt.figure(figsize=(9,9))
plt.scatter(ref_ptrs[:,1],-ref_ptrs[:,0],facecolors='none',edgecolors='r')
plt.plot(tgt_ptrs_y_bias[:,1],-tgt_ptrs_y_bias[:,0],'x')
plt.axis('equal')
cc_ref_tgt_temp=np.copy(initial_match_score)
for ptr_num in range(length_ref_ptrs):
cc_max=cc_ref_tgt_temp.max()
if cc_max<0.5:
break
cc_max_idx=np.unravel_index(cc_ref_tgt_temp.argmax(),cc_ref_tgt_temp.shape)
plt.plot([ref_ptrs[cc_max_idx[1],1],tgt_ptrs_y_bias[cc_max_idx[0],1]],
[-ref_ptrs[cc_max_idx[1],0],-tgt_ptrs_y_bias[cc_max_idx[0],0]],'r-')
cc_ref_tgt_temp[cc_max_idx[0],:]=0
cc_ref_tgt_temp[:,cc_max_idx[1]]=0
return fig
def get_subregions(label_image, num):
"""
Get individual regions of segmented cells
Parameters
----------
label_image : numpy.ndarray
Image of segmented cells
num : int
Number of cells
Returns
-------
region_list : list
Cropped images of each cell
region_width : list
Width of each cell in x,y,and z axis
region_coord_min : list
Minimum coordinates of each element in region list
"""
region_list = []
region_width = []
region_coord_min = []
for label in range(1, num + 1):
if label < num:
print(f"Calculating subregions... cell: {label}", end="\r")
else:
print(f"Calculating subregions... cell: {label}")
x_max, x_min, y_max, y_min, z_max, z_min = _get_coordinates(label, label_image, get_subregion=False)
region_list.append(label_image[x_min:x_max + 1, y_min:y_max + 1, z_min:z_max + 1] == label)
region_width.append([x_max + 1 - x_min, y_max + 1 - y_min, z_max + 1 - z_min])
region_coord_min.append([x_min, y_min, z_min])
return region_list, region_width, region_coord_min
def _get_coordinates(label, label_image, get_subregion=True):
"""
Get the coordinates of a specific label
Parameters
----------
label : int
The number of the cell label
label_image :
The label image
get_subregion : bool
If True, return the image of the subregion and its size
Returns
-------
x_max : float
x_min : float
y_max : float
y_min : float
z_max : float
z_min : float
Coordinates for the subregion
subregion : numpy.ndarray
The subregion containing the label
np.size(region[0]) : int
The size of the subregion
"""
region = np.where(label_image == label)
x_max, x_min = np.max(region[0]), np.min(region[0])
y_max, y_min = np.max(region[1]), np.min(region[1])
z_max, z_min = np.max(region[2]), np.min(region[2])
if not get_subregion:
return x_max, x_min, y_max, y_min, z_max, z_min
else:
subregion = np.zeros((x_max-x_min+11,y_max-y_min+11,z_max-z_min+11))
subregion[region[0] - x_min + 5, region[1] - y_min + 5, region[2] - z_min + 5] = 0.5
return x_max, x_min, y_max, y_min, z_max, z_min, subregion, np.size(region[0])
def get_reference_vols(ensemble, vol, adjacent=False):
"""
Get the reference volumes to calculate multiple prediction from which
Parameters
----------
ensemble : int
The maximum number of predictions
vol : int
The current volume number at which the prediction was made
adjacent : bool
If True, get reference volumes from adjacent previous volumes. If False, from distributed previous volumes
Returns
-------
vols_list : list
The list of the reference volume numbers
"""
if not ensemble:
return [vol - 1]
if vol - 1 < ensemble:
vols_list = list(range(1, vol))
else:
if adjacent:
vols_list = list(range(vol - ensemble, vol))
else:
vols_list = get_remote_vols(ensemble, vol)
return vols_list
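def _reference_vols_example():
    # Editor's sketch, not part of the original source: worked examples of the
    # reference-volume selection above. With ensemble=3 and vol=10 the
    # distributed choice uses interval = (10 - 1) // 3 = 3 and
    # start = (10 - 1) % 3 + 1 = 1, giving volumes [1, 4, 7]; the adjacent
    # choice gives [7, 8, 9].
    assert get_reference_vols(3, 10) == [1, 4, 7]
    assert get_reference_vols(3, 10, adjacent=True) == [7, 8, 9]
    return get_reference_vols(0, 5)  # ensemble disabled -> just the previous volume, [4]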
def get_remote_vols(ensemble, vol):
"""Get distributed previous volumes"""
interval = (vol - 1) // ensemble
start = np.mod(vol - 1, ensemble) + 1
vols_list = list(range(start, vol - interval+1, interval))
return vols_list | PypiClean |
/ArabicOcr-1.1.6.tar.gz/ArabicOcr-1.1.6/README.md | # ArabicOcr Package to convert any Arabic image text to text by ocr techniques
## About
A Python package to convert Arabic images to text.
# Installation
```
pip install ArabicOcr
or, in Google Colab:
!pip install ArabicOcr
```
## Usage
```
from ArabicOcr import arabicocr
```
```
image_path = '1.jpg'
out_image = 'out.jpg'
results = arabicocr.arabic_ocr(image_path, out_image)
print(results)

words = []
for i in range(len(results)):
    word = results[i][1]
    words.append(word)

with open('file.txt', 'w', encoding='utf-8') as myfile:
    myfile.write(str(words))

import cv2
img = cv2.imread('out.jpg', cv2.IMREAD_UNCHANGED)
cv2.imshow("arabic ocr", img)
cv2.waitKey(0)
```
## Tutorial
You can see a tutorial in Colab:
https://colab.research.google.com/drive/1ay5KT9Za340_kN7fhS2xuJX8suCigaF6?usp=sharing
| PypiClean |
/LinkChecker-10.2.1-py3-none-any.whl/linkcheck/parser/__init__.py | from .. import strformat, url as urlutil
from ..htmlutil import linkparse
from ..bookmarks import firefox
def parse_url(url_data):
"""Parse a URL."""
if url_data.is_directory():
# both ftp and file links represent directories as HTML data
key = "html"
elif (
url_data.is_file()
and firefox.has_sqlite
and firefox.extension.search(url_data.url)
):
key = "firefox"
elif url_data.scheme == "itms-services":
key = "itms_services"
else:
# determine parse routine according to content types
mime = url_data.content_type
key = url_data.ContentMimetypes[mime]
funcname = "parse_" + key
if funcname in globals():
globals()[funcname](url_data)
else:
url_data.aggregate.plugin_manager.run_parser_plugins(url_data, pagetype=key)
def parse_html(url_data):
"""Parse into HTML content and search for URLs to check.
Found URLs are added to the URL queue.
"""
linkparse.find_links(url_data.get_soup(), url_data.add_url, linkparse.LinkTags)
def parse_opera(url_data):
"""Parse an opera bookmark file."""
from ..bookmarks.opera import parse_bookmark_data
for url, name, lineno in parse_bookmark_data(url_data.get_content()):
url_data.add_url(url, line=lineno, name=name)
def parse_chromium(url_data):
"""Parse a Chromium or Google Chrome bookmark file."""
from ..bookmarks.chromium import parse_bookmark_data
for url, name in parse_bookmark_data(url_data.get_content()):
url_data.add_url(url, name=name)
def parse_safari(url_data):
"""Parse a Safari bookmark file."""
from ..bookmarks.safari import parse_bookmark_data
for url, name in parse_bookmark_data(url_data.get_raw_content()):
url_data.add_url(url, name=name)
def parse_text(url_data):
"""Parse a text file with one url per line; comment and blank
lines are ignored."""
lineno = 0
for line in url_data.get_content().splitlines():
lineno += 1
line = line.strip()
if not line or line.startswith('#'):
continue
url_data.add_url(line, line=lineno)
def parse_css(url_data):
"""
Parse a CSS file for url() patterns.
"""
lineno = 0
linkfinder = linkparse.css_url_re.finditer
strip_comments = linkparse.strip_c_comments
for line in strip_comments(url_data.get_content()).splitlines():
lineno += 1
for mo in linkfinder(line):
column = mo.start("url")
url = strformat.unquote(mo.group("url").strip())
url_data.add_url(url, line=lineno, column=column)
def parse_swf(url_data):
"""Parse a SWF file for URLs."""
linkfinder = linkparse.swf_url_re.finditer
for mo in linkfinder(url_data.get_raw_content()):
# We're scraping binary data for anything that looks like an URL using
# a regex that matches only ASCII characters. Any non-ASCII characters
# in the URL are expected to be %-encoded.
url = mo.group().decode('ascii')
url_data.add_url(url)
def parse_wml(url_data):
"""Parse into WML content and search for URLs to check.
Found URLs are added to the URL queue.
"""
linkparse.find_links(url_data.get_soup(), url_data.add_url, linkparse.WmlTags)
def parse_firefox(url_data):
"""Parse a Firefox3 bookmark file."""
filename = url_data.get_os_filename()
for url, name in firefox.parse_bookmark_file(filename):
url_data.add_url(url, name=name)
def parse_itms_services(url_data):
"""Get "url" CGI parameter value as child URL."""
query = url_data.urlparts[3]
for k, v, sep in urlutil.parse_qsl(
query, encoding=url_data.encoding, keep_blank_values=True
):
if k == "url":
url_data.add_url(v)
break
from .sitemap import parse_sitemap, parse_sitemapindex # noqa: F401 | PypiClean |
/Linq-0.3.1-py3-none-any.whl/Linq-0.3.1.dist-info/DESCRIPTION.rst | Linq.py
=======
|Build Status| |License| |codecov| |Coverage Status| |PyPI version|
Install
-------
::
pip install -U Linq
Additional: Some magic here: `Mix Linq with
Pipe <https://github.com/Xython/Linq.py/blob/master/using_pipe.md>`__
Here is an example to get top 10 frequent pixels in a picture.
.. code:: python
from linq import Flow
import numpy as np
def most_frequent(arr: np.ndarray) -> np.ndarray:
return Flow(arr.flatten()) \
.GroupBy(lambda _: _) \
.Then(lambda x: x.items()) \
.Map(lambda k, v: (k, len(v))) \
.Sorted(by=lambda _, v: -v) \
.Take(10) \
.Map(lambda k, _: k) \
.ToList() \
.Then(np.array).Unboxed()
About Linq
----------
| The well-known EDSL in .NET, ``Language Integrated Query``, in my
  opinion, is one of the best designs in the .NET environment.
| Here is an example of C# Linq.
.. code:: c#
// Calculate MSE loss.
/// <param name="Prediction"> the prediction of the neuron network</param>
/// <param name="Expected"> the expected target of the neuron network</param>
    Prediction.Zip(Expected, (pred, expected) => Math.Pow(pred - expected, 2)).Average()
It's so human readable and it doesn't cost much.
- Reference:
- Microsoft .NET general introduction => `LINQ: .NET
Language-Integrated
Query <https://msdn.microsoft.com/en-us/library/bb308959.aspx>`__.
- Wikipedia => `Language Integrated
Query <https://en.wikipedia.org/wiki/Language_Integrated_Query>`__.
And there are many scenarios that feel awkward to a Python programmer,
where using ``Linq`` might help a lot.
Awkward Scenes in Python
------------------------
.. code:: python
seq1 = range(100)
seq2 = range(100, 200)
zipped = zip(seq1, seq2)
mapped = map(lambda ab: ab[0] / ab[1], zipped)
grouped = dict();
group_fn = lambda x: x // 0.2
for e in mapped:
group_id = group_fn(e)
if group_id not in grouped:
grouped[group_id] = [e]
continue
grouped[group_id].append(e)
for e in grouped.items():
print(e)
The code seems too long...
Now we extract the function ``group_by``:
.. code:: python
def group_by(f, container):
grouped = dict()
for e in container:
group_id = f(e)
if group_id not in grouped:
grouped[group_id] = [e]
continue
grouped[group_id].append(e)
return grouped
    res = group_by(lambda x: x//0.2, map(lambda ab: ab[0]/ab[1], zip(seq1, seq2)))
Okay, it's not wrong, however, it makes me upset: why do I have to
write such ugly code?
**Now, let us try Linq!**
.. code:: python
from linq import Flow, extension_std
seq = Flow(range(100))
res = seq.Zip(range(100, 200)).Map(lambda fst, snd : fst/snd).GroupBy(lambda num: num//0.2).Unboxed()
How does `Linq.py <https://github.com/Xython/Linq.py>`__ work?
--------------------------------------------------------------
| There is a core class object, ``linq.core.flow.Flow``, which just has
one member ``stream``.
| When you want to get a specific extension method from a ``Flow`` object,
  the ``type`` of its ``stream`` member is used to look up whether the
  extension method exists.
| In other words, extension methods are bound to the type (precisely,
  ``{type.__module__}.{type.__name__}``).
.. code:: python
class Flow:
__slots__ = ['stream']
def __init__(self, sequence):
self.stream = sequence
def __getattr__(self, k):
for cls in self.stream.__class__.__mro__:
namespace = Extension['{}.{}'.format(cls.__module__, cls.__name__)]
if k in namespace:
return partial(namespace[k], self)
raise NameError(
"No extension method named `{}` for {}.".format(
k, '{}.{}'.format(object.__module__, object.__name__)))
def __str__(self):
return self.stream.__str__()
def __repr__(self):
return self.__str__()
Extension Method
----------------
Here are three methods for you to do so.
- Firstly, you can use ``extension_std`` to add extension methods for
all Flow objects.
- Next, you use ``extension_class(cls: type)`` to add extension methods
for all Flow objects whose member ``stream``'s type is named
   ``{cls.__module__}.{cls.__name__}``.
- Finally, you can use
   ``extension_class_name(cls_name: str, of_module='builtins')`` to add
extension methods for all Flow objects whose member ``stream``'s type
   is named ``{of_module}.{cls_name}``.
   (This way of making extension methods exists for the **implicit types** in
   Python, each of which cannot be referenced except through its instances'
   meta member ``__class__``.)
.. code:: python
@extension_std # For all Flow objects
def Add(self, i):
        return Flow(self.stream + (i.stream if isinstance(i, Flow) else i))
@extension_class(int) # Just for type `int`
def Add(self, i):
        return Flow(self.stream + (i.stream if isinstance(i, Flow) else i))
@extension_class_name('int', of_module=int.__module__) # Also for type `int`.
def Add(self, i):
        return Flow(self.stream + (i.stream if isinstance(i, Flow) else i))
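
For illustration, here is a minimal usage sketch of the ``Add`` extension
defined above (assuming one of the registrations shown has been applied):

.. code:: python

    from linq import Flow

    # `Add` is resolved through Flow.__getattr__ via the extension registry.
    assert Flow(1).Add(2).Unboxed() == 3
    assert Flow(1).Add(Flow(2)).Unboxed() == 3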
Documents of Standard Extension Methods
---------------------------------------
Note: Docs haven't been finished yet.
- General(can be used by all Flow objects)
- `Unboxed <https://github.com/Xython/Linq.py/blob/master/docs/general.md#unboxed>`__
- `Sum <https://github.com/Xython/Linq.py/blob/master/docs/general.md#sum>`__
- `Enum <https://github.com/Xython/Linq.py/blob/master/docs/general.md#enum>`__
- `Map <https://github.com/Xython/Linq.py/blob/master/docs/general.md#map>`__
- `Reduce <https://github.com/Xython/Linq.py/blob/master/docs/general.md#reduce>`__
- `Then <https://github.com/Xython/Linq.py/blob/master/docs/general.md#then>`__
- `Each <https://github.com/Xython/Linq.py/blob/master/docs/general.md#each>`__
- `Aggregate <https://github.com/Xython/Linq.py/blob/master/docs/general.md#aggregate>`__
- `Zip <https://github.com/Xython/Linq.py/blob/master/docs/general.md#zip>`__
- `Sorted <https://github.com/Xython/Linq.py/blob/master/docs/general.md#sorted>`__
- `ArgSorted <https://github.com/Xython/Linq.py/blob/master/docs/general.md#argsorted>`__
- `Group <https://github.com/Xython/Linq.py/blob/master/docs/general.md#group>`__
- `GroupBy <https://github.com/Xython/Linq.py/blob/master/docs/general.md#groupby>`__
- `Take <https://github.com/Xython/Linq.py/blob/master/docs/general.md#take>`__
- `TakeWhile <https://github.com/Xython/Linq.py/blob/master/docs/general.md#takewhile>`__
- `Drop\|Skip <https://github.com/Xython/Linq.py/blob/master/docs/general.md#drop%7Cskip>`__
- `Concat <https://github.com/Xython/Linq.py/blob/master/docs/general.md#concat>`__
- `ToList <https://github.com/Xython/Linq.py/blob/master/docs/general.md#tolist>`__
- `ToTuple <https://github.com/Xython/Linq.py/blob/master/docs/general.md#totuple>`__
- `ToDict <https://github.com/Xython/Linq.py/blob/master/docs/general.md#todict>`__
- `ToSet <https://github.com/Xython/Linq.py/blob/master/docs/general.md#toset>`__
- `All <https://github.com/Xython/Linq.py/blob/master/docs/general.md#all>`__
- `Any <https://github.com/Xython/Linq.py/blob/master/docs/general.md#any>`__
- List
- `Extended <https://github.com/Xython/Linq.py/blob/master/docs/list.md#extended>`__
- `Extend <https://github.com/Xython/Linq.py/blob/master/docs/list.md#extend>`__
- `Sort <https://github.com/Xython/Linq.py/blob/master/docs/list.md#sort>`__
- `Reversed <https://github.com/Xython/Linq.py/blob/master/docs/list.md#reversed>`__
- `Reverse <https://github.com/Xython/Linq.py/blob/master/docs/list.md#reverse>`__
- Set
- `Intersects <https://github.com/Xython/Linq.py/blob/master/docs/set.md#intersects>`__
- `Union <https://github.com/Xython/Linq.py/blob/master/docs/set.md#union>`__
How to Contribute
-----------------
- Design the `standard
library <https://github.com/Xython/Linq.py/tree/master/linq/standard>`__
for `Linq.py <https://github.com/Xython/Linq.py>`__.
- Write documents for the standard library and tutorials about how to
use `Linq.py <https://github.com/Xython/Linq.py>`__.
- Join `LinqPy Room <https://gitter.im/LinqPy/Lobby>`__ to discuss
about any aspects of `Linq.py <https://github.com/Xython/Linq.py>`__.
Feel free to open pull requests here.
.. |Build Status| image:: https://travis-ci.org/Xython/Linq.py.svg?branch=master
:target: https://travis-ci.org/Xython/Linq.py
.. |License| image:: https://img.shields.io/badge/license-MIT-yellow.svg
:target: https://github.com/Xython/Linq.py/blob/master/LICENSE
.. |codecov| image:: https://codecov.io/gh/Xython/Linq.py/branch/master/graph/badge.svg
:target: https://codecov.io/gh/Xython/Linq.py
.. |Coverage Status| image:: https://coveralls.io/repos/github/Xython/Linq.py/badge.svg?branch=master
:target: https://coveralls.io/github/Xython/Linq.py?branch=master
.. |PyPI version| image:: https://img.shields.io/pypi/v/Linq.svg
:target: https://pypi.python.org/pypi/Linq
| PypiClean |
/IPython-Dashboard-0.1.5.tar.gz/IPython-Dashboard-0.1.5/dashboard/static/js/theme-sqlserver.js | define("ace/theme/sqlserver",["require","exports","module","ace/lib/dom"], function(require, exports, module) {
exports.isDark = false;
exports.cssClass = "ace-sqlserver";
exports.cssText = ".ace-sqlserver .ace_gutter {\
background: #ebebeb;\
color: #333;\
overflow: hidden;\
}\
.ace-sqlserver .ace_print-margin {\
width: 1px;\
background: #e8e8e8;\
}\
.ace-sqlserver {\
background-color: #FFFFFF;\
color: black;\
}\
.ace-sqlserver .ace_identifier {\
color: black;\
}\
.ace-sqlserver .ace_keyword {\
color: #0000FF;\
}\
.ace-sqlserver .ace_numeric {\
color: black;\
}\
.ace-sqlserver .ace_storage {\
color: #11B7BE;\
}\
.ace-sqlserver .ace_keyword.ace_operator,\
.ace-sqlserver .ace_lparen,\
.ace-sqlserver .ace_rparen,\
.ace-sqlserver .ace_punctuation {\
color: #808080;\
}\
.ace-sqlserver .ace_set.ace_statement {\
color: #0000FF;\
text-decoration: underline;\
}\
.ace-sqlserver .ace_cursor {\
color: black;\
}\
.ace-sqlserver .ace_invisible {\
color: rgb(191, 191, 191);\
}\
.ace-sqlserver .ace_constant.ace_buildin {\
color: rgb(88, 72, 246);\
}\
.ace-sqlserver .ace_constant.ace_language {\
color: #979797;\
}\
.ace-sqlserver .ace_constant.ace_library {\
color: rgb(6, 150, 14);\
}\
.ace-sqlserver .ace_invalid {\
background-color: rgb(153, 0, 0);\
color: white;\
}\
.ace-sqlserver .ace_support.ace_function {\
color: #FF00FF;\
}\
.ace-sqlserver .ace_support.ace_constant {\
color: rgb(6, 150, 14);\
}\
.ace-sqlserver .ace_class {\
color: #008080;\
}\
.ace-sqlserver .ace_support.ace_other {\
color: #6D79DE;\
}\
.ace-sqlserver .ace_variable.ace_parameter {\
font-style: italic;\
color: #FD971F;\
}\
.ace-sqlserver .ace_comment {\
color: #008000;\
}\
.ace-sqlserver .ace_constant.ace_numeric {\
color: black;\
}\
.ace-sqlserver .ace_variable {\
color: rgb(49, 132, 149);\
}\
.ace-sqlserver .ace_xml-pe {\
color: rgb(104, 104, 91);\
}\
.ace-sqlserver .ace_support.ace_storedprocedure {\
color: #800000;\
}\
.ace-sqlserver .ace_heading {\
color: rgb(12, 7, 255);\
}\
.ace-sqlserver .ace_list {\
color: rgb(185, 6, 144);\
}\
.ace-sqlserver .ace_marker-layer .ace_selection {\
background: rgb(181, 213, 255);\
}\
.ace-sqlserver .ace_marker-layer .ace_step {\
background: rgb(252, 255, 0);\
}\
.ace-sqlserver .ace_marker-layer .ace_stack {\
background: rgb(164, 229, 101);\
}\
.ace-sqlserver .ace_marker-layer .ace_bracket {\
margin: -1px 0 0 -1px;\
border: 1px solid rgb(192, 192, 192);\
}\
.ace-sqlserver .ace_marker-layer .ace_active-line {\
background: rgba(0, 0, 0, 0.07);\
}\
.ace-sqlserver .ace_gutter-active-line {\
background-color: #dcdcdc;\
}\
.ace-sqlserver .ace_marker-layer .ace_selected-word {\
background: rgb(250, 250, 255);\
border: 1px solid rgb(200, 200, 250);\
}\
.ace-sqlserver .ace_meta.ace_tag {\
color: #0000FF;\
}\
.ace-sqlserver .ace_string.ace_regex {\
color: #FF0000;\
}\
.ace-sqlserver .ace_string {\
color: #FF0000;\
}\
.ace-sqlserver .ace_entity.ace_other.ace_attribute-name {\
color: #994409;\
}\
.ace-sqlserver .ace_indent-guide {\
background: url(\"data:image/png;base64,iVBORw0KGgoAAAANSUhEUgAAAAEAAAACCAYAAACZgbYnAAAAE0lEQVQImWP4////f4bLly//BwAmVgd1/w11/gAAAABJRU5ErkJggg==\") right repeat-y;\
}\
";
var dom = require("../lib/dom");
dom.importCssString(exports.cssText, exports.cssClass);
}); | PypiClean |
/Glances-3.4.0.3.tar.gz/Glances-3.4.0.3/docs/gw/statsd.rst | .. _statsd:
StatsD
======
You can export statistics to a ``StatsD`` server (welcome to Graphite!).
The connection should be defined in the Glances configuration file as
follows:
.. code-block:: ini
[statsd]
host=localhost
port=8125
prefix=glances
.. note:: The ``prefix`` is optional (``glances`` by default)
and run Glances with:
.. code-block:: console
$ glances --export statsd
Glances will generate stats as:
::
'glances.cpu.user': 12.5,
'glances.cpu.total': 14.9,
'glances.load.cpucore': 4,
'glances.load.min1': 0.19,
...
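
If you want to inspect what is actually sent before wiring up a real
StatsD/Graphite stack, a throwaway UDP listener is enough. This is a
minimal sketch that assumes the default ``localhost:8125`` endpoint from
the configuration above:

.. code-block:: python

    # Print every StatsD datagram Glances sends (Ctrl-C to stop).
    import socket

    sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    sock.bind(("localhost", 8125))
    while True:
        data, _ = sock.recvfrom(4096)
        print(data.decode())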
| PypiClean |
/Flask-Fixtures-0.3.8.tar.gz/Flask-Fixtures-0.3.8/README.rst | Flask-Fixtures
==============
A simple library that allows you to add database fixtures for your unit
tests using nothing but JSON or YAML.
Installation
------------
Installing Flask-Fixtures is simple; just do a typical pip install like
so:
::
pip install flask-fixtures
If you are going to use JSON as your data serialization format, you
should also consider installing the dateutil package since it will
add much more powerful and flexible parsing of dates and times.
To install the library from source simply download the source code, or
check it out if you have git installed on your system, then just run the
install command.
::
git clone https://github.com/croach/Flask-Fixtures.git
cd /path/to/flask-fixtures
python setup.py install
Setup
-----
To setup the library, you simply need to tell Flask-Fixtures where it
can find the fixtures files for your tests. Fixtures can reside anywhere
on the file system, but by default, Flask-Fixtures looks for these files
in a directory called ``fixtures`` in your app's root directory. To add
more directories to the list to be searched, just add an attribute
called ``FIXTURES_DIRS`` to your app's config object. This attribute
should be a list of strings, where each string is a path to a fixtures
directory. Absolute paths are added as is, but relative paths will be
relative to your app's root directory.
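
For example, a test configuration that searches two extra directories
(the paths below are purely illustrative) could look like this:

.. code:: python

    # myapp/config.py
    class TestConfig(object):
        SQLALCHEMY_DATABASE_URI = 'sqlite://'
        # One absolute path and one path relative to the app's root directory.
        FIXTURES_DIRS = ['/opt/shared/fixtures', 'tests/fixtures']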
Once you have configured the extension, you can begin adding fixtures
for your tests.
Adding Fixtures
---------------
To add a set of fixtures, you simply add any number of JSON or YAML
files describing the individual fixtures to be added to your test
database into one of the directories you specified in the
``FIXTURES_DIRS`` attribute, or into the default fixtures directory. As
an example, I'm going to assume we have a Flask application with the
following directory structure.
::
/myapp
__init__.py
config.py
models.py
/fixtures
authors.json
The ``__init__.py`` file will be responsible for creating our Flask
application object.
.. code:: python
# myapp/__init__.py
from flask import Flask
app = Flask(__name__)
The ``config.py`` object holds our test configuration file.
.. code:: python
# myapp/config.py
class TestConfig(object):
SQLALCHEMY_DATABASE_URI = 'sqlite://'
testing = True
debug = True
And, finally, inside of the ``models.py`` files we have the following
database models.
.. code:: python
# myapp/models.py
from flask_sqlalchemy import SQLAlchemy
from myapp import app
db = SQLAlchemy(app)
class Author(db.Model):
id = db.Column(db.Integer, primary_key=True)
first_name = db.Column(db.String(30))
last_name = db.Column(db.String(30))
class Book(db.Model):
id = db.Column(db.Integer, primary_key=True)
title = db.Column(db.String(200))
author_id = db.Column(db.Integer, db.ForeignKey('author.id'))
author = db.relationship('Author', backref='books')
Given the model classes above, if we wanted to mock up some data for our
database, we could do so in single file, or we could even split our
fixtures into multiple files each corresponding to a single model class.
For this simple example, we'll go with one file that we'll call
``authors.json``.
A fixtures file contains a list of objects. Each object contains a key
called ``records`` that holds another list of objects each representing
either a row in a table, or an instance of a model. If you wish to work
with tables, you'll need to specify the name of the table with the
``table`` key. If you'd prefer to work with models, specify the
fully-qualified class name of the model using the ``model`` key. Once
you've specified the table or model you want to work with, you'll need
to specify the data associated with each table row, or model instance.
Each object in the ``records`` list will hold the data for a single row
or model. The example below is the JSON for a single author record and a
few books associated with that author. Create a file called
``myapp/fixtures/authors.json`` and copy and paste the fixtures JSON
below into that file.
.. code:: json
[
{
"table": "author",
"records": [{
"id": 1,
"first_name": "William",
"last_name": "Gibson",
}]
},
{
"model": "myapp.models.Book",
"records": [{
"title": "Neuromancer",
"author_id": 1
},
{
"title": "Count Zero",
"author_id": 1
},
{
"title": "Mona Lisa Overdrive",
"author_id": 1
}]
}
]
Another option, if you have `PyYAML <http://pyyaml.org/>`__ installed,
is to write your fixtures using the YAML syntax instead of JSON.
Personally, I prefer to use YAML; I find its syntax is easier to read,
and I find the ability to add comments to my fixtures to be invaluable.
If you'd prefer to use YAML, I've added a version of the authors.json
file written in YAML below. Just copy and paste it into a file called
``myapp/fixtures/authors.yaml`` in place of creating the JSON file
above.
.. code:: yaml
- table: author
records:
- id: 1
first_name: William
last_name: Gibson
- model: myapp.models.Book
records:
- title: Neuromancer
author_id: 1
published_date: 1984-07-01
- title: Count Zero
author_id: 1
published_date: 1986-03-01
        - title: Mona Lisa Overdrive
author_id: 1
published_date: 1988-10-01
After reading over the previous section, you might be asking yourself
why the library supports two methods for adding records to the database.
There are a few good reasons for supporting both tables and models when
creating fixtures. Using tables is faster, since we can take advantage
of SQLAlchemy's bulk insert to add several records at once. However, to
do so, you must first make sure that the records list is homogeneous.
**In other words, every object in the ``records`` list must have the
same set of key/value pairs, otherwise the bulk insert will not work.**
Using models, however, allows you to have a heterogeneous list of record
objects.
The other reason you may want to use models instead of tables is that
you'll be able to take advantage of any python-level defaults, checks,
etc. that you have setup on the model. Using a table, bypasses the model
completely and inserts the data directly into the database, which means
you'll need to think on a lower level when creating table-based
fixtures.
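
As a short illustration of that difference, the model-based fixture below
mixes records with different sets of keys, which works for models but
would break a table-based bulk insert (a sketch reusing the ``Book`` model
from above):

.. code:: yaml

    - model: myapp.models.Book
      records:
        - title: Neuromancer
          author_id: 1
        # This record omits author_id; models tolerate that, tables do not.
        - title: Count Zero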
Usage
-----
To use Flask-Fixtures in your unit tests, you'll need to make sure your
test class inherits from ``FixturesMixin`` and that you've specified a
list of fixtures files to load. The sample code below shows how to do
each of these steps.
First, make sure the app that you're testing is initialized with the proper
configuration. Then import and initialize the ``FixturesMixin`` class, create
a new test class, and inherit from ``FixturesMixin``. Now you just need to
tell Flask-Fixtures which fixtures files to use for your tests. You can do so
by setting the ``fixtures`` class variable. Doing so will set up and tear down
fixtures between each test. To persist fixtures across tests, i.e., to setup
fixtures only when the class is first created and tear them down after all
tests have finished executing, you'll need to set the ``persist_fixtures``
variable to True. The ``fixtures`` variable should be set to a list of
strings, each of which is the name of a fixtures file to load. Flask-Fixtures
will then search the default fixtures directory followed by each directory in
the ``FIXTURES_DIRS`` config variable, in order, for a file matching each name
in the list and load each into the test database.
.. code:: python
# myapp/fixtures/test_fixtures.py
import unittest
from myapp import app
from myapp.models import db, Book, Author
from flask_fixtures import FixturesMixin
# Configure the app with the testing configuration
app.config.from_object('myapp.config.TestConfig')
# Make sure to inherit from the FixturesMixin class
class TestFoo(unittest.TestCase, FixturesMixin):
# Specify the fixtures file(s) you want to load.
# Change the list below to ['authors.yaml'] if you created your fixtures
# file using YAML instead of JSON.
fixtures = ['authors.json']
# Specify the Flask app and db we want to use for this set of tests
app = app
db = db
# Your tests go here
def test_authors(self):
authors = Author.query.all()
assert len(authors) == Author.query.count() == 1
assert len(authors[0].books) == 3
def test_books(self):
books = Book.query.all()
assert len(books) == Book.query.count() == 3
gibson = Author.query.filter(Author.last_name=='Gibson').one()
for book in books:
assert book.author == gibson
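
If you want the fixtures above to be created once for the whole test class
instead of once per test, the only change is the ``persist_fixtures`` flag
described earlier. A minimal sketch:

.. code:: python

    class TestFooPersistent(unittest.TestCase, FixturesMixin):

        # Load authors.json once for the class and tear it down only after
        # all tests in the class have run.
        fixtures = ['authors.json']
        persist_fixtures = True

        app = app
        db = db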
Examples
--------
To see the library in action, you can find a simple Flask application
and set of unit tests matching the ones in the example above in the
``tests/myapp`` directory. To run these examples yourself, just follow
the directions below for "Contributing to Flask-Fixtures".
Contributing to Flask-Fixtures
------------------------------
Currently, Flask-Fixtures supports python versions 2.6 and 2.7 and the
py.test, nose, and unittest (included in the python standard library)
libraries. To contribute bug fixes and features to Flask-Fixtures,
you'll need to make sure that any code you contribute does not break any
of the existing unit tests in any of these environments.
To run unit tests in all six of the supported environments, I suggest
you install `tox <https://testrun.org/tox/latest/>`__ and simply run the
``tox`` command. If, however, you insist on running things by hand,
you'll need to create a virtualenv for both python 2.6 and python 2.7.
Then, install nose and py.test in each virtualenv. Finally, you can run
the tests with the commands in the table below.
+------------+-------------------------------------------------------+
| Library | Command |
+============+=======================================================+
| py.test | py.test |
+------------+-------------------------------------------------------+
| nose | nosetests |
+------------+-------------------------------------------------------+
| unittest | python -m unittest discover --start-directory tests |
+------------+-------------------------------------------------------+
| PypiClean |
/DobbyStockSimulation-0.1.1.tar.gz/DobbyStockSimulation-0.1.1/.history/Test_Stock_Main_20221224102249.py | import unittest
from main_package.Stock_main import *
class TestStock(unittest.TestCase): # test class
@classmethod
def setUpClass(cls):
print('setupClass')
def setUp(self):
print('Set up')
def test_high_price(self):
self.high_price_list = [20,30,40]
self.assertEqual(self.high_price_list, [20,30,40])
def test_low_price(self):
self.low_price_list = [5,10,15]
self.assertEqual(self.low_price_list, [5,10,15])
def test_vol(self):
self.volume_list = [1,2,3]
self.assertEqual(self.volume_list, [1,2,3])
def test_size(self):
self.n = 5
self.high_price_list = [random.randint(201, 500) for i in range(int(self.n))]
self.low_price_list = [random.randint(50, 200) for i in range(int(self.n))]
self.volume_list = [random.randint(1, 200) for i in range(int(self.n))]
        # self.assertEqual(self.n, 4)  # this assertion would fail
def test_get_high_price(self):
stock = Stock()
high_prices = stock.get_high_price()
self.assertIsInstance(high_prices, list)
        self.assertGreaterEqual(min(high_prices), 201)
        self.assertLessEqual(max(high_prices), 500)
def test_get_low_price(self):
stock = Stock()
low_prices = stock.get_low_price()
self.assertIsInstance(low_prices, list)
        self.assertGreaterEqual(min(low_prices), 50)
        self.assertLessEqual(max(low_prices), 200)
def test_get_volume(self):
stock = Stock()
volumes = stock.get_volume()
self.assertIsInstance(volumes, list)
        self.assertGreaterEqual(min(volumes), 1)
        self.assertLessEqual(max(volumes), 200)
def test_get_size(self):
stock = Stock()
size = stock.get_size()
self.assertIsInstance(size, int)
self.assertGreaterEqual(size, 0)
def tearDown(self):
print('Tear Down')
@classmethod
def tearDownClass(cls):
print('teardownClass')
unittest.main() | PypiClean |
/HolmesIV-2021.9.8a1.tar.gz/HolmesIV-2021.9.8a1/mycroft/configuration/config.py |
import inflection
import json
from os.path import exists, isfile, join
import re
from requests import RequestException
import xdg.BaseDirectory
from mycroft.util.json_helper import load_commented_json, merge_dict
from mycroft.util.log import LOG
from mycroft.configuration.locations import (DEFAULT_CONFIG, SYSTEM_CONFIG,
USER_CONFIG, OLD_USER_CONFIG,
WEB_CONFIG_CACHE)
def is_remote_list(values):
"""Check if list corresponds to a backend formatted collection of dicts
"""
for v in values:
if not isinstance(v, dict):
return False
if "@type" not in v.keys():
return False
return True
def translate_remote(config, setting):
"""Translate config names from server to equivalents for mycroft-core.
Args:
config: base config to populate
settings: remote settings to be translated
"""
IGNORED_SETTINGS = ["uuid", "@type", "active", "user", "device"]
for k, v in setting.items():
if k not in IGNORED_SETTINGS:
# Translate the CamelCase values stored remotely into the
# Python-style names used within mycroft-core.
key = inflection.underscore(re.sub(r"Setting(s)?", "", k))
if isinstance(v, dict):
config[key] = config.get(key, {})
translate_remote(config[key], v)
elif isinstance(v, list):
if is_remote_list(v):
if key not in config:
config[key] = {}
translate_list(config[key], v)
else:
config[key] = v
else:
config[key] = v
def translate_list(config, values):
"""Translate list formated by mycroft server.
Args:
config (dict): target config
values (list): list from mycroft server config
"""
for v in values:
module = v["@type"]
if v.get("active"):
config["module"] = module
config[module] = config.get(module, {})
translate_remote(config[module], v)
class LocalConf(dict):
"""Config dictionary from file."""
def __init__(self, path):
super(LocalConf, self).__init__()
if path:
self.path = path
self.load_local(path)
def load_local(self, path):
"""Load local json file into self.
Args:
path (str): file to load
"""
if exists(path) and isfile(path):
try:
config = load_commented_json(path)
for key in config:
self.__setitem__(key, config[key])
LOG.debug("Configuration {} loaded".format(path))
except Exception as e:
LOG.error("Error loading configuration '{}'".format(path))
LOG.error(repr(e))
else:
LOG.debug("Configuration '{}' not defined, skipping".format(path))
def store(self, path=None):
"""Cache the received settings locally.
The cache will be used if the remote is unreachable to load settings
that are as close to the user's as possible.
"""
path = path or self.path
with open(path, 'w') as f:
json.dump(self, f, indent=2)
def merge(self, conf):
merge_dict(self, conf)
class RemoteConf(LocalConf):
"""Config dictionary fetched from mycroft.ai."""
def __init__(self, cache=None):
super(RemoteConf, self).__init__(None)
cache = cache or WEB_CONFIG_CACHE
from mycroft.api import is_paired
if not is_paired():
self.load_local(cache)
return
try:
# Here to avoid cyclic import
from mycroft.api import DeviceApi
from mycroft.api import is_backend_disabled
if is_backend_disabled():
# disable options that require backend
config = {
"server": {
"metrics": False,
"sync_skill_settings": False
},
"skills": {"upload_skill_manifest": False},
"opt_in": False
}
for key in config:
self.__setitem__(key, config[key])
else:
api = DeviceApi()
setting = api.get_settings()
location = None
try:
location = api.get_location()
except RequestException as e:
LOG.error("RequestException fetching remote location: {}"
.format(str(e)))
if exists(cache) and isfile(cache):
location = load_commented_json(cache).get('location')
if location:
setting["location"] = location
# Remove server specific entries
config = {}
translate_remote(config, setting)
for key in config:
self.__setitem__(key, config[key])
self.store(cache)
except RequestException as e:
LOG.error("RequestException fetching remote configuration: {}"
.format(str(e)))
self.load_local(cache)
except Exception as e:
LOG.error("Failed to fetch remote configuration: %s" % repr(e),
exc_info=True)
self.load_local(cache)
def _log_old_location_deprecation():
LOG.warning("\n ===============================================\n"
" == DEPRECATION WARNING ==\n"
" ===============================================\n"
f" You still have a config file at {OLD_USER_CONFIG}\n"
" Note that this location is deprecated and will"
" not be used in the future\n"
" Please move it to "
f"{xdg.BaseDirectory.save_config_path('mycroft')}")
class Configuration:
"""Namespace for operations on the configuration singleton."""
__config = {} # Cached config
__patch = {} # Patch config that skills can update to override config
@staticmethod
def get(configs=None, cache=True, remote=True):
"""Get configuration
Returns cached instance if available otherwise builds a new
configuration dict.
Args:
configs (list): List of configuration dicts
cache (boolean): True if the result should be cached
remote (boolean): False if the Remote settings shouldn't be loaded
Returns:
(dict) configuration dictionary.
"""
if Configuration.__config:
return Configuration.__config
else:
return Configuration.load_config_stack(configs, cache, remote)
@staticmethod
def load_config_stack(configs=None, cache=False, remote=True):
"""Load a stack of config dicts into a single dict
Args:
configs (list): list of dicts to load
cache (boolean): True if result should be cached
remote (boolean): False if the Mycroft Home settings shouldn't
be loaded
Returns:
(dict) merged dict of all configuration files
"""
if not configs:
configs = []
# First use the patched config
configs.append(Configuration.__patch)
# Then use XDG config
# This includes both the user config and
# /etc/xdg/mycroft/mycroft.conf
for conf_dir in xdg.BaseDirectory.load_config_paths('mycroft'):
configs.append(LocalConf(join(conf_dir, 'mycroft.conf')))
# Then check the old user config
if isfile(OLD_USER_CONFIG):
_log_old_location_deprecation()
configs.append(LocalConf(OLD_USER_CONFIG))
# Then use remote config
if remote:
configs.append(RemoteConf())
# Then use the system config (/etc/mycroft/mycroft.conf)
configs.append(LocalConf(SYSTEM_CONFIG))
# Then use the config that comes with the package
configs.append(LocalConf(DEFAULT_CONFIG))
# Make sure we reverse the array, as merge_dict will put every new
# file on top of the previous one
configs = reversed(configs)
else:
# Handle strings in stack
for index, item in enumerate(configs):
if isinstance(item, str):
configs[index] = LocalConf(item)
# Merge all configs into one
base = {}
for c in configs:
merge_dict(base, c)
# copy into cache
if cache:
Configuration.__config.clear()
for key in base:
Configuration.__config[key] = base[key]
return Configuration.__config
else:
return base
@staticmethod
def set_config_update_handlers(bus):
"""Setup websocket handlers to update config.
Args:
bus: Message bus client instance
"""
bus.on("configuration.updated", Configuration.updated)
bus.on("configuration.patch", Configuration.patch)
bus.on("configuration.patch.clear", Configuration.patch_clear)
@staticmethod
def updated(message):
"""Handler for configuration.updated,
Triggers an update of cached config.
"""
Configuration.load_config_stack(cache=True)
@staticmethod
def patch(message):
"""Patch the volatile dict usable by skills
Args:
message: Messagebus message should contain a config
in the data payload.
"""
config = message.data.get("config", {})
merge_dict(Configuration.__patch, config)
Configuration.load_config_stack(cache=True)
@staticmethod
def patch_clear(message):
"""Clear the config patch space.
Args:
message: Messagebus message should contain a config
in the data payload.
"""
Configuration.__patch = {}
Configuration.load_config_stack(cache=True) | PypiClean |
/0-orchestrator-1.1.0a7.tar.gz/0-orchestrator-1.1.0a7/zeroos/orchestrator/sal/healthchecks/ssh_cleanup.py | from ..healthcheck import HealthCheckRun
from js9 import j
descr = """
Clean up ssh daemons and tcp services left over from migration
"""
class SSHCleanup(HealthCheckRun):
def __init__(self, node, job):
resource = '/nodes/{}'.format(node.name)
super().__init__('ssh-cleanup', 'SSH Cleanup', 'System Load', resource)
self.node = node
self.service = job.service
self.job = job
def run(self):
status = 'OK'
text = 'Migration Cleanup Succesful'
finished = []
try:
for job in self.service.aysrepo.jobsList():
job_dict = job.to_dict()
if job_dict['actionName'] == 'processChange' and job_dict['actorName'] == 'vm':
if job_dict['state'] == 'running':
continue
vm = self.service.aysrepo.serviceGet(instance=job_dict['serviceName'], role=job_dict['actorName'])
finished.append("ssh.config_%s" % vm.name)
for proc in self.node.client.process.list():
for partial in finished:
if partial not in proc['cmdline']:
continue
config_file = proc['cmdline'].split()[-1]
port = config_file.split('_')[-1]
self.node.client.process.kill(proc['pid'])
tcp_name = "tcp_%s_%s" % (self.node.name, port)
tcp_service = self.service.aysrepo.serviceGet(role='tcp', instance=tcp_name)
j.tools.async.wrappers.sync(tcp_service.executeAction("drop"), context=self.job.context)
tcp_service.delete()
if self.node.client.filesystem.exists('/tmp'):
self.node.client.filesystem.remove(config_file)
except Exception as e:
text = "Error happened, Can not clean ssh process "
status = "ERROR"
self.add_message(self.id, status, text) | PypiClean |
/FlaskCms-0.0.4.tar.gz/FlaskCms-0.0.4/flask_cms/static/js/ace/mode-dot.js | ace.define("ace/mode/matching_brace_outdent",["require","exports","module","ace/range"], function(require, exports, module) {
"use strict";
var Range = require("../range").Range;
var MatchingBraceOutdent = function() {};
(function() {
this.checkOutdent = function(line, input) {
if (! /^\s+$/.test(line))
return false;
return /^\s*\}/.test(input);
};
this.autoOutdent = function(doc, row) {
var line = doc.getLine(row);
var match = line.match(/^(\s*\})/);
if (!match) return 0;
var column = match[1].length;
var openBracePos = doc.findMatchingBracket({row: row, column: column});
if (!openBracePos || openBracePos.row == row) return 0;
var indent = this.$getIndent(doc.getLine(openBracePos.row));
doc.replace(new Range(row, 0, row, column-1), indent);
};
this.$getIndent = function(line) {
return line.match(/^\s*/)[0];
};
}).call(MatchingBraceOutdent.prototype);
exports.MatchingBraceOutdent = MatchingBraceOutdent;
});
ace.define("ace/mode/doc_comment_highlight_rules",["require","exports","module","ace/lib/oop","ace/mode/text_highlight_rules"], function(require, exports, module) {
"use strict";
var oop = require("../lib/oop");
var TextHighlightRules = require("./text_highlight_rules").TextHighlightRules;
var DocCommentHighlightRules = function() {
this.$rules = {
"start" : [ {
token : "comment.doc.tag",
regex : "@[\\w\\d_]+" // TODO: fix email addresses
}, {
token : "comment.doc.tag",
regex : "\\bTODO\\b"
}, {
defaultToken : "comment.doc"
}]
};
};
oop.inherits(DocCommentHighlightRules, TextHighlightRules);
DocCommentHighlightRules.getStartRule = function(start) {
return {
token : "comment.doc", // doc comment
regex : "\\/\\*(?=\\*)",
next : start
};
};
DocCommentHighlightRules.getEndRule = function (start) {
return {
token : "comment.doc", // closing comment
regex : "\\*\\/",
next : start
};
};
exports.DocCommentHighlightRules = DocCommentHighlightRules;
});
ace.define("ace/mode/dot_highlight_rules",["require","exports","module","ace/lib/oop","ace/lib/lang","ace/mode/text_highlight_rules","ace/mode/doc_comment_highlight_rules"], function(require, exports, module) {
"use strict";
var oop = require("../lib/oop");
var lang = require("../lib/lang");
var TextHighlightRules = require("./text_highlight_rules").TextHighlightRules;
var DocCommentHighlightRules = require("./doc_comment_highlight_rules").DocCommentHighlightRules;
var DotHighlightRules = function() {
var keywords = lang.arrayToMap(
("strict|node|edge|graph|digraph|subgraph").split("|")
);
var attributes = lang.arrayToMap(
("damping|k|url|area|arrowhead|arrowsize|arrowtail|aspect|bb|bgcolor|center|charset|clusterrank|color|colorscheme|comment|compound|concentrate|constraint|decorate|defaultdist|dim|dimen|dir|diredgeconstraints|distortion|dpi|edgeurl|edgehref|edgetarget|edgetooltip|epsilon|esep|fillcolor|fixedsize|fontcolor|fontname|fontnames|fontpath|fontsize|forcelabels|gradientangle|group|headurl|head_lp|headclip|headhref|headlabel|headport|headtarget|headtooltip|height|href|id|image|imagepath|imagescale|label|labelurl|label_scheme|labelangle|labeldistance|labelfloat|labelfontcolor|labelfontname|labelfontsize|labelhref|labeljust|labelloc|labeltarget|labeltooltip|landscape|layer|layerlistsep|layers|layerselect|layersep|layout|len|levels|levelsgap|lhead|lheight|lp|ltail|lwidth|margin|maxiter|mclimit|mindist|minlen|mode|model|mosek|nodesep|nojustify|normalize|nslimit|nslimit1|ordering|orientation|outputorder|overlap|overlap_scaling|pack|packmode|pad|page|pagedir|pencolor|penwidth|peripheries|pin|pos|quadtree|quantum|rank|rankdir|ranksep|ratio|rects|regular|remincross|repulsiveforce|resolution|root|rotate|rotation|samehead|sametail|samplepoints|scale|searchsize|sep|shape|shapefile|showboxes|sides|size|skew|smoothing|sortv|splines|start|style|stylesheet|tailurl|tail_lp|tailclip|tailhref|taillabel|tailport|tailtarget|tailtooltip|target|tooltip|truecolor|vertices|viewport|voro_margin|weight|width|xlabel|xlp|z").split("|")
);
this.$rules = {
"start" : [
{
token : "comment",
regex : /\/\/.*$/
}, {
token : "comment",
regex : /#.*$/
}, {
token : "comment", // multi line comment
merge : true,
regex : /\/\*/,
next : "comment"
}, {
token : "string",
regex : "'(?=.)",
next : "qstring"
}, {
token : "string",
regex : '"(?=.)',
next : "qqstring"
}, {
token : "constant.numeric",
regex : /[+\-]?\d+(?:(?:\.\d*)?(?:[eE][+\-]?\d+)?)?\b/
}, {
token : "keyword.operator",
regex : /\+|=|\->/
}, {
token : "punctuation.operator",
regex : /,|;/
}, {
token : "paren.lparen",
regex : /[\[{]/
}, {
token : "paren.rparen",
regex : /[\]}]/
}, {
token: "comment",
regex: /^#!.*$/
}, {
token: function(value) {
if (keywords.hasOwnProperty(value.toLowerCase())) {
return "keyword";
}
else if (attributes.hasOwnProperty(value.toLowerCase())) {
return "variable";
}
else {
return "text";
}
},
regex: "\\-?[a-zA-Z_][a-zA-Z0-9_\\-]*"
}
],
"comment" : [
{
token : "comment", // closing comment
regex : ".*?\\*\\/",
merge : true,
next : "start"
}, {
token : "comment", // comment spanning whole line
merge : true,
regex : ".+"
}
],
"qqstring" : [
{
token : "string",
regex : '[^"\\\\]+',
merge : true
}, {
token : "string",
regex : "\\\\$",
next : "qqstring",
merge : true
}, {
token : "string",
regex : '"|$',
next : "start",
merge : true
}
],
"qstring" : [
{
token : "string",
regex : "[^'\\\\]+",
merge : true
}, {
token : "string",
regex : "\\\\$",
next : "qstring",
merge : true
}, {
token : "string",
regex : "'|$",
next : "start",
merge : true
}
]
};
};
oop.inherits(DotHighlightRules, TextHighlightRules);
exports.DotHighlightRules = DotHighlightRules;
});
ace.define("ace/mode/folding/cstyle",["require","exports","module","ace/lib/oop","ace/range","ace/mode/folding/fold_mode"], function(require, exports, module) {
"use strict";
var oop = require("../../lib/oop");
var Range = require("../../range").Range;
var BaseFoldMode = require("./fold_mode").FoldMode;
var FoldMode = exports.FoldMode = function(commentRegex) {
if (commentRegex) {
this.foldingStartMarker = new RegExp(
this.foldingStartMarker.source.replace(/\|[^|]*?$/, "|" + commentRegex.start)
);
this.foldingStopMarker = new RegExp(
this.foldingStopMarker.source.replace(/\|[^|]*?$/, "|" + commentRegex.end)
);
}
};
oop.inherits(FoldMode, BaseFoldMode);
(function() {
this.foldingStartMarker = /(\{|\[)[^\}\]]*$|^\s*(\/\*)/;
this.foldingStopMarker = /^[^\[\{]*(\}|\])|^[\s\*]*(\*\/)/;
this.getFoldWidgetRange = function(session, foldStyle, row, forceMultiline) {
var line = session.getLine(row);
var match = line.match(this.foldingStartMarker);
if (match) {
var i = match.index;
if (match[1])
return this.openingBracketBlock(session, match[1], row, i);
var range = session.getCommentFoldRange(row, i + match[0].length, 1);
if (range && !range.isMultiLine()) {
if (forceMultiline) {
range = this.getSectionRange(session, row);
} else if (foldStyle != "all")
range = null;
}
return range;
}
if (foldStyle === "markbegin")
return;
var match = line.match(this.foldingStopMarker);
if (match) {
var i = match.index + match[0].length;
if (match[1])
return this.closingBracketBlock(session, match[1], row, i);
return session.getCommentFoldRange(row, i, -1);
}
};
this.getSectionRange = function(session, row) {
var line = session.getLine(row);
var startIndent = line.search(/\S/);
var startRow = row;
var startColumn = line.length;
row = row + 1;
var endRow = row;
var maxRow = session.getLength();
while (++row < maxRow) {
line = session.getLine(row);
var indent = line.search(/\S/);
if (indent === -1)
continue;
if (startIndent > indent)
break;
var subRange = this.getFoldWidgetRange(session, "all", row);
if (subRange) {
if (subRange.start.row <= startRow) {
break;
} else if (subRange.isMultiLine()) {
row = subRange.end.row;
} else if (startIndent == indent) {
break;
}
}
endRow = row;
}
return new Range(startRow, startColumn, endRow, session.getLine(endRow).length);
};
}).call(FoldMode.prototype);
});
ace.define("ace/mode/dot",["require","exports","module","ace/lib/oop","ace/mode/text","ace/mode/matching_brace_outdent","ace/mode/dot_highlight_rules","ace/mode/folding/cstyle"], function(require, exports, module) {
"use strict";
var oop = require("../lib/oop");
var TextMode = require("./text").Mode;
var MatchingBraceOutdent = require("./matching_brace_outdent").MatchingBraceOutdent;
var DotHighlightRules = require("./dot_highlight_rules").DotHighlightRules;
var DotFoldMode = require("./folding/cstyle").FoldMode;
var Mode = function() {
this.HighlightRules = DotHighlightRules;
this.$outdent = new MatchingBraceOutdent();
this.foldingRules = new DotFoldMode();
};
oop.inherits(Mode, TextMode);
(function() {
this.lineCommentStart = ["//", "#"];
this.blockComment = {start: "/*", end: "*/"};
this.getNextLineIndent = function(state, line, tab) {
var indent = this.$getIndent(line);
var tokenizedLine = this.getTokenizer().getLineTokens(line, state);
var tokens = tokenizedLine.tokens;
var endState = tokenizedLine.state;
if (tokens.length && tokens[tokens.length-1].type == "comment") {
return indent;
}
if (state == "start") {
var match = line.match(/^.*(?:\bcase\b.*\:|[\{\(\[])\s*$/);
if (match) {
indent += tab;
}
}
return indent;
};
this.checkOutdent = function(state, line, input) {
return this.$outdent.checkOutdent(line, input);
};
this.autoOutdent = function(state, doc, row) {
this.$outdent.autoOutdent(doc, row);
};
this.$id = "ace/mode/dot";
}).call(Mode.prototype);
exports.Mode = Mode;
}); | PypiClean |
/Flask-Prose-0.1.56.tar.gz/Flask-Prose-0.1.56/flask_prose/storage.py | import re
try:
from builtins import str
except ImportError:
pass
import os.path
import logging
import json
import uuid
from flask_jsontools import DynamicJSONEncoder
from sqlalchemy.ext.serializer import loads, dumps
from sqlalchemy import (
select,
desc,
func,
and_,
not_,
)
from sqlalchemy.orm import (
sessionmaker,
load_only,
aliased,
subqueryload,
joinedload
)
from sqlalchemy.exc import IntegrityError
from .models import (
Base,
Prose,
Grock,
ProseType,
Corpora,
MarkovText,
ProseCorpora
)
from .prosemaker import (
ProseMakerText,
ProseMakerSen
)
class Storage():
def __init__(self, engine=None, bind_key=None):
self._logger = logging.getLogger(__name__)
self.generate_prose_callback = self.corpora_generate_prose
self.generate_markov_callback = self.corpora_generate_markov
if engine is None:
raise ValueError('engine is required')
self._engine = engine
# __bind_key__ is a custom attribute set in the model
# it is used by wrapper extentions like flask-sqlalchemy and flask-alchy
# to bind the model to a engine connection
if bind_key:
Base.__bind_key__ = bind_key
Session = sessionmaker(bind=engine)
self._session = Session()
# the models have inherited Base, we have imported base from there.
Base.metadata.create_all(engine)
def close(self):
'''
        Each time session.commit() is called, an implicit transaction is newly created.
        This SQLAlchemy behavior causes an issue when issuing DDL
        commands in newer versions of MySQL:
        error 'Waiting for table metadata lock'
'''
self._session.close()
def prosetype_add(self, label):
if not label:
            raise ValueError('label required')
pt = self._session.query(ProseType) \
.filter(ProseType.label == label) \
.one_or_none()
if pt:
return pt.id
try:
pt = ProseType(label)
self._session.add(pt)
self._session.commit()
return pt.id
except:
self._session.rollback()
raise
def corpora_save(self, label=None, source=None, text=None):
        rx = re.compile(r'[^\w\s\.\,\?\!\'\"]')
res = rx.sub(' ', text).strip()
if not res:
raise ValueError('text invalid.')
self._logger.debug('label:%s text:%s...', label, res[0:20])
corpora = Corpora(text=res, label=label, source=source)
try:
self._session.add(corpora)
self._session.commit()
self._logger.debug('corpora.id:%s', corpora.id)
except:
self._session.rollback()
raise
if self.generate_markov_callback:
self._logger.debug('calling generate_markov_callback:%s',
self.generate_markov_callback)
self.generate_markov_callback(corpora.id)
if self.generate_prose_callback:
self._logger.debug('calling generate_prose_callback:%s',
self.generate_prose_callback)
self.generate_prose_callback((corpora.id,))
return corpora.id
def corpora_list(self, uuid=None):
corpora = self._session.query( \
Corpora.id,
Corpora.label,
Corpora.post_date,
Corpora.source,
func.count(ProseCorpora.prose_id).label('prose'),
) \
.select_from(Corpora) \
.join(ProseCorpora, ProseCorpora.corpora_id == Corpora.id) \
.group_by(Corpora.id)
if uuid:
corpora = corpora.filter(Corpora.id == uuid)
self._logger.debug('corpora_list:%s', str(corpora))
corpora_result = corpora.all()
if corpora_result:
return json.loads(json.dumps(corpora_result, cls=DynamicJSONEncoder))
else:
return []
def corpora_delete(self, uuid):
try:
corpora = self._session.query(Corpora) \
.options(load_only('id')) \
.filter(Corpora.id == uuid).one_or_none()
if corpora:
self._session.delete(corpora)
self._session.commit()
return True
return False
except:
self._session.rollback()
raise
def generate_markov(self, callback):
self.generate_markov_callback = callback
return callback
def corpora_generate_markov(self, corpora_id):
"""
create markov sentences from corpora text
"""
try:
corpora = self._session.query(Corpora) \
.filter(Corpora.id == corpora_id) \
.one_or_none()
if not corpora:
raise Exception('corpora not found')
pm = ProseMakerText(text=corpora.text)
markovtext = [MarkovText(corpora.id, sen) for sen in pm.get_sentences()]
self._logger.debug('markovtext count:%s',len(markovtext))
self._session.bulk_save_objects(markovtext)
self._session.commit()
except:
self._session.rollback()
raise
def generate_prose(self, callback):
self.generate_prose_callback = callback
return callback
def corpora_generate_prose(self, corpora=set(), MAX_PROSE_COUNT = 5):
"""
arg:corpora = set of corpora ids
"""
MAX_SENTENCE_COUNT = 1000
corpora_result = self._session.query(Corpora) \
.filter(Corpora.id.in_(list(corpora))) \
.all()
corpora_ids = [str(x.id) for x in corpora_result]
self._logger.debug('corpora_ids:%s',corpora_ids)
mtext = self._session.query(MarkovText) \
.filter(MarkovText.corpora_id.in_(corpora_ids)) \
.filter(MarkovText.used == False) \
.order_by(func.random()) \
.limit(MAX_SENTENCE_COUNT) \
.all()
self._logger.debug('mtext:%s',len(mtext))
if not mtext:
raise Exception('MarkovText not found for corpora:{}'.format(corpora_ids))
# get the json of the markovtext model
pm = ProseMakerSen([m.json() for m in mtext])
for x in range(MAX_PROSE_COUNT):
self.insert_prose(pm, 'stanza')
for x in range(MAX_PROSE_COUNT):
self.insert_prose(pm, 'haiku')
def insert_prose(self,
prosemaker,
prose_type,
):
prosetype_id = self.prosetype_add(prose_type)
prose_json = prosemaker.get_prose(prose_type)
if not prose_json:
self._logger.warning('prosemaker generator error.')
return
self._logger.debug('insert_prose:%s', prose_json)
try:
prose = Prose(prosetype_id=prosetype_id,
title=prose_json['title'],
text=prose_json['prose'])
self._session.add(prose)
self._session.commit()
self._logger.debug('prose %s:%s', prose_type, prose.id)
# update markovtext used to True
markovtext_ids = [(x['id']) for x in prose_json['prose']]
markovtexts = self._session.query(MarkovText) \
.filter(MarkovText.id.in_(markovtext_ids)) \
.update({MarkovText.used:True}, synchronize_session='fetch')
self._session.commit()
# create the association ProseCorpora objects
corpora_ids = {(x['corpora_id']) for x in prose_json['prose']}
prose_corpora = []
prose_corpora.extend([ProseCorpora(prose.id, c_id) for c_id in corpora_ids])
self._session.bulk_save_objects(prose_corpora)
self._session.commit()
except:
self._session.rollback()
raise
def prose(self, uuid=None, corpora=()):
self._logger.debug('prose uuid:%s corpora:%s', uuid, corpora)
prose = None
if uuid:
prose = self._prose(uuid)
elif corpora:
prose = self._prose_corpora_random(corpora)
else:
prose = self._prose_fresh()
if not prose:
prose = self._prose_random()
return self._prose_data(prose)
def _prose(self, uuid):
if not uuid:
raise ValueError('uuid required.')
prose = self._session.query(Prose.id) \
.filter(Prose.id == uuid)
return prose.first()
def _prose_random(self):
"""
return random prose
"""
self._logger.debug('_prose_random')
prose = self._session.query(Prose.id) \
.order_by(func.random()) \
.limit(1)
return prose.first()
def _prose_fresh(self):
"""
left join grock with prose
filter so only unseen prose.
"""
self._logger.debug('_prose_fresh')
prose = self._session.query(Prose.id) \
.outerjoin(Grock, Grock.prose_id == Prose.id) \
.filter(Grock.id.is_(None)) \
.limit(1)
return prose.first()
def _prose_corpora_random(self, corpora):
if not corpora:
raise ValueError('corpora required.')
prose = self._session.query(Prose.id) \
.join(ProseCorpora, ProseCorpora.prose_id == Prose.id) \
.filter(ProseCorpora.corpora_id == corpora) \
.order_by(func.random()) \
.limit(1) \
.first()
return prose
def _prose_data(self, prose_id):
if not prose_id:
return {}
prose = self._session.query(Prose) \
.filter(Prose.id == prose_id[0]) \
.first()
if not prose:
raise Exception('prose not found.')
prosetype = self._session.query(ProseType) \
.filter(prose.prosetype_id == ProseType.id) \
.first()
corpora = self._session.query(Corpora) \
.join(ProseCorpora, ProseCorpora.corpora_id == Corpora.id) \
.filter(ProseCorpora.prose_id == prose.id) \
.all()
prose_data = json.loads(json.dumps(prose, cls=DynamicJSONEncoder))
prose_data['prosetype'] = json.loads(json.dumps(prosetype, cls=DynamicJSONEncoder))
prose_data['corporas'] = json.loads(json.dumps(corpora, cls=DynamicJSONEncoder))
self._grock(prose.id, reaction='saw')
return prose_data
def _grock(self, prose_id, reaction='saw'):
self._logger.debug('prose_id:%s reaction:%s', prose_id, reaction)
reactions = ['omg', 'meh', 'saw']
if reaction not in reactions:
raise ValueError("Expected one of: %s" % reactions)
try:
self._session.add(Grock(prose_id, reaction))
self._session.commit()
except:
self._session.rollback()
raise
def grock(self, prose_id, reaction='saw'):
"""
"""
prose = self._prose(prose_id)
if not prose:
raise Exception('prose not found.')
self._grock(prose.id, reaction)
def ratings(self, rate_type='omg', prose_id=None, limit=10):
"""
arg: rate_type = omg | meh
controls if the query looks for meh or omg results
arg: prose_id: just returns result for this prose id
arg: limit: always applied defaulting to 10
select prose_id, saw,omg,meh
from (
select prose_id,
count(*) as saw,
omg - meh as total,
(
select count(*)
from grock omg
where omg.reaction = 'omg'
and omg.prose_id = grock.prose_id
) as omg,
(
select count(*)
from grock meh where meh.reaction = 'meh'
and meh.prose_id = grock.prose_id
) as meh
from grock
where reaction = 'saw'
group by(prose_id)
)
where omg - meh > 0
order by (omg - meh) desc, saw desc;
"""
omg = aliased(Grock)
meh = aliased(Grock)
saw = aliased(Grock)
prose = aliased(Prose)
_query = select([saw.prose_id,
func.count().label('saw'),
select([prose.title]) \
.select_from(prose) \
.where(saw.prose_id == prose.id) \
.as_scalar() \
.label('title'),
select([func.count()]) \
.select_from(meh) \
.where(meh.reaction == 'meh') \
.where(meh.prose_id == saw.prose_id) \
.as_scalar() \
.label('meh'),
select([func.count()]) \
.select_from(omg) \
.where(omg.reaction == 'omg') \
.where(omg.prose_id == saw.prose_id) \
.as_scalar() \
.label('omg'),
]) \
.select_from(saw) \
.where(saw.reaction == 'saw') \
.group_by(saw.prose_id)
        # if asking for a single prose, constrain it in the subquery
if prose_id:
_query = _query.where(saw.prose_id == prose_id)
# postgres requires subquery AS to be valid
_query = _query.alias('xray')
query = select([_query, (_query.c.omg - _query.c.meh).label('total')])
        # if asking for a single prose, always return it (skip the omg/meh sign constraint)
if not prose_id and rate_type == 'omg':
query = query.where((_query.c.omg - _query.c.meh) > 0) \
.order_by((_query.c.omg - _query.c.meh).desc())
if not prose_id and rate_type == 'meh':
query = query.where((_query.c.omg - _query.c.meh) < 0) \
.order_by((_query.c.omg - _query.c.meh))
query = query.limit(limit) \
.alias('gamma')
self._logger.debug('query:%s %s', prose_id, str(query))
try:
rating_result = self._session.query(query).all()
if rating_result:
return json.loads(json.dumps(rating_result, cls=DynamicJSONEncoder))
else:
return []
except:
self._session.rollback()
raise
def ratings_all(self):
omg = self.ratings(rate_type='omg')
meh = self.ratings(rate_type='meh')
omg.extend(meh)
return omg | PypiClean |
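# A minimal usage sketch (the owning object name `store` and its session/data are illustrative):
#   store.ratings(rate_type='omg', limit=5)   # prose rated above zero, best first
#   store.ratings(rate_type='meh')            # prose rated below zero
#   store.ratings(prose_id=some_uuid)         # rating row for one prose, regardless of sign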
/Nuitka_fixed-1.1.2-cp310-cp310-win_amd64.whl/nuitka/build/inline_copy/lib/scons-3.1.2/SCons/Scanner/C.py |
__revision__ = "src/engine/SCons/Scanner/C.py bee7caf9defd6e108fc2998a2520ddb36a967691 2019-12-17 02:07:09 bdeegan"
import SCons.Node.FS
import SCons.Scanner
import SCons.Util
import SCons.cpp
import SCons.Warnings
class SConsCPPScanner(SCons.cpp.PreProcessor):
"""
SCons-specific subclass of the cpp.py module's processing.
We subclass this so that: 1) we can deal with files represented
by Nodes, not strings; 2) we can keep track of the files that are
missing.
"""
def __init__(self, *args, **kw):
SCons.cpp.PreProcessor.__init__(self, *args, **kw)
self.missing = []
def initialize_result(self, fname):
self.result = SCons.Util.UniqueList([fname])
def finalize_result(self, fname):
return self.result[1:]
def find_include_file(self, t):
keyword, quote, fname = t
result = SCons.Node.FS.find_file(fname, self.searchpath[quote])
if not result:
self.missing.append((fname, self.current_file))
return result
def read_file(self, file):
try:
with open(str(file.rfile())) as fp:
return fp.read()
except EnvironmentError as e:
self.missing.append((file, self.current_file))
return ''
def dictify_CPPDEFINES(env):
cppdefines = env.get('CPPDEFINES', {})
if cppdefines is None:
return {}
if SCons.Util.is_Sequence(cppdefines):
result = {}
for c in cppdefines:
if SCons.Util.is_Sequence(c):
result[c[0]] = c[1]
else:
result[c] = None
return result
if not SCons.Util.is_Dict(cppdefines):
return {cppdefines : None}
return cppdefines
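# Sketch of the mapping this helper produces (inputs are illustrative):
#   CPPDEFINES = ['FOO', ('BAR', 1)]  ->  {'FOO': None, 'BAR': 1}
#   CPPDEFINES = 'FOO'                ->  {'FOO': None}
#   CPPDEFINES = None                 ->  {}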
class SConsCPPScannerWrapper(object):
"""
The SCons wrapper around a cpp.py scanner.
This is the actual glue between the calling conventions of generic
SCons scanners, and the (subclass of) cpp.py class that knows how
to look for #include lines with reasonably real C-preprocessor-like
evaluation of #if/#ifdef/#else/#elif lines.
"""
def __init__(self, name, variable):
self.name = name
self.path = SCons.Scanner.FindPathDirs(variable)
def __call__(self, node, env, path = ()):
cpp = SConsCPPScanner(current = node.get_dir(),
cpppath = path,
dict = dictify_CPPDEFINES(env))
result = cpp(node)
for included, includer in cpp.missing:
fmt = "No dependency generated for file: %s (included from: %s) -- file not found"
SCons.Warnings.warn(SCons.Warnings.DependencyWarning,
fmt % (included, includer))
return result
def recurse_nodes(self, nodes):
return nodes
def select(self, node):
return self
def CScanner():
"""Return a prototype Scanner instance for scanning source files
that use the C pre-processor"""
# Here's how we would (or might) use the CPP scanner code above that
# knows how to evaluate #if/#ifdef/#else/#elif lines when searching
# for #includes. This is commented out for now until we add the
# right configurability to let users pick between the scanners.
#return SConsCPPScannerWrapper("CScanner", "CPPPATH")
cs = SCons.Scanner.ClassicCPP("CScanner",
"$CPPSUFFIXES",
"CPPPATH",
'^[ \t]*#[ \t]*(?:include|import)[ \t]*(<|")([^>"]+)(>|")')
return cs
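# The regular expression above matches preprocessor lines such as
#   #include <stdio.h>        (angle-bracket form)
#   #  include "local.h"      (quoted form, extra whitespace allowed)
# and also '#import' lines, capturing the quote style and the file name.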
# Local Variables:
# tab-width:4
# indent-tabs-mode:nil
# End:
# vim: set expandtab tabstop=4 shiftwidth=4: | PypiClean |
/GhettoRecorder-3.0-py3-none-any.whl/ghettorecorder/__main__.py | import os
import time
import json
import socket
import threading
from http.server import BaseHTTPRequestHandler, HTTPServer
import ghettorecorder.cmd as cmd
import ghettorecorder.ghetto_utils as utils
import ghettorecorder.ghetto_procenv as procenv
from ghettorecorder.cmd import entry # instance [GLOBAL] [STATIONS] ini sections
from ghettorecorder.ghetto_api import ghettoApi
dir_name = os.path.dirname(__file__)
class Helper:
def __init__(self):
self.content_type = None
self.server_shutdown = False
helper = Helper()
class Handler(BaseHTTPRequestHandler):
def do_POST(self):
"""Ajax SENDs id or name strings.
self.path is independent of post and get
"""
if self.path == '/radio_btn_id':
self.post_switch_radio()
elif self.path == '/title_get':
self.post_title_get()
elif self.path == '/write_config_file':
self.post_write_config_file()
elif self.path == '/get_config_file':
self.post_get_config_file()
elif self.path == '/write_blacklist_file':
self.post_write_blacklist_file()
elif self.path == '/get_blacklist_file':
self.post_get_blacklist_file()
elif self.path == '/server_shutdown':
self.post_server_shutdown()
elif self.path == '/wait_shutdown':
self.post_wait_shutdown()
else:
self.send_error(404, '[POST] wrong endpoint /<endpoint_name>')
def do_GET(self):
if self.path == '/':
self.get_index_html()
elif '/sound/' in self.path:
radio = self.path[7:] # skip 7 chars, read string to end
self.get_sound(radio)
elif '/shutdown/' in self.path:
radio = self.path[10:]
self.get_shutdown(radio)
elif '/static/js/' in self.path:
js = self.path[11:]
self.get_js(js)
elif self.path == '/static/css/style.css':
self.get_style_css()
elif '/static/images/' in self.path:
img = self.path[15:]
self.get_image(img)
else:
self.send_error(404, '[GET] wrong endpoint /<endpoint_name>')
def post_wait_shutdown(self):
"""JS has ajax timeout."""
self.data_string_get()
dct = wait_shutdown()
self.data_json_send(dct)
def post_server_shutdown(self):
"""Shutdown is fast now.
Must send answer before action.
"""
self.data_string_get()
self.data_json_send({'server_shutdown': ' recorder_shutdown_init'})
server_shutdown()
def post_get_blacklist_file(self):
"""blacklist to browser"""
self.data_string_get()
dct = read_blacklist_file()
self.data_json_send(dct)
def post_write_blacklist_file(self):
"""Write changes made by browser to blacklist."""
file_content = self.data_string_get()
dct = write_blacklist_file(file_content.decode('utf-8'))
self.data_json_send(dct)
def post_get_config_file(self):
"""settings.int to browser"""
self.data_string_get()
dct = read_config_file()
self.data_json_send(dct)
def post_write_config_file(self):
"""Write changes to settings.ini."""
file_content = self.data_string_get()
dct = write_config_file(file_content.decode('utf-8'))
self.data_json_send(dct)
def post_title_get(self):
"""data_string_get contains name of radio we want to check for new title. {'title': new_title}"""
active_radio_name = self.data_string_get()
dct = radio_title_get(active_radio_name.decode('utf-8'))
self.data_json_send(dct)
def post_switch_radio(self):
"""data_string_get contains name of radio we want to switch online.
Contains Zero int '0' if first call. We disable cover to enable audio, browser demands this step.
"""
radio_name = self.data_string_get()
dct = switch_local_buffer(radio_name.decode('utf-8'))
self.data_json_send(dct)
def data_json_send(self, data):
"""Send a dictionary here.
| First key can be identifier for ajax to validate correct delivery. {'foo_transfer': null, 'bar': 'fake_news'}
| if (!data.foo_transfer) {return;}
"""
json_string = json.dumps(data)
self.send_header('Content-Type', 'application/json')
self.send_header('Access-Control-Allow-Origin', '*')
self.end_headers()
try:
self.wfile.write(bytes(json_string, "utf-8"))
except OSError: # browser dropped connection, supress broken pipe error
pass
def data_string_get(self):
"""Read the binary content of request."""
length = int(self.headers.get_all('content-length')[0])
data_string = self.rfile.read(length)
self.send_response(200)
return data_string
def get_js(self, js):
"""Browser reads index.html line by line. We send JavaScript content (link or src) to browser."""
self.send_response(200)
self.send_header('Content-type', 'text/javascript')
self.end_headers()
with open(os.path.join(dir_name, 'static', 'js', js), 'r', encoding='utf-8') as f:
txt = f.read()
self.wfile.write(bytes(txt, "utf-8"))
def get_style_css(self):
"""Browser reads index.html. Send Style Sheet to browser."""
self.send_response(200)
self.send_header('Content-type', 'text/css')
self.end_headers()
with open(os.path.join(dir_name, 'static', 'css', 'style.css'), 'r', encoding='utf-8') as f:
txt = f.read()
self.wfile.write(bytes(txt, "utf-8"))
def get_image(self, img):
"""Image to browser."""
self.send_response(200)
self.send_header('Content-type', 'image/svg+xml')
self.end_headers()
with open(os.path.join(dir_name, 'static', 'images', img), 'r', encoding='utf-8') as f:
txt = f.read()
self.wfile.write(bytes(txt, "utf-8"))
def get_shutdown(self, radio):
"""Radio instance shutdown and removal from dict."""
self.send_response(200)
self.end_headers()
procenv.del_radio_instance(radio)
def get_sound(self, radio=None):
"""The browser audio element (net client) auto connects /sound and is served here, no json return
| We stuck here in a loop and THIS Handler Thread, module, is not able to respond to other requests.
| Absorb errors from JS, minus sign in front of radio is stop radio button id
| first char is minus if stop radio button, btn id is -radio name
| None and empty on whatever
:returns: Nothing, on error None;
"""
if radio is None or radio == '' or radio[0:1] == '-':
return
self.get_send_header(helper.content_type)
timeout = 5 # absorb minor network outages
start = time.perf_counter()
while 1:
if radio in ghettoApi.radio_inst_dict.keys():
audio_out_q = ghettoApi.radio_inst_dict[radio].audio_out
if not audio_out_q:
break
if not audio_out_q.empty():
start = time.perf_counter() # reset
try:
self.wfile.write(audio_out_q.get())
except OSError: # browser dropped connection, supress broken pipe error
while not audio_out_q.empty():
audio_out_q.get()
audio_out_q.cancel_join_thread() # py q feeder thread, q content is already removed
break
idle = round((time.perf_counter() - start))
if helper.server_shutdown or idle >= timeout:
                print(f'\tGhetto HTTP Handler - release connection {radio}')  # thread is no longer blocked and can go down
break
time.sleep(.1)
@staticmethod
def generate_index_html():
"""Generate page line by line. We can change content if keyword string is found."""
with open(os.path.join(dir_name, 'index.html'), 'r', encoding='utf-8') as f:
while 1:
line = f.readline()
if line == '':
break
yield line
def get_index_html(self):
"""First call, we build the page. That's all.
Button press on page will ajax 'do_POST' and update page.
Ajax feed radio name, 'do_POST' calls py func and updates page.
Java has to update the audio control element with new source URL (ghetto_simple stream srv on port 124....).
:params: _o__radio_names____: write two radio buttons for a radio, stop (-) radio and run with to listen radio
"""
self.get_send_header('text/html')
generator = self.generate_index_html()
while 1:
try:
next_line = next(generator)
if '_o__gr_sky____' in next_line:
next_line = f"<img src='data:image/svg+xml;base64,{convert_img('gr_sky.svg')}'/>"
if '_o__gr_basket____' in next_line:
next_line = f"<img src='data:image/svg+xml;base64,{convert_img('gr_sky_basket.svg')}'/>"
if '_o__radio_names____' in next_line:
self.wfile.write(bytes("<div>stop 🌺 listen</div>", "utf-8"))
for radio_name in entry.config_file_radio_url_dict.keys():
radio_names_line = f"<div class='divRadioBtn' id='div{radio_name}'>" \
"<label><input type='radio' name='da' " \
f"id='-{radio_name}' onclick=ajax_switch_radio(id)></label> " \
"<label><input type='radio' name='da' " \
f"id='{radio_name}' onclick=ajax_switch_radio(id)>{radio_name}</label>" \
"</div> "
self.wfile.write(bytes(radio_names_line, "utf-8"))
continue
except StopIteration: # last line already send, break in get_content()
break
self.wfile.write(bytes(next_line, "utf-8"))
def get_send_header(self, content_type):
"""Send header with Access control tag."""
self.send_response(200)
self.send_header('Access-Control-Allow-Origin', '*') # absolute essential for using gain and analyzer
self.send_header('Cache-Control', 'no-cache, no-store') # absolute essential to not replay old saved stuff
self.send_header('Content-type', content_type)
self.end_headers()
def wait_shutdown():
"""Return a string for ajax to show we are still alive after shutdown command."""
return {'wait_shutdown': 'alive'}
def server_shutdown():
"""Shutdown all radio instances command line style and tell server to shut down."""
cmd.shutdown()
helper.server_shutdown = True
def radio_title_get(radio):
"""Active radio interval title request."""
title = procenv.radio_attribute_get(radio=radio, attribute='new_title')
return {'title': title}
def switch_local_buffer(radio):
"""Radio checked if exists.
Server checked by port number add digit (radio index in radio list) if exists already.
"""
helper.content_type = None
is_alive = True
if radio == '0': # disable cover div
radio_instance_lst = procenv.radio_instances_get()
elif radio[:1] == '-': # del radio, name has a leading minus
procenv.del_radio_instance(radio[1:])
radio_instance_lst = procenv.radio_instances_get()
else: # add radio
url = entry.config_file_radio_url_dict[radio]
is_alive = procenv.radio_instance_create(radio, url, **entry.__dict__)
radio_instance_lst = procenv.radio_instances_get()
rv_dct = procenv.user_display_dict_get(radio)
helper.content_type = rv_dct['content']
rv_dct['recorder'] = radio_instance_lst
rv_dct['server_port'] = server_port
if not is_alive:
print(f'----------- {radio} fail -------------')
rv_dct['content'] = 'no_response' # ajax knows an error
return rv_dct
def start_radio_if_off(name, url):
"""feed content type to helper instance.
create, fire and forget if error
:returns: list of started radio instances names
"""
is_online = procenv.radio_instance_create(name, url)
active_radios_lst = procenv.radio_instances_get()
if is_online:
helper.content_type = procenv.radio_attribute_get(name, 'content_type')
return active_radios_lst if is_online else False
def convert_img(file_name):
"""Base64 string converter.
Remnant of first attempt to generate the page only from a python list, no file system.
Still used.
"""
file_path = os.path.join(dir_name, 'static', 'images', file_name)
base_64_str = utils.convert_ascii(file_path)
return base_64_str
def read_config_file():
"""Ajax send content of config file settings.ini.
"""
file = entry.config_name
folder = entry.config_dir # changed from entry.dir_name
conf_path = os.path.join(folder, file)
with open(conf_path, 'r', encoding='utf-8') as reader:
file_cont = reader.read()
return {'get_config_file': file_cont, 'path': conf_path}
def write_config_file(file_content):
"""entry.config_dir is either our package folder or
container folder.
"""
file = entry.config_name
folder = entry.config_dir
conf_path = os.path.join(folder, file)
with open(conf_path, 'w', encoding='utf-8') as writer:
writer.write(file_content)
return {'write_config_file': 'Done: ' + str(time.ctime())}
def read_blacklist_file():
"""Ajax send content of config file settings.ini"""
file = entry.blacklist_name
folder = entry.config_dir # changed from radios_parent_dir to config_dir, keep conf and blist together
file_path = os.path.join(folder, file)
with open(file_path, 'r', encoding='utf-8') as reader:
file_cont = reader.read()
return {'get_blacklist_file': file_cont, 'path': file_path}
def write_blacklist_file(file_content):
""""""
file = entry.blacklist_name
folder = entry.config_dir
file_path = os.path.join(folder, file)
with open(file_path, 'w', encoding='utf-8') as writer:
writer.write(file_content)
return {'write_blacklist_file': 'Done: ' + str(time.ctime())}
# Create ONE socket.
server_port = 1242
addr = ('', server_port)
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
sock.bind(addr)
sock.listen(5)
# Launch listener threads.
class Thread(threading.Thread):
def __init__(self):
threading.Thread.__init__(self)
self.daemon = True # self kill on prog exit
self.start()
def run(self):
httpd = HTTPServer(addr, Handler, False)
# Prevent the HTTP server from re-binding every handler.
# https://stackoverflow.com/questions/46210672/
httpd.socket = sock
httpd.server_bind = self.server_close = lambda self: None
httpd.serve_forever()
def main():
"""
| Need more than one thread to not get blocked on serving one stream and answer requests.
| 1st thread accept request and serves endless stream as listen connection.
| 2nd thread accept request, browser drops connection -> 1st thread exit, 2nd thread start stream.
| 3rd thread is for an unknown blocking error. Proceed with normal operation.
:methods: run_ghetto: same function call as command line, but skip input() loops
"""
cmd.run_ghetto(frontend=True)
[Thread() for _ in range(3)] # all on same port, means if range(2) one can connect 2 browser tabs = 2 connections
print(f"\n\tUser Interface at " + f"http://localhost:{server_port}/\n")
while 1: # keep the show running until ajax sends shutdown command
time.sleep(1)
if helper.server_shutdown:
ghettoApi.blacklist.stop_blacklist_writer = True
break # Process finished with exit code 0, if all threads are down
if __name__ == '__main__':
main() | PypiClean |
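# Running this module (e.g. `python -m ghettorecorder`) starts the front end on
# http://localhost:1242/ and keeps serving until the browser sends the shutdown command.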
/KataRangesPack-0.1-py3-none-any.whl/KataRangePack/__init__.py | class RangeClass:
def __init__(self, range):
self.range = range
def split(self):
firstInterval = self.range[0]
firstNo = self.range[1:self.range.find(',')]
secNo = self.range[self.range.find(',') + 1: -1]
secInterval = self.range[-1]
return [firstInterval, firstNo, secNo, secInterval]
    def range_validation(self):
        parts = self.split()
        valid1 = parts[0] in ('(', '[')
        valid2 = parts[3] in (')', ']')
        valid3 = parts[1].isdigit() and parts[2].isdigit()
        # only convert to int once both bounds are known to be digits, so bad input returns False instead of raising
        valid4 = valid3 and int(parts[1]) <= int(parts[2])
        return valid1 and valid2 and valid3 and valid4
def LenghtRange(self):
L1 = int(self.split()[1])
L2 = int(self.split()[2])
if(self.split()[0] == '('):
L1 += 1
if(self.split()[3] == ']'):
L2 += 1
return range(L1, L2)
def EndPoints(self):
L1 = int(self.split()[1])
L2 = int(self.split()[2])
if(self.split()[0] == '('):
L1 += 1
if(self.split()[3] == ']'):
L2 += 1
return "{" + str(L1) + "," + str(L2 - 1) + "}"
    def allPoints(self):
        result = "{"
        var = self.LenghtRange()
        # compare against the last point actually in the range so a closed upper bound (']') is joined correctly
        last = var[-1] if len(var) else None
        for i in var:
            if i < last:
                result += str(i) + ","
            else:
                result += str(i)
        result += '}'
        return result
def Equals(self, other: 'RangeClass'):
var1 = self.LenghtRange()
var2 = other.LenghtRange()
if(var1 == var2):
return True
else:
return False
def overlapsRange(self, other: 'RangeClass'):
choice = False
var1 = self.LenghtRange()
var2 = other.LenghtRange()
for i in var1:
for j in var2:
if(i == j):
choice = True
return choice | PypiClean |
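# A minimal usage sketch (the range string is illustrative):
if __name__ == '__main__':
    r = RangeClass('[2,6)')
    print(r.range_validation())  # True
    print(r.EndPoints())         # {2,5}
    print(r.allPoints())         # {2,3,4,5}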
/DPark-0.5.0.tar.gz/DPark-0.5.0/dpark/broadcast.py | from __future__ import absolute_import
import os
import zmq
import uuid as uuid_pkg
import time
import binascii
import random
import socket
import struct
import marshal
import mmap
from multiprocessing import Manager, Condition
from mmap import ACCESS_WRITE, ACCESS_READ
from dpark.utils.log import get_logger
from dpark.utils import compress, decompress, spawn, mkdir_p
from dpark.cache import Cache
from dpark.serialize import marshalable
from dpark.env import env
import six
from six.moves import range, map, cPickle
try:
from itertools import izip
except ImportError:
izip = zip
logger = get_logger(__name__)
MARSHAL_TYPE, PICKLE_TYPE = list(range(2))
BLOCK_SHIFT = 20
BLOCK_SIZE = 1 << BLOCK_SHIFT
GUIDE_ADDR = 'NewBroadcastGuideAddr'
DOWNLOAD_ADDR = 'NewDownloadAddr'
BATCHED_BLOCKS = 3
GUIDE_STOP, GUIDE_GET_SOURCES, GUIDE_SET_SOURCES, GUIDE_REPORT_BAD = list(range(4))
SERVER_STOP, SERVER_FETCH, SERVER_FETCH_FAIL, SERVER_FETCH_OK, \
DATA_GET, DATA_GET_OK, DATA_GET_FAIL, DATA_DOWNLOADING, SERVER_CLEAR_ITEM = list(range(9))
class GuideManager(object):
def __init__(self):
self._started = False
self.guides = {}
self.host = socket.gethostname()
self.guide_thread = None
self.guide_addr = None
self.register_addr = {}
self.ctx = zmq.Context()
def start(self):
if self._started:
return
self._started = True
self.guide_thread = self.start_guide()
env.register(GUIDE_ADDR, self.guide_addr)
def start_guide(self):
sock = self.ctx.socket(zmq.REP)
port = sock.bind_to_random_port('tcp://0.0.0.0')
self.guide_addr = 'tcp://%s:%d' % (self.host, port)
def run():
logger.debug("guide start at %s", self.guide_addr)
while self._started:
if not sock.poll(1000, zmq.POLLIN):
continue
type_, msg = sock.recv_pyobj()
if type_ == GUIDE_STOP:
sock.send_pyobj(0)
break
elif type_ == GUIDE_GET_SOURCES:
uuid = msg
sources = None
if uuid in self.guides:
sources = self.guides[uuid]
else:
logger.warning('uuid %s NOT REGISTERED in guide server', uuid)
sock.send_pyobj(sources)
elif type_ == GUIDE_SET_SOURCES:
uuid, addr, bitmap = msg
if any(bitmap):
sources = None
if uuid in self.guides:
sources = self.guides[uuid]
if sources:
sources[addr] = bitmap
else:
self.guides[uuid] = {addr: bitmap}
self.register_addr[uuid] = addr
sock.send_pyobj(None)
elif type_ == GUIDE_REPORT_BAD:
uuid, addr = msg
sources = self.guides[uuid]
if addr in sources:
if addr != self.register_addr[uuid]:
del sources[addr]
else:
logger.warning('The addr %s to delete is the register Quit!!!', addr)
sock.send_pyobj(None)
else:
logger.error('Unknown guide message: %s %s', type_, msg)
sock.send_pyobj(None)
return spawn(run)
def shutdown(self):
if not self._started:
return
self._started = False
if self.guide_thread and self.guide_addr. \
startswith('tcp://%s:' % socket.gethostname()):
self.guide_thread.join(timeout=1)
if self.guide_thread.is_alive():
logger.warning("guide_thread not stopped.")
self.guide_addr = None
def check_memory(location):
try:
import psutil
pid = os.getpid()
p = psutil.Process(pid)
rss = p.memory_info().rss >> 20
        logger.info('memory rss %d MB in host %s at %s',
                    rss, socket.gethostname(), location)
except ImportError:
logger.warning('import psutil failed')
def decide_dir(work_dirs):
return work_dirs[-1]
def gen_broadcast_path(work_dirs, uuid):
work_dir = decide_dir(work_dirs)
broadcast_dir = os.path.join(work_dir, 'broadcast')
mkdir_p(broadcast_dir)
uuid_path = '%s_%d' % (uuid, os.getpid())
broadcast_path = os.path.join(broadcast_dir, uuid_path)
return broadcast_path
class DownloadManager(object):
def __init__(self):
self._started = False
self.server_thread = None
self.download_threads = {}
self.uuid_state_dict = None
self.uuid_map_dict = None
self.guide_addr = None
self.server_addr = None
self.host = None
self.ctx = None
self.random_inst = None
self.work_dirs = []
self.master_broadcast_blocks = {}
def start(self):
if self._started:
return
self.manager = manager = Manager()
self.shared_uuid_fn_dict = manager.dict()
self.shared_uuid_map_dict = manager.dict()
self.shared_master_blocks = manager.dict()
self.download_cond = Condition()
self._started = True
self.ctx = zmq.Context()
self.host = socket.gethostname()
if GUIDE_ADDR not in env.environ:
start_guide_manager()
self.guide_addr = env.get(GUIDE_ADDR)
self.random_inst = random.SystemRandom()
self.server_addr, self.server_thread = self.start_server()
self.uuid_state_dict = {}
self.uuid_map_dict = {}
self.work_dirs = env.get('WORKDIR')
self.master_broadcast_blocks = {}
env.register(DOWNLOAD_ADDR, self.server_addr)
def start_server(self):
sock = self.ctx.socket(zmq.REP)
sock.setsockopt(zmq.LINGER, 0)
port = sock.bind_to_random_port("tcp://0.0.0.0")
server_addr = 'tcp://%s:%d' % (self.host, port)
guide_sock = self.ctx.socket(zmq.REQ)
guide_sock.setsockopt(zmq.LINGER, 0)
guide_sock.connect(self.guide_addr)
def run():
logger.debug("server started at %s", server_addr)
while self._started:
if not sock.poll(1000, zmq.POLLIN):
continue
type_, msg = sock.recv_pyobj()
logger.debug('server recv: %s %s', type_, msg)
if type_ == SERVER_STOP:
sock.send_pyobj(None)
break
elif type_ == SERVER_FETCH:
uuid, indices, client_addr = msg
if uuid in self.master_broadcast_blocks:
block_num = len(self.master_broadcast_blocks[uuid])
bls = []
for index in indices:
if index >= block_num:
logger.warning('input index too big %s for '
'len of blocks %d from host %s',
str(indices), block_num, client_addr)
sock.send_pyobj((SERVER_FETCH_FAIL, None))
else:
bls.append(self.master_broadcast_blocks[uuid][index])
sock.send_pyobj((SERVER_FETCH_OK, (indices, bls)))
elif uuid in self.uuid_state_dict:
fd = os.open(self.uuid_state_dict[uuid][0], os.O_RDONLY)
mmfp = mmap.mmap(fd, 0, access=ACCESS_READ)
os.close(fd)
bitmap = self.uuid_map_dict[uuid]
block_num = len(bitmap)
bls = []
for index in indices:
if index >= block_num:
logger.warning('input index too big %s for '
'len of blocks %d from host %s',
str(indices), block_num, client_addr)
sock.send_pyobj((SERVER_FETCH_FAIL, None))
else:
mmfp.seek(bitmap[index][0])
block = mmfp.read(bitmap[index][1])
bls.append(block)
mmfp.close()
sock.send_pyobj((SERVER_FETCH_OK, (indices, bls)))
else:
logger.warning('server fetch failed for uuid %s '
'not exists in server %s from host %s',
uuid, socket.gethostname(), client_addr)
sock.send_pyobj((SERVER_FETCH_FAIL, None))
elif type_ == DATA_GET:
uuid, compressed_size = msg
if uuid not in self.uuid_state_dict or not self.uuid_state_dict[uuid][1]:
if uuid not in self.download_threads:
sources = self._get_sources(uuid, guide_sock)
if not sources:
logger.warning('get sources from guide server failed in host %s',
socket.gethostname())
sock.send_pyobj(DATA_GET_FAIL)
continue
self.download_threads[uuid] = spawn(self._download_blocks,
*[sources, uuid, compressed_size])
sock.send_pyobj(DATA_DOWNLOADING)
else:
sock.send_pyobj(DATA_DOWNLOADING)
else:
sock.send_pyobj(DATA_GET_OK)
elif type_ == SERVER_CLEAR_ITEM:
uuid = msg
self.clear(uuid)
sock.send_pyobj(None)
else:
logger.error('Unknown server message: %s %s', type_, msg)
sock.send_pyobj(None)
sock.close()
logger.debug("stop Broadcast server %s", server_addr)
for uuid in list(self.uuid_state_dict.keys()):
self.clear(uuid)
return server_addr, spawn(run)
def get_blocks(self, uuid):
if uuid in self.master_broadcast_blocks:
return self.master_broadcast_blocks[uuid]
if uuid in self.shared_master_blocks:
return self.shared_master_blocks[uuid]
def register_blocks(self, uuid, blocks):
if uuid in self.master_broadcast_blocks:
logger.warning('the block uuid %s exists in dict', uuid)
return
self.master_broadcast_blocks[uuid] = blocks
self.shared_master_blocks[uuid] = blocks
def _get_sources(self, uuid, source_sock):
try:
source_sock.send_pyobj((GUIDE_GET_SOURCES,
uuid))
sources = source_sock.recv_pyobj()
except:
logger.warning('GET sources failed for addr %s with ZMQ ERR',
self.server_addr)
sources = {}
return sources
def _update_sources(self, uuid, bitmap, source_sock):
try:
source_sock.send_pyobj((GUIDE_SET_SOURCES,
(uuid, self.server_addr, bitmap)))
source_sock.recv_pyobj()
except:
pass
def _download_blocks(self, sources, uuid, compressed_size):
block_num = 0
bitmap = [0]
write_mmap_handler = None
download_guide_sock = self.ctx.socket(zmq.REQ)
download_guide_sock.setsockopt(zmq.LINGER, 0)
download_guide_sock.connect(self.guide_addr)
def _report_bad(addr):
logger.debug('fetch blocks failed from server %s', addr)
download_guide_sock.send_pyobj((GUIDE_REPORT_BAD, (uuid, addr)))
download_guide_sock.recv_pyobj()
def _fetch(addr, indices, bit_map):
sock = self.ctx.socket(zmq.REQ)
try:
sock.setsockopt(zmq.LINGER, 0)
sock.connect(addr)
sock.send_pyobj((SERVER_FETCH, (uuid, indices, self.server_addr)))
avail = sock.poll(1 * 1000, zmq.POLLIN)
check_sock = None
if not avail:
try:
check_sock = socket.socket()
addr_list = addr[len('tcp://'):].split(':')
addr_list[1] = int(addr_list[1])
check_sock.connect(tuple(addr_list))
except Exception as e:
logger.warning('connect the addr %s failed with exception %s',
addr, e)
_report_bad(addr)
else:
logger.debug("%s recv broadcast %s from %s timeout",
self.server_addr, str(indices), addr)
finally:
if check_sock:
check_sock.close()
return
result, msg = sock.recv_pyobj()
if result == SERVER_FETCH_FAIL:
_report_bad(addr)
return
if result == SERVER_FETCH_OK:
indices, blocks = msg
for rank, index in enumerate(indices):
if blocks[rank] is not None:
write_mmap_handler.seek(bit_map[index][0])
write_mmap_handler.write(blocks[rank])
bitmap[index] = bit_map[index]
else:
raise RuntimeError('Unknown server response: %s %s' % (result, msg))
finally:
sock.close()
final_path = gen_broadcast_path(self.work_dirs, uuid)
self.uuid_state_dict[uuid] = final_path, False
fp = open(final_path, 'wb')
fp.truncate(compressed_size)
fp.close()
fd = os.open(final_path, os.O_RDWR)
write_mmap_handler = mmap.mmap(fd, 0,
access=ACCESS_WRITE)
os.close(fd)
while not all(bitmap):
remote = []
for _addr, _bitmap in six.iteritems(sources):
if block_num == 0:
block_num = len(_bitmap)
bitmap = [0] * block_num
self.uuid_map_dict[uuid] = bitmap
if not _addr.startswith('tcp://%s:' % self.host):
remote.append((_addr, _bitmap))
self.random_inst.shuffle(remote)
for _addr, _bitmap in remote:
_indices = [i for i in range(block_num) if not bitmap[i] and _bitmap[i]]
if _indices:
self.random_inst.shuffle(_indices)
_fetch(_addr, _indices[:BATCHED_BLOCKS], _bitmap)
self._update_sources(uuid, bitmap, download_guide_sock)
sources = self._get_sources(uuid, download_guide_sock)
write_mmap_handler.flush()
write_mmap_handler.close()
self.shared_uuid_map_dict[uuid] = bitmap
self.shared_uuid_fn_dict[uuid] = self.uuid_state_dict[uuid][0]
self.uuid_state_dict[uuid] = self.uuid_state_dict[uuid][0], True
download_guide_sock.close()
with self.download_cond:
self.download_cond.notify_all()
def clear(self, uuid):
if uuid in self.master_broadcast_blocks:
del self.master_broadcast_blocks[uuid]
del self.shared_master_blocks[uuid]
if uuid in self.uuid_state_dict:
del self.uuid_state_dict[uuid]
if uuid in self.shared_uuid_fn_dict:
del self.shared_uuid_fn_dict[uuid]
del self.shared_uuid_map_dict[uuid]
def shutdown(self):
if not self._started:
return
self._started = False
if self.server_thread and self.server_addr. \
startswith('tcp://%s:' % socket.gethostname()):
for _, th in six.iteritems(self.download_threads):
th.join(timeout=0.1) # only in executor, not needed
self.server_thread.join(timeout=1)
if self.server_thread.is_alive():
logger.warning("Download mananger server_thread not stopped.")
self.manager.shutdown() # shutdown will try join and terminate server process
def accumulate_list(l):
acc = 0
acc_l = []
for item in l:
acc_l.append(acc)
acc += item
acc_l.append(acc)
return acc_l
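# For example (sketch): accumulate_list([3, 5, 2]) -> [0, 3, 8, 10]
# i.e. running offsets followed by the total, which to_blocks() pairs with the sizes to build the block map.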
class BroadcastManager(object):
header_fmt = '>BI'
header_len = struct.calcsize(header_fmt)
def __init__(self):
self._started = False
self.guide_addr = None
self.download_addr = None
self.cache = None
self.shared_uuid_fn_dict = None
self.shared_uuid_map_dict = None
self.download_cond = None
self.ctx = None
def start(self):
if self._started:
return
self._started = True
start_download_manager()
self.guide_addr = env.get(GUIDE_ADDR)
self.download_addr = env.get(DOWNLOAD_ADDR)
self.cache = Cache()
self.ctx = zmq.Context()
self.shared_uuid_fn_dict = _download_manager.shared_uuid_fn_dict
self.shared_uuid_map_dict = _download_manager.shared_uuid_map_dict
self.download_cond = _download_manager.download_cond
def register(self, uuid, value):
self.start()
if uuid in self.shared_uuid_fn_dict:
raise RuntimeError('broadcast %s has already registered' % uuid)
blocks, size, block_map = self.to_blocks(uuid, value)
_download_manager.register_blocks(uuid, blocks)
self._update_sources(uuid, block_map)
self.cache.put(uuid, value)
return size
def _update_sources(self, uuid, bitmap):
guide_sock = self.ctx.socket(zmq.REQ)
try:
guide_sock.setsockopt(zmq.LINGER, 0)
guide_sock.connect(self.guide_addr)
guide_sock.send_pyobj((GUIDE_SET_SOURCES,
(uuid, self.download_addr, bitmap)))
guide_sock.recv_pyobj()
finally:
guide_sock.close()
def clear(self, uuid):
assert self._started
self.cache.put(uuid, None)
sock = self.ctx.socket(zmq.REQ)
sock.connect(self.download_addr)
sock.send_pyobj((SERVER_CLEAR_ITEM, uuid))
sock.recv_pyobj()
sock.close()
def fetch(self, uuid, compressed_size):
start_download_manager()
self.start()
value = self.cache.get(uuid)
if value is not None:
return value
blocks = _download_manager.get_blocks(uuid)
if blocks is None:
blocks = self.fetch_blocks(uuid, compressed_size)
value = self.from_blocks(uuid, blocks)
return value
@staticmethod
def _get_blocks_by_filename(file_name, block_map):
fp = open(file_name, 'rb')
buf = fp.read()
blocks = [buf[offset: offset + size] for offset, size in block_map]
fp.close()
return blocks
def fetch_blocks(self, uuid, compressed_size):
if uuid in self.shared_uuid_fn_dict:
return self._get_blocks_by_filename(self.shared_uuid_fn_dict[uuid],
self.shared_uuid_map_dict[uuid])
download_sock = self.ctx.socket(zmq.REQ)
download_sock.connect(self.download_addr)
download_sock.send_pyobj((DATA_GET,
(uuid, compressed_size)))
res = download_sock.recv_pyobj()
if res == DATA_GET_OK:
return self._get_blocks_by_filename(self.shared_uuid_fn_dict[uuid],
self.shared_uuid_map_dict[uuid])
if res == DATA_GET_FAIL:
raise RuntimeError('Data GET failed for uuid:%s' % uuid)
while True:
with self.download_cond:
if uuid not in self.shared_uuid_fn_dict:
self.download_cond.wait()
else:
break
if uuid in self.shared_uuid_fn_dict:
return self._get_blocks_by_filename(self.shared_uuid_fn_dict[uuid],
self.shared_uuid_map_dict[uuid])
else:
raise RuntimeError('get blocks failed')
def to_blocks(self, uuid, obj):
try:
if marshalable(obj):
buf = marshal.dumps((uuid, obj))
type_ = MARSHAL_TYPE
else:
buf = cPickle.dumps((uuid, obj), -1)
type_ = PICKLE_TYPE
except Exception:
buf = cPickle.dumps((uuid, obj), -1)
type_ = PICKLE_TYPE
checksum = binascii.crc32(buf) & 0xFFFF
stream = struct.pack(self.header_fmt, type_, checksum) + buf
blockNum = (len(stream) + (BLOCK_SIZE - 1)) >> BLOCK_SHIFT
blocks = [compress(stream[i * BLOCK_SIZE:(i + 1) * BLOCK_SIZE]) for i in range(blockNum)]
sizes = [len(block) for block in blocks]
size_l = accumulate_list(sizes)
block_map = list(izip(size_l[:-1], sizes))
return blocks, size_l[-1], block_map
def from_blocks(self, uuid, blocks):
stream = b''.join(map(decompress, blocks))
type_, checksum = struct.unpack(self.header_fmt, stream[:self.header_len])
buf = stream[self.header_len:]
_checksum = binascii.crc32(buf) & 0xFFFF
if _checksum != checksum:
raise RuntimeError('Wrong blocks: checksum: %s, expected: %s' % (
_checksum, checksum))
if type_ == MARSHAL_TYPE:
_uuid, value = marshal.loads(buf)
elif type_ == PICKLE_TYPE:
_uuid, value = cPickle.loads(buf)
else:
raise RuntimeError('Unknown serialization type: %s' % type_)
if uuid != _uuid:
raise RuntimeError('Wrong blocks: uuid: %s, expected: %s' % (_uuid, uuid))
return value
def shutdown(self):
if not self._started:
return
self._started = False
_manager = BroadcastManager()
_download_manager = DownloadManager()
_guide_manager = GuideManager()
def start_guide_manager():
_guide_manager.start()
def start_download_manager():
_download_manager.start()
def stop_manager():
_manager.shutdown()
_download_manager.shutdown()
_guide_manager.shutdown()
env.environ.pop(GUIDE_ADDR, None)
env.environ.pop(DOWNLOAD_ADDR, None)
class Broadcast(object):
def __init__(self, value):
assert value is not None, 'broadcast object should not been None'
self.uuid = str(uuid_pkg.uuid4())
self.value = value
self.compressed_size = _manager.register(self.uuid, self.value)
block_num = (self.compressed_size + BLOCK_SIZE - 1) >> BLOCK_SHIFT
self.bytes = block_num * BLOCK_SIZE
logger.info("broadcast %s in %d blocks, %d bytes", self.uuid, block_num, self.compressed_size)
def clear(self):
_manager.clear(self.uuid)
def __getstate__(self):
return self.uuid, self.compressed_size
def __setstate__(self, v):
self.uuid, self.compressed_size = v
def __getattr__(self, name):
if name != 'value':
return getattr(self.value, name)
t = time.time()
value = _manager.fetch(self.uuid, self.compressed_size)
if value is None:
raise RuntimeError("fetch broadcast failed")
env.task_stats.secs_broadcast += time.time() - t
self.value = value
return value
def __len__(self):
return len(self.value)
def __iter__(self):
return self.value.__iter__()
def __getitem__(self, key):
return self.value.__getitem__(key)
def __contains__(self, item):
return self.value.__contains__(item)
def __missing__(self, key):
return self.value.__missing__(key)
def __reversed__(self):
return self.value.__reversed__() | PypiClean |
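# A minimal usage sketch (requires a running dpark environment; names are illustrative):
#   shared = Broadcast(big_lookup_dict)   # registers the value's blocks with the local download manager
#   ... ship `shared` to tasks inside a closure ...
#   value = shared.value                  # fetched and reassembled lazily on the worker
#   shared.clear()                        # drop the cached blocks when no longer needed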
/Flask_AdminLTE3-1.0.9-py3-none-any.whl/flask_adminlte3/static/build/js/Treeview.js | import $ from 'jquery'
/**
* Constants
* ====================================================
*/
const NAME = 'Treeview'
const DATA_KEY = 'lte.treeview'
const EVENT_KEY = `.${DATA_KEY}`
const JQUERY_NO_CONFLICT = $.fn[NAME]
const EVENT_EXPANDED = `expanded${EVENT_KEY}`
const EVENT_COLLAPSED = `collapsed${EVENT_KEY}`
const EVENT_LOAD_DATA_API = `load${EVENT_KEY}`
const SELECTOR_LI = '.nav-item'
const SELECTOR_LINK = '.nav-link'
const SELECTOR_TREEVIEW_MENU = '.nav-treeview'
const SELECTOR_OPEN = '.menu-open'
const SELECTOR_DATA_WIDGET = '[data-widget="treeview"]'
const CLASS_NAME_OPEN = 'menu-open'
const CLASS_NAME_IS_OPENING = 'menu-is-opening'
const CLASS_NAME_SIDEBAR_COLLAPSED = 'sidebar-collapse'
const Default = {
trigger: `${SELECTOR_DATA_WIDGET} ${SELECTOR_LINK}`,
animationSpeed: 300,
accordion: true,
expandSidebar: false,
sidebarButtonSelector: '[data-widget="pushmenu"]'
}
/**
* Class Definition
* ====================================================
*/
class Treeview {
constructor(element, config) {
this._config = config
this._element = element
}
// Public
init() {
$(`${SELECTOR_LI}${SELECTOR_OPEN} ${SELECTOR_TREEVIEW_MENU}${SELECTOR_OPEN}`).css('display', 'block')
this._setupListeners()
}
expand(treeviewMenu, parentLi) {
const expandedEvent = $.Event(EVENT_EXPANDED)
if (this._config.accordion) {
const openMenuLi = parentLi.siblings(SELECTOR_OPEN).first()
const openTreeview = openMenuLi.find(SELECTOR_TREEVIEW_MENU).first()
this.collapse(openTreeview, openMenuLi)
}
parentLi.addClass(CLASS_NAME_IS_OPENING)
treeviewMenu.stop().slideDown(this._config.animationSpeed, () => {
parentLi.addClass(CLASS_NAME_OPEN)
$(this._element).trigger(expandedEvent)
})
if (this._config.expandSidebar) {
this._expandSidebar()
}
}
collapse(treeviewMenu, parentLi) {
const collapsedEvent = $.Event(EVENT_COLLAPSED)
parentLi.removeClass(`${CLASS_NAME_IS_OPENING} ${CLASS_NAME_OPEN}`)
treeviewMenu.stop().slideUp(this._config.animationSpeed, () => {
$(this._element).trigger(collapsedEvent)
treeviewMenu.find(`${SELECTOR_OPEN} > ${SELECTOR_TREEVIEW_MENU}`).slideUp()
treeviewMenu.find(SELECTOR_OPEN).removeClass(`${CLASS_NAME_IS_OPENING} ${CLASS_NAME_OPEN}`)
})
}
toggle(event) {
const $relativeTarget = $(event.currentTarget)
const $parent = $relativeTarget.parent()
let treeviewMenu = $parent.find(`> ${SELECTOR_TREEVIEW_MENU}`)
if (!treeviewMenu.is(SELECTOR_TREEVIEW_MENU)) {
if (!$parent.is(SELECTOR_LI)) {
treeviewMenu = $parent.parent().find(`> ${SELECTOR_TREEVIEW_MENU}`)
}
if (!treeviewMenu.is(SELECTOR_TREEVIEW_MENU)) {
return
}
}
event.preventDefault()
const parentLi = $relativeTarget.parents(SELECTOR_LI).first()
const isOpen = parentLi.hasClass(CLASS_NAME_OPEN)
if (isOpen) {
this.collapse($(treeviewMenu), parentLi)
} else {
this.expand($(treeviewMenu), parentLi)
}
}
// Private
_setupListeners() {
const elementId = this._element.attr('id') !== undefined ? `#${this._element.attr('id')}` : ''
$(document).on('click', `${elementId}${this._config.trigger}`, event => {
this.toggle(event)
})
}
_expandSidebar() {
if ($('body').hasClass(CLASS_NAME_SIDEBAR_COLLAPSED)) {
$(this._config.sidebarButtonSelector).PushMenu('expand')
}
}
// Static
static _jQueryInterface(config) {
return this.each(function () {
let data = $(this).data(DATA_KEY)
const _options = $.extend({}, Default, $(this).data())
if (!data) {
data = new Treeview($(this), _options)
$(this).data(DATA_KEY, data)
}
if (config === 'init') {
data[config]()
}
})
}
}
/**
* Data API
* ====================================================
*/
$(window).on(EVENT_LOAD_DATA_API, () => {
$(SELECTOR_DATA_WIDGET).each(function () {
Treeview._jQueryInterface.call($(this), 'init')
})
})
/**
* jQuery API
* ====================================================
*/
$.fn[NAME] = Treeview._jQueryInterface
$.fn[NAME].Constructor = Treeview
$.fn[NAME].noConflict = function () {
$.fn[NAME] = JQUERY_NO_CONFLICT
return Treeview._jQueryInterface
}
export default Treeview | PypiClean |
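// A minimal usage sketch (markup is illustrative): the data API above auto-initializes every
// element marked with data-widget="treeview" on window load; to initialize one manually:
//   $('#myNav[data-widget="treeview"]').Treeview('init')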
/FlaskFarm-4.0.104-py3-none-any.whl/flaskfarm/lib/support/expand/ffmpeg.py | import enum
import os
import platform
import re
import shutil
import subprocess
import threading
import time
import traceback
from datetime import datetime
from support import SupportFile, SupportSubprocess, SupportUtil, logger
class SupportFfmpeg(object):
__instance_list = []
__ffmpeg_path = None
__idx = 1
total_callback_function = None
temp_path = None
@classmethod
def initialize(cls, __ffmpeg_path, temp_path, total_callback_function, max_pf_count=-1):
cls.__ffmpeg_path = __ffmpeg_path
cls.temp_path = temp_path
cls.total_callback_function = total_callback_function
cls.max_pf_count = max_pf_count
    # retry : number of retries
    # max_error_packet_count : stop when this count is exceeded
    # where : calling module
def __init__(self, url, filename, save_path=None, max_pf_count=None, headers=None, timeout_minute=60, proxy=None, callback_id=None, callback_function=None):
self.__idx = str(SupportFfmpeg.__idx)
SupportFfmpeg.__idx += 1
self.url = url
self.filename = filename
self.save_path = save_path
        self.max_pf_count = max_pf_count if max_pf_count is not None else SupportFfmpeg.max_pf_count  # keep the class default from initialize() when not given
self.headers = headers
self.timeout_minute = int(timeout_minute)
self.proxy = proxy
self.callback_id = callback_id
if callback_id == None:
self.callback_id = str(self.__idx)
self.callback_function = callback_function
self.temp_fullpath = os.path.join(self.temp_path, filename)
self.save_fullpath = os.path.join(self.save_path, filename)
self.thread = None
self.process = None
self.log_thread = None
self.status = SupportFfmpeg.Status.READY
self.duration = 0
self.duration_str = ''
self.current_duration = 0
self.percent = 0
#self.log = []
self.current_pf_count = 0
self.current_bitrate = ''
self.current_speed = ''
self.start_time = None
self.end_time = None
self.download_time = None
self.start_event = threading.Event()
self.exist = False
self.filesize = 0
self.filesize_str = ''
self.download_speed = ''
SupportFfmpeg.__instance_list.append(self)
if len(SupportFfmpeg.__instance_list) > 30:
for instance in SupportFfmpeg.__instance_list:
if instance.thread is None and instance.status != SupportFfmpeg.Status.READY:
SupportFfmpeg.__instance_list.remove(instance)
break
else:
logger.debug('remove fail %s %s', instance.thread, self.status)
def start(self):
self.thread = threading.Thread(target=self.thread_fuction, args=())
self.thread.start()
self.start_time = datetime.now()
return self.get_data()
def start_and_wait(self):
self.start()
self.thread.join(timeout=60*70)
def stop(self):
try:
self.status = SupportFfmpeg.Status.USER_STOP
self.kill()
#logger.warning('stop')
except Exception as e:
logger.error(f'Exception:{str(e)}')
logger.error(traceback.format_exc())
def kill(self):
try:
if self.process is not None and self.process.poll() is None:
import psutil
process = psutil.Process(self.process.pid)
for proc in process.children(recursive=True):
proc.kill()
process.kill()
except Exception as e:
logger.error(f'Exception:{str(e)}')
logger.error(traceback.format_exc())
def thread_fuction(self):
try:
header_count = 0
if self.proxy is None:
if self.headers is None:
command = [self.__ffmpeg_path, '-y', '-i', self.url, '-c', 'copy', '-bsf:a', 'aac_adtstoasc']
else:
headers_command = []
tmp = ""
for key, value in self.headers.items():
if key.lower() == 'user-agent':
headers_command.append('-user_agent')
headers_command.append(f"{value}")
pass
else:
#headers_command.append('-headers')
if platform.system() == 'Windows':
tmp += f'{key}:{value}\r\n'
header_count += 1
else:
#tmp.append(f'{key}:{value}')
tmp += f'{key}:{value}\r\n'
if len(tmp) > 0:
headers_command.append('-headers')
headers_command.append(f'{tmp}')
command = [self.__ffmpeg_path, '-y'] + headers_command + ['-i', self.url, '-c', 'copy', '-bsf:a', 'aac_adtstoasc']
else:
command = [self.__ffmpeg_path, '-y', '-http_proxy', self.proxy, '-i', self.url, '-c', 'copy', '-bsf:a', 'aac_adtstoasc']
if platform.system() == 'Windows':
now = str(datetime.now()).replace(':', '').replace('-', '').replace(' ', '-')
filename = ('%s' % now) + '.mp4'
self.temp_fullpath = os.path.join(self.temp_path, filename)
command.append(self.temp_fullpath)
else:
command.append(self.temp_fullpath)
try:
#logger.debug(' '.join(command))
if os.path.exists(self.temp_fullpath):
for f in SupportFfmpeg.__instance_list:
if f.__idx != self.__idx and f.temp_fullpath == self.temp_fullpath and f.status in [SupportFfmpeg.Status.DOWNLOADING, SupportFfmpeg.Status.READY]:
self.status = SupportFfmpeg.Status.ALREADY_DOWNLOADING
return
except:
pass
#logger.error(' '.join(command))
command = SupportSubprocess.command_for_windows(command)
if platform.system() == 'Windows' and header_count > 1:
if os.environ.get('FF'):
from framework import F
batfilepath = os.path.join(F.config['path_data'], 'tmp', f"{time.time()}.bat")
else:
batfilepath = f"{time.time()}.bat"
tmp = command.replace('\r\n', '!CRLF!')
text = f"""setlocal enabledelayedexpansion
SET CRLF=^
{tmp}"""
SupportFile.write_file(batfilepath, text)
command = batfilepath
self.process = subprocess.Popen(command, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, universal_newlines=True, encoding='utf8')
self.status = SupportFfmpeg.Status.READY
self.log_thread = threading.Thread(target=self.log_thread_fuction, args=())
self.log_thread.start()
self.start_event.wait(timeout=60)
if self.log_thread is None:
if self.status == SupportFfmpeg.Status.READY:
self.status = SupportFfmpeg.Status.ERROR
self.kill()
elif self.status == SupportFfmpeg.Status.READY:
self.status = SupportFfmpeg.Status.ERROR
self.kill()
else:
                try:
                    process_ret = self.process.wait(timeout=60*self.timeout_minute)
                except subprocess.TimeoutExpired:
                    # Popen.wait() raises on timeout instead of returning None, which the check below expects
                    process_ret = None
# 2022-10-25
time.sleep(3)
logger.info(f"{process_ret=}")
if process_ret is None: # timeout
if self.status != SupportFfmpeg.Status.COMPLETED and self.status != SupportFfmpeg.Status.USER_STOP and self.status != SupportFfmpeg.Status.PF_STOP:
self.status = SupportFfmpeg.Status.TIME_OVER
self.kill()
else:
if self.status == SupportFfmpeg.Status.DOWNLOADING:
self.status = SupportFfmpeg.Status.FORCE_STOP
self.end_time = datetime.now()
self.download_time = self.end_time - self.start_time
try:
if self.status == SupportFfmpeg.Status.COMPLETED:
if self.save_fullpath != self.temp_fullpath:
if os.path.exists(self.save_fullpath):
os.remove(self.save_fullpath)
if platform.system() != 'Windows':
os.system('chmod 777 "%s"' % self.temp_fullpath)
shutil.move(self.temp_fullpath, self.save_fullpath)
self.filesize = os.stat(self.save_fullpath).st_size
else:
if os.path.exists(self.temp_fullpath):
os.remove(self.temp_fullpath)
except Exception as e:
logger.error(f"Exception:{str(e)}")
logger.error(traceback.format_exc())
arg = {'type':'last', 'status':self.status, 'data' : self.get_data()}
self.send_to_listener(**arg)
self.process = None
self.thread = None
except Exception as e:
logger.error(f'Exception:{str(e)}')
logger.error(traceback.format_exc())
try:
self.status = SupportFfmpeg.Status.EXCEPTION
arg = {'type':'last', 'status':self.status, 'data' : self.get_data()}
self.send_to_listener(**arg)
except:
pass
def log_thread_fuction(self):
with self.process.stdout:
for line in iter(self.process.stdout.readline, ''):
line = line.strip()
#logger.error(line)
try:
if self.status == SupportFfmpeg.Status.READY:
if line.find('Server returned 404 Not Found') != -1 or line.find('Unknown error') != -1:
self.status = SupportFfmpeg.Status.WRONG_URL
self.start_event.set()
elif line.find('No such file or directory') != -1:
self.status = SupportFfmpeg.Status.WRONG_DIRECTORY
self.start_event.set()
else:
match = re.compile(r'Duration\:\s(\d{2})\:(\d{2})\:(\d{2})\.(\d{2})\,\sstart').search(line)
if match:
self.duration_str = '%s:%s:%s' % ( match.group(1), match.group(2), match.group(3))
self.duration = int(match.group(4))
self.duration += int(match.group(3)) * 100
self.duration += int(match.group(2)) * 100 * 60
self.duration += int(match.group(1)) * 100 * 60 * 60
if match:
self.status = SupportFfmpeg.Status.READY
arg = {'type':'status_change', 'status':self.status, 'data' : self.get_data()}
self.send_to_listener(**arg)
continue
match = re.compile(r'time\=(\d{2})\:(\d{2})\:(\d{2})\.(\d{2})\sbitrate\=\s*(?P<bitrate>\d+).*?[$|\s](\s?speed\=\s*(?P<speed>.*?)x)?').search(line)
if match:
self.status = SupportFfmpeg.Status.DOWNLOADING
arg = {'type':'status_change', 'status':self.status, 'data' : self.get_data()}
self.send_to_listener(**arg)
self.start_event.set()
elif self.status == SupportFfmpeg.Status.DOWNLOADING:
if line.find('PES packet size mismatch') != -1:
self.current_pf_count += 1
if self.current_pf_count > self.max_pf_count:
self.status = SupportFfmpeg.Status.PF_STOP
self.kill()
continue
if line.find('HTTP error 403 Forbidden') != -1:
self.status = SupportFfmpeg.Status.HTTP_FORBIDDEN
self.kill()
continue
match = re.compile(r'time\=(\d{2})\:(\d{2})\:(\d{2})\.(\d{2})\sbitrate\=\s*(?P<bitrate>\d+).*?[$|\s](\s?speed\=\s*(?P<speed>.*?)x)?').search(line)
if match:
self.current_duration = int(match.group(4))
self.current_duration += int(match.group(3)) * 100
self.current_duration += int(match.group(2)) * 100 * 60
self.current_duration += int(match.group(1)) * 100 * 60 * 60
try:
self.percent = int(self.current_duration * 100 / self.duration)
except: pass
self.current_bitrate = match.group('bitrate')
self.current_speed = match.group('speed')
self.download_time = datetime.now() - self.start_time
arg = {'type':'normal', 'status':self.status, 'data' : self.get_data()}
self.send_to_listener(**arg)
continue
match = re.compile(r'video\:\d*kB\saudio\:\d*kB').search(line)
if match:
self.status = SupportFfmpeg.Status.COMPLETED
self.end_time = datetime.now()
self.download_time = self.end_time - self.start_time
self.percent = 100
arg = {'type':'status_change', 'status':self.status, 'data' : self.get_data()}
self.send_to_listener(**arg)
continue
except Exception as e:
logger.error(f'Exception:{str(e)}')
logger.error(traceback.format_exc())
self.start_event.set()
self.log_thread = None
def get_data(self):
data = {
'url' : self.url,
'filename' : self.filename,
'max_pf_count' : self.max_pf_count,
'callback_id' : self.callback_id,
'temp_path' : self.temp_path,
'save_path' : self.save_path,
'temp_fullpath' : self.temp_fullpath,
'save_fullpath' : self.save_fullpath,
'status' : int(self.status),
'status_str' : self.status.name,
'status_kor' : str(self.status),
'duration' : self.duration,
'duration_str' : self.duration_str,
'current_duration' : self.current_duration,
'percent' : self.percent,
'current_pf_count' : self.current_pf_count,
'idx' : self.__idx,
#'log' : self.log,
'current_bitrate' : self.current_bitrate,
'current_speed' : self.current_speed,
'start_time' : '' if self.start_time is None else str(self.start_time).split('.')[0][5:],
'end_time' : '' if self.end_time is None else str(self.end_time).split('.')[0][5:],
'download_time' : '' if self.download_time is None else '%02d:%02d' % (self.download_time.seconds/60, self.download_time.seconds%60),
'exist' : os.path.exists(self.save_fullpath),
}
if self.status == SupportFfmpeg.Status.COMPLETED:
data['filesize'] = self.filesize
data['filesize_str'] = SupportUtil.sizeof_fmt(self.filesize)
if self.download_time.seconds != 0:
data['download_speed'] = SupportUtil.sizeof_fmt(self.filesize/self.download_time.seconds, suffix='Bytes/Second')
else:
data['download_speed'] = '0Bytes/Second'
return data
def send_to_listener(self, **arg):
if self.total_callback_function != None:
self.total_callback_function(**arg)
if self.callback_function is not None and self.callback_function != self.total_callback_function:
arg['callback_id'] = self.callback_id
self.callback_function(**arg)
@classmethod
def stop_by_idx(cls, idx):
try:
for __instance in SupportFfmpeg.__instance_list:
if __instance.__idx == idx:
__instance.stop()
break
except Exception as e:
logger.error(f'Exception:{str(e)}')
logger.error(traceback.format_exc())
@classmethod
def stop_by_callback_id(cls, callback_id):
try:
for __instance in SupportFfmpeg.__instance_list:
if __instance.callback_id == callback_id:
__instance.stop()
break
except Exception as e:
logger.error(f'Exception:{str(e)}')
logger.error(traceback.format_exc())
@classmethod
def get_instance_by_idx(cls, idx):
try:
for __instance in SupportFfmpeg.__instance_list:
if __instance.__idx == idx:
return __instance
except Exception as e:
logger.error(f'Exception:{str(e)}')
logger.error(traceback.format_exc())
@classmethod
def get_instance_by_callback_id(cls, callback_id):
try:
for __instance in SupportFfmpeg.__instance_list:
if __instance.callback_id == callback_id:
return __instance
except Exception as e:
logger.error(f'Exception:{str(e)}')
logger.error(traceback.format_exc())
@classmethod
def all_stop(cls):
try:
for __instance in SupportFfmpeg.__instance_list:
__instance.stop()
except Exception as e:
logger.error(f'Exception:{str(e)}')
logger.error(traceback.format_exc())
@classmethod
def get_list(cls):
return cls.__instance_list
class Status(enum.Enum):
READY = 0
WRONG_URL = 1
WRONG_DIRECTORY = 2
EXCEPTION = 3
ERROR = 4
HTTP_FORBIDDEN = 11
DOWNLOADING = 5
USER_STOP = 6
COMPLETED = 7
TIME_OVER = 8
PF_STOP = 9
        FORCE_STOP = 10  # force stop
        ALREADY_DOWNLOADING = 12  # already in the list and downloading
def __int__(self):
return self.value
def __str__(self):
kor = ['준비', 'URL에러', '폴더에러', '실패(Exception)', '실패(에러)', '다운로드중', '사용자중지', '완료', '시간초과', 'PF중지', '강제중지',
'403에러', '임시파일이 이미 있음']
return kor[int(self)]
def __repr__(self):
return self.name
@staticmethod
def get_instance(value):
tmp = [
SupportFfmpeg.Status.READY,
SupportFfmpeg.Status.WRONG_URL,
SupportFfmpeg.Status.WRONG_DIRECTORY,
SupportFfmpeg.Status.EXCEPTION,
SupportFfmpeg.Status.ERROR,
SupportFfmpeg.Status.DOWNLOADING,
SupportFfmpeg.Status.USER_STOP,
SupportFfmpeg.Status.COMPLETED,
SupportFfmpeg.Status.TIME_OVER,
SupportFfmpeg.Status.PF_STOP,
SupportFfmpeg.Status.FORCE_STOP,
SupportFfmpeg.Status.HTTP_FORBIDDEN,
SupportFfmpeg.Status.ALREADY_DOWNLOADING ]
return tmp[value] | PypiClean |
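# A minimal usage sketch (paths, URL and callbacks are placeholders, not part of this module):
#   SupportFfmpeg.initialize('/usr/bin/ffmpeg', '/tmp', my_total_callback, max_pf_count=10)
#   job = SupportFfmpeg('https://example.com/stream.m3u8', 'show.mp4', save_path='/data',
#                       callback_function=my_callback)
#   job.start_and_wait()          # or job.start() and poll job.get_data()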
/BuzzAlgoTrade-0.0.2.tar.gz/BuzzAlgoTrade-0.0.2/pyalgotrade/websocket/pusher.py | import json
import urllib
import pyalgotrade
from pyalgotrade.websocket import client
import pyalgotrade.logger
logger = pyalgotrade.logger.getLogger("pusher")
# Pusher protocol reference: http://pusher.com/docs/pusher_protocol
# Every message on a Pusher WebSocket connection is packaged as an event.
# The data field is sent as a string (check 'Double encoding' in the protocol reference). If dataIsJSON is True, it is decoded.
class Event(object):
def __init__(self, eventDict, dataIsJSON):
self.__eventDict = eventDict
self.__data = eventDict.get("data")
if self.__data is not None and dataIsJSON:
self.__data = json.loads(self.__data)
def __str__(self):
return str(self.__eventDict)
def getDict(self):
return self.__eventDict
def getData(self):
return self.__data
def getType(self):
return self.__eventDict.get("event")
class PingKeepAliveMgr(client.KeepAliveMgr):
def __init__(self, wsClient, maxInactivity, responseTimeout):
super(PingKeepAliveMgr, self).__init__(wsClient, maxInactivity, responseTimeout)
# Override to send the keep alive msg.
def sendKeepAlive(self):
logger.debug("Sending pusher:ping.")
self.getWSClient().sendPing()
# Return True if the response belongs to a keep alive message, False otherwise.
def handleResponse(self, msg):
ret = msg.get("event") == "pusher:pong"
if ret:
logger.debug("Received pusher:pong.")
return ret
class WebSocketClient(client.WebSocketClientBase):
def __init__(self, appKey, protocol=5, maxInactivity=120, responseTimeout=30):
params = {
"protocol": protocol,
"client": "Python-PyAlgoTrade",
"version": pyalgotrade.__version__
}
url = "ws://ws.pusherapp.com/app/%s?%s" % (appKey, urllib.urlencode(params))
super(WebSocketClient, self).__init__(url)
self.setKeepAliveMgr(PingKeepAliveMgr(self, maxInactivity, responseTimeout))
def sendEvent(self, eventType, eventData):
msgDict = {"event": eventType}
if eventData:
msgDict["data"] = eventData
msg = json.dumps(msgDict)
self.send(msg, False)
def subscribeChannel(self, channel):
self.sendEvent("pusher:subscribe", {"channel": channel})
def sendPing(self):
self.sendEvent("pusher:ping", None)
def sendPong(self):
self.sendEvent("pusher:pong", None)
def onMessage(self, msg):
eventType = msg.get("event")
if eventType == "pusher:error":
self.onError(Event(msg, False))
elif eventType == "pusher:ping":
self.sendPong()
elif eventType == "pusher:connection_established":
self.onConnectionEstablished(Event(msg, True))
elif eventType == "pusher_internal:subscription_succeeded":
self.onSubscriptionSucceeded(Event(msg, True))
else:
# If we can't handle the message, notify the most concrete class.
self.onUnknownEvent(Event(msg, False))
######################################################################
# Override for Pusher specific events.
def onConnectionEstablished(self, event):
pass
def onSubscriptionSucceeded(self, event):
pass
def onError(self, event):
raise NotImplementedError()
def onUnknownEvent(self, event):
raise NotImplementedError() | PypiClean |
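# A minimal sketch of how this client is meant to be extended: subclass WebSocketClient and
# override the Pusher-specific callbacks. The app key and channel name are placeholders, and
# connect() is assumed to be provided by client.WebSocketClientBase.
class ExampleWebSocketClient(WebSocketClient):
    def onConnectionEstablished(self, event):
        logger.info("connected: %s", event.getData())
        self.subscribeChannel("example-channel")

    def onSubscriptionSucceeded(self, event):
        logger.info("subscribed: %s", event.getDict())

    def onError(self, event):
        logger.error("pusher error: %s", event.getData())

    def onUnknownEvent(self, event):
        logger.info("event %s: %s", event.getType(), event.getData())


if __name__ == "__main__":
    ws = ExampleWebSocketClient("your-app-key-here")
    ws.connect()  # assumed to come from client.WebSocketClientBase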
/GQCMS-0.0.4-py3-none-any.whl/build/lib/build/lib/build/lib/build/lib/build/lib/build/lib/gqcms/Heisenberg.py | import numpy as np
import scipy.linalg as sp
class heisenberg():
def __init__(self, sites, S=0.5):
"""
Will initialize the Heisenberg object
sites: the number of sites
S: the total spin per site
"""
self.sites = sites
self.S = S
self.degeneracy = int(2*S + 1)
self.spinops = np.array([[0, 0.5], [0.5, 0]]), np.array([[0, -0.5j], [0.5j, 0]]), np.array([[0.5, 0], [0, -0.5]])
self.raising = self.spinops[0] + 1j*self.spinops[1]
self.lowering = self.spinops[0] - 1j*self.spinops[1]
def calculateHamiltonian(self, J, periodic=True):
"""
Calculate the Heisenberg Hamiltonian for a given coupling constant.
J: the coupling constant
periodic: if True, also couple the last site to the first (periodic boundary conditions)
"""
total_ham = np.zeros((2**self.sites, 2**self.sites))
for site in range(self.sites - 1 + periodic):
# remember the original site index so it can be restored after the periodic wrap-around adjustment below
reset = site
S_z_site = np.kron(np.eye(2**(site)), np.kron(self.spinops[2], np.eye(2**(self.sites - site - 1))))
if site + 1 == self.sites:
site = -1
S_z_nextdoor = np.kron(np.eye(2**(site + 1)), np.kron(self.spinops[2], np.eye(2**(self.sites - site - 2))))
site = reset
S_minus_site = np.kron(np.eye(2**(site)), np.kron(self.lowering, np.eye(2**(self.sites - site - 1))))
if site + 1 == self.sites:
site = -1
S_minus_nextdoor = np.kron(np.eye(2**(site + 1)), np.kron(self.lowering, np.eye(2**(self.sites - site -2))))
site = reset
S_plus_site = np.kron(np.eye(2**(site)), np.kron(self.raising, np.eye(2**(self.sites - site - 1))))
if site + 1 == self.sites:
site = -1
S_plus_nextdoor = np.kron(np.eye(2**(site + 1)), np.kron(self.raising, np.eye(2**(self.sites - site - 2))))
total_ham = total_ham + S_z_site@S_z_nextdoor + 0.5*S_plus_site@S_minus_nextdoor + 0.5*S_minus_site@S_plus_nextdoor
return total_ham*J | PypiClean |
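# A minimal usage sketch: build the Hamiltonian for a two-site spin-1/2 chain with open
# boundaries and J = 1, then diagonalise it. For two sites the spectrum should be a singlet
# near -0.75*J and a threefold-degenerate triplet near +0.25*J.
if __name__ == "__main__":
    model = heisenberg(sites=2)
    H = model.calculateHamiltonian(J=1.0, periodic=False)
    energies = np.linalg.eigvalsh(H)
    print(energies)  # approximately [-0.75, 0.25, 0.25, 0.25]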
/Flask-Scaffold-0.5.1.tar.gz/Flask-Scaffold-0.5.1/app/templates/static/node_modules/angular-grid/src/ts/filter/filterManager.ts |
module awk.grid {
var _ = Utils;
export class FilterManager {
private $compile: any;
private $scope: any;
private gridOptionsWrapper: GridOptionsWrapper;
private grid: any;
private allFilters: any;
private rowModel: any;
private popupService: PopupService;
private valueService: ValueService;
private columnController: ColumnController;
private quickFilter: string;
private advancedFilterPresent: boolean;
private externalFilterPresent: boolean;
public init(grid: Grid, gridOptionsWrapper: GridOptionsWrapper, $compile: any, $scope: any,
columnController: ColumnController, popupService: PopupService, valueService: ValueService) {
this.$compile = $compile;
this.$scope = $scope;
this.gridOptionsWrapper = gridOptionsWrapper;
this.grid = grid;
this.allFilters = {};
this.columnController = columnController;
this.popupService = popupService;
this.valueService = valueService;
this.quickFilter = null;
}
public setFilterModel(model: any) {
if (model) {
// mark the filters as we set them, so any active filters left over we stop
var modelKeys = Object.keys(model);
_.iterateObject(this.allFilters, (colId, filterWrapper) => {
_.removeFromArray(modelKeys, colId);
var newModel = model[colId];
this.setModelOnFilterWrapper(filterWrapper.filter, newModel);
});
// at this point, modelKeys contains the colIds for which we don't yet have a filter created
_.iterateArray(modelKeys, (colId) => {
var column = this.columnController.getColumn(colId);
if (!column) {
console.warn('Warning ag-grid setFilterModel - no column found for colId ' + colId);
return;
}
var filterWrapper = this.getOrCreateFilterWrapper(column);
this.setModelOnFilterWrapper(filterWrapper.filter, model[colId]);
});
} else {
_.iterateObject(this.allFilters, (key, filterWrapper) => {
this.setModelOnFilterWrapper(filterWrapper.filter, null);
});
}
}
private setModelOnFilterWrapper(filter: { getApi: () => { setModel: Function }}, newModel: any) {
// because user can provide filters, we provide useful error checking and messages
if (typeof filter.getApi !== 'function') {
console.warn('Warning ag-grid - filter missing getApi method, which is needed for getFilterModel');
return;
}
var filterApi = filter.getApi();
if (typeof filterApi.setModel !== 'function') {
console.warn('Warning ag-grid - filter API missing setModel method, which is needed for setFilterModel');
return;
}
filterApi.setModel(newModel);
}
public getFilterModel() {
var result = <any>{};
_.iterateObject(this.allFilters, function (key: any, filterWrapper: any) {
// because user can provide filters, we provide useful error checking and messages
if (typeof filterWrapper.filter.getApi !== 'function') {
console.warn('Warning ag-grid - filter missing getApi method, which is needed for getFilterModel');
return;
}
var filterApi = filterWrapper.filter.getApi();
if (typeof filterApi.getModel !== 'function') {
console.warn('Warning ag-grid - filter API missing getModel method, which is needed for getFilterModel');
return;
}
var model = filterApi.getModel();
if (model) {
result[key] = model;
}
});
return result;
}
public setRowModel(rowModel: any) {
this.rowModel = rowModel;
}
// returns true if any advanced filter (ie not quick filter) active
private isAdvancedFilterPresent() {
var atLeastOneActive = false;
_.iterateObject(this.allFilters, function (key, filterWrapper) {
if (!filterWrapper.filter.isFilterActive) { // because users can do custom filters, give nice error message
console.error('Filter is missing method isFilterActive');
}
if (filterWrapper.filter.isFilterActive()) {
atLeastOneActive = true;
}
});
return atLeastOneActive;
}
// returns true if quickFilter or advancedFilter
public isAnyFilterPresent(): boolean {
return this.isQuickFilterPresent() || this.advancedFilterPresent || this.externalFilterPresent;
}
// returns true if given col has a filter active
public isFilterPresentForCol(colId: any) {
var filterWrapper = this.allFilters[colId];
if (!filterWrapper) {
return false;
}
if (!filterWrapper.filter.isFilterActive) { // because users can do custom filters, give nice error message
console.error('Filter is missing method isFilterActive');
}
var filterPresent = filterWrapper.filter.isFilterActive();
return filterPresent;
}
private doesFilterPass(node: RowNode, filterToSkip?: any) {
var data = node.data;
var colKeys = Object.keys(this.allFilters);
for (var i = 0, l = colKeys.length; i < l; i++) { // critical code, don't use functional programming
var colId = colKeys[i];
var filterWrapper = this.allFilters[colId];
// if no filter, always pass
if (filterWrapper === undefined) {
continue;
}
if (filterWrapper.filter === filterToSkip) {
continue;
}
if (!filterWrapper.filter.doesFilterPass) { // because users can do custom filters, give nice error message
console.error('Filter is missing method doesFilterPass');
}
var params = {
node: node,
data: data
};
if (!filterWrapper.filter.doesFilterPass(params)) {
return false;
}
}
// all filters passed
return true;
}
// returns true if it has changed (not just same value again)
public setQuickFilter(newFilter: any): boolean {
if (newFilter === undefined || newFilter === "") {
newFilter = null;
}
if (this.quickFilter !== newFilter) {
if (this.gridOptionsWrapper.isVirtualPaging()) {
console.warn('ag-grid: cannot do quick filtering when doing virtual paging');
return;
}
//want 'null' to mean 'no filter', so remove undefined and empty string
if (newFilter === undefined || newFilter === "") {
newFilter = null;
}
if (newFilter !== null) {
newFilter = newFilter.toUpperCase();
}
this.quickFilter = newFilter;
return true;
} else {
return false;
}
}
public onFilterChanged(): void {
this.advancedFilterPresent = this.isAdvancedFilterPresent();
this.externalFilterPresent = this.gridOptionsWrapper.isExternalFilterPresent();
_.iterateObject(this.allFilters, function (key, filterWrapper) {
if (filterWrapper.filter.onAnyFilterChanged) {
filterWrapper.filter.onAnyFilterChanged();
}
});
}
private isQuickFilterPresent(): boolean {
return this.quickFilter !== null;
}
public doesRowPassOtherFilters(filterToSkip: any, node: any): boolean {
return this.doesRowPassFilter(node, filterToSkip);
}
public doesRowPassFilter(node: any, filterToSkip?: any): boolean {
//first up, check quick filter
if (this.isQuickFilterPresent()) {
if (!node.quickFilterAggregateText) {
this.aggregateRowForQuickFilter(node);
}
if (node.quickFilterAggregateText.indexOf(this.quickFilter) < 0) {
//quick filter fails, so skip item
return false;
}
}
//secondly, give the client a chance to reject this row
if (this.externalFilterPresent) {
if (!this.gridOptionsWrapper.doesExternalFilterPass(node)) {
return false;
}
}
//lastly, check our internal advanced filter
if (this.advancedFilterPresent) {
if (!this.doesFilterPass(node, filterToSkip)) {
return false;
}
}
//got this far, all filters pass
return true;
}
private aggregateRowForQuickFilter(node: RowNode) {
var aggregatedText = '';
var that = this;
this.columnController.getAllColumns().forEach(function (column: Column) {
var data = node.data;
var value = that.valueService.getValue(column.colDef, data, node);
if (value && value !== '') {
aggregatedText = aggregatedText + value.toString().toUpperCase() + "_";
}
});
node.quickFilterAggregateText = aggregatedText;
}
public refreshDisplayedValues() {
if (!this.rowModel.getTopLevelNodes) {
console.error('ag-Grid: could not find getTopLevelNodes on rowModel. You cannot use setFilter when ' +
'doing virtual scrolling, as the filter has no way of getting the full set of values to display. ' +
'Either stop using this filter type, or provide the filter with a set of values (see the docs ' +
'on configuring the setFilter).');
}
var rows = this.rowModel.getTopLevelNodes();
var colKeys = Object.keys(this.allFilters);
for (var i = 0, l = colKeys.length; i < l; i++) {
var colId = colKeys[i];
var filterWrapper = this.allFilters[colId];
// if no filter, always pass
if (filterWrapper === undefined || (typeof filterWrapper.filter.setFilteredDisplayValues !== 'function')) {
continue;
}
var displayedFilterValues = new Array();
for (var j = 0; j < rows.length; j++) {
if (this.doesFilterPass(rows[j], filterWrapper.filter)) {
displayedFilterValues.push(rows[j])
}
}
filterWrapper.filter.setFilteredDisplayValues(displayedFilterValues)
}
}
public onNewRowsLoaded() {
var that = this;
Object.keys(this.allFilters).forEach(function (field) {
var filter = that.allFilters[field].filter;
if (filter.onNewRowsLoaded) {
filter.onNewRowsLoaded();
}
});
}
private createValueGetter(column: Column) {
var that = this;
return function valueGetter(node: any) {
return that.valueService.getValue(column.colDef, node.data, node);
};
}
public getFilterApi(column: Column) {
var filterWrapper = this.getOrCreateFilterWrapper(column);
if (filterWrapper) {
if (typeof filterWrapper.filter.getApi === 'function') {
return filterWrapper.filter.getApi();
}
}
}
private getOrCreateFilterWrapper(column: Column) {
var filterWrapper = this.allFilters[column.colId];
if (!filterWrapper) {
filterWrapper = this.createFilterWrapper(column);
this.allFilters[column.colId] = filterWrapper;
this.refreshDisplayedValues();
}
return filterWrapper;
}
private createFilterWrapper(column: Column) {
var colDef = column.colDef;
var filterWrapper = {
column: column,
filter: <any> null,
scope: <any> null,
gui: <any> null
};
if (typeof colDef.filter === 'function') {
// if user provided a filter, just use it
// first up, create child scope if needed
if (this.gridOptionsWrapper.isAngularCompileFilters()) {
filterWrapper.scope = this.$scope.$new();
}
// now create filter (had to cast to any to get 'new' working)
this.assertMethodHasNoParameters(colDef.filter);
filterWrapper.filter = new (<any>colDef.filter)();
} else if (colDef.filter === 'text') {
filterWrapper.filter = new TextFilter();
} else if (colDef.filter === 'number') {
filterWrapper.filter = new NumberFilter();
} else {
filterWrapper.filter = new SetFilter();
}
var filterChangedCallback = this.grid.onFilterChanged.bind(this.grid);
var filterModifiedCallback = this.grid.onFilterModified.bind(this.grid);
var doesRowPassOtherFilters = this.doesRowPassOtherFilters.bind(this, filterWrapper.filter);
var filterParams = colDef.filterParams;
var params = {
colDef: colDef,
rowModel: this.rowModel,
filterChangedCallback: filterChangedCallback,
filterModifiedCallback: filterModifiedCallback,
filterParams: filterParams,
localeTextFunc: this.gridOptionsWrapper.getLocaleTextFunc(),
valueGetter: this.createValueGetter(column),
doesRowPassOtherFilter: doesRowPassOtherFilters,
$scope: filterWrapper.scope
};
if (!filterWrapper.filter.init) { // because users can do custom filters, give nice error message
throw 'Filter is missing method init';
}
filterWrapper.filter.init(params);
if (!filterWrapper.filter.getGui) { // because users can do custom filters, give nice error message
throw 'Filter is missing method getGui';
}
var eFilterGui = document.createElement('div');
eFilterGui.className = 'ag-filter';
var guiFromFilter = filterWrapper.filter.getGui();
if (_.isNodeOrElement(guiFromFilter)) {
//a dom node or element was returned, so add child
eFilterGui.appendChild(guiFromFilter);
} else {
//otherwise assume it was html, so just insert
var eTextSpan = document.createElement('span');
eTextSpan.innerHTML = guiFromFilter;
eFilterGui.appendChild(eTextSpan);
}
if (filterWrapper.scope) {
filterWrapper.gui = this.$compile(eFilterGui)(filterWrapper.scope)[0];
} else {
filterWrapper.gui = eFilterGui;
}
return filterWrapper;
}
private assertMethodHasNoParameters(theMethod: any) {
var getRowsParams = _.getFunctionParameters(theMethod);
if (getRowsParams.length > 0) {
console.warn('ag-grid: It looks like your filter is of the old type and expecting parameters in the constructor.');
console.warn('ag-grid: From ag-grid 1.14, the constructor should take no parameters and init() used instead.');
}
}
public showFilter(column: Column, eventSource: any) {
var filterWrapper = this.getOrCreateFilterWrapper(column);
this.popupService.positionPopup(eventSource, filterWrapper.gui, 200);
var hidePopup = this.popupService.addAsModalPopup(filterWrapper.gui, true);
if (filterWrapper.filter.afterGuiAttached) {
var params = {
hidePopup: hidePopup,
eventSource: eventSource
};
filterWrapper.filter.afterGuiAttached(params);
}
}
}
} | PypiClean |
/Flask-Statics-Helper-1.0.0.tar.gz/Flask-Statics-Helper-1.0.0/flask_statics/static/angular/i18n/angular-locale_shi.js | 'use strict';
angular.module("ngLocale", [], ["$provide", function($provide) {
var PLURAL_CATEGORY = {ZERO: "zero", ONE: "one", TWO: "two", FEW: "few", MANY: "many", OTHER: "other"};
function getDecimals(n) {
n = n + '';
var i = n.indexOf('.');
return (i == -1) ? 0 : n.length - i - 1;
}
function getVF(n, opt_precision) {
var v = opt_precision;
if (undefined === v) {
v = Math.min(getDecimals(n), 3);
}
var base = Math.pow(10, v);
var f = ((n * base) | 0) % base;
return {v: v, f: f};
}
$provide.value("$locale", {
"DATETIME_FORMATS": {
"AMPMS": [
"\u2d5c\u2d49\u2d3c\u2d30\u2d61\u2d5c",
"\u2d5c\u2d30\u2d37\u2d33\u2d33\u2d6f\u2d30\u2d5c"
],
"DAY": [
"\u2d30\u2d59\u2d30\u2d4e\u2d30\u2d59",
"\u2d30\u2d62\u2d4f\u2d30\u2d59",
"\u2d30\u2d59\u2d49\u2d4f\u2d30\u2d59",
"\u2d30\u2d3d\u2d55\u2d30\u2d59",
"\u2d30\u2d3d\u2d61\u2d30\u2d59",
"\u2d59\u2d49\u2d4e\u2d61\u2d30\u2d59",
"\u2d30\u2d59\u2d49\u2d39\u2d62\u2d30\u2d59"
],
"MONTH": [
"\u2d49\u2d4f\u2d4f\u2d30\u2d62\u2d54",
"\u2d31\u2d55\u2d30\u2d62\u2d55",
"\u2d4e\u2d30\u2d55\u2d5a",
"\u2d49\u2d31\u2d54\u2d49\u2d54",
"\u2d4e\u2d30\u2d62\u2d62\u2d53",
"\u2d62\u2d53\u2d4f\u2d62\u2d53",
"\u2d62\u2d53\u2d4d\u2d62\u2d53\u2d63",
"\u2d56\u2d53\u2d5b\u2d5c",
"\u2d5b\u2d53\u2d5c\u2d30\u2d4f\u2d31\u2d49\u2d54",
"\u2d3d\u2d5c\u2d53\u2d31\u2d54",
"\u2d4f\u2d53\u2d61\u2d30\u2d4f\u2d31\u2d49\u2d54",
"\u2d37\u2d53\u2d4a\u2d30\u2d4f\u2d31\u2d49\u2d54"
],
"SHORTDAY": [
"\u2d30\u2d59\u2d30",
"\u2d30\u2d62\u2d4f",
"\u2d30\u2d59\u2d49",
"\u2d30\u2d3d\u2d55",
"\u2d30\u2d3d\u2d61",
"\u2d30\u2d59\u2d49\u2d4e",
"\u2d30\u2d59\u2d49\u2d39"
],
"SHORTMONTH": [
"\u2d49\u2d4f\u2d4f",
"\u2d31\u2d55\u2d30",
"\u2d4e\u2d30\u2d55",
"\u2d49\u2d31\u2d54",
"\u2d4e\u2d30\u2d62",
"\u2d62\u2d53\u2d4f",
"\u2d62\u2d53\u2d4d",
"\u2d56\u2d53\u2d5b",
"\u2d5b\u2d53\u2d5c",
"\u2d3d\u2d5c\u2d53",
"\u2d4f\u2d53\u2d61",
"\u2d37\u2d53\u2d4a"
],
"fullDate": "EEEE d MMMM y",
"longDate": "d MMMM y",
"medium": "d MMM, y HH:mm:ss",
"mediumDate": "d MMM, y",
"mediumTime": "HH:mm:ss",
"short": "d/M/y HH:mm",
"shortDate": "d/M/y",
"shortTime": "HH:mm"
},
"NUMBER_FORMATS": {
"CURRENCY_SYM": "dh",
"DECIMAL_SEP": ",",
"GROUP_SEP": "\u00a0",
"PATTERNS": [
{
"gSize": 3,
"lgSize": 3,
"maxFrac": 3,
"minFrac": 0,
"minInt": 1,
"negPre": "-",
"negSuf": "",
"posPre": "",
"posSuf": ""
},
{
"gSize": 3,
"lgSize": 3,
"maxFrac": 2,
"minFrac": 2,
"minInt": 1,
"negPre": "-",
"negSuf": "\u00a4",
"posPre": "",
"posSuf": "\u00a4"
}
]
},
"id": "shi",
"pluralCat": function(n, opt_precision) { var i = n | 0; var vf = getVF(n, opt_precision); if (i == 1 && vf.v == 0) { return PLURAL_CATEGORY.ONE; } return PLURAL_CATEGORY.OTHER;}
});
}]); | PypiClean |
/EOxServer-1.2.12-py3-none-any.whl/eoxserver/services/mapserver/wms/layerfactories/coverage_mask_layer_factory.py |
from eoxserver.contrib import mapserver as ms
from eoxserver.resources.coverages import models
from eoxserver.services.mapserver.wms.layerfactories.base import (
BaseCoverageLayerFactory
)
class CoverageMaskedLayerFactory(BaseCoverageLayerFactory):
handles = (models.RectifiedDataset, models.RectifiedStitchedMosaic)
suffixes = ("_masked",)
requires_connection = True
def generate(self, eo_object, group_layer, suffix, options):
mask_layer_name = eo_object.identifier + "__mask__"
# handle the mask layers
# get the applicable semantics
mask_semantics = ("polygonmask",)
mask_items = eo_object.data_items.all()
for mask_semantic in mask_semantics:
mask_items = mask_items.filter(semantic__startswith=mask_semantic)
# layer creating closure
def _create_mask_polygon_layer(name):
mask_layer = ms.layerObj()
mask_layer.name = name
mask_layer.type = ms.MS_LAYER_POLYGON
mask_layer.setMetaData("eoxs_geometry_reversed", "true")
cls = ms.classObj(mask_layer)
style = ms.styleObj(cls)
style.color.setRGB(0, 0, 0)
return mask_layer
# multiple masks shall be grouped by a group layer
if len(mask_items) > 1:
# more than one mask, requires a mask group layer
mask_layer = ms.Layer(mask_layer_name)
yield (mask_layer, ())
# generate mask layers
for i, mask_item in enumerate(mask_items):
mask_sublayer = _create_mask_polygon_layer(
"%s%2.2d" % (mask_layer_name, i)
)
mask_sublayer.group = mask_layer.name
yield (mask_sublayer, (mask_item,))
# single mask shall be used directly as a "group" layer
elif len(mask_items) == 1:
mask_layer = _create_mask_polygon_layer(mask_layer_name)
yield (mask_layer, (mask_items[0],))
# no mask at all
else:
mask_layer = None
# handle the image layers
super_items = super(CoverageMaskedLayerFactory, self).generate(
eo_object, group_layer, suffix, options
)
for layer, data_items in super_items:
# if we do have a mask, reference it in the layer
if mask_layer:
layer.mask = mask_layer.name
# fix the layer name by appending the right suffix
layer.name = layer.name + suffix
if layer.group:
layer.group = layer.group + suffix
# "re-yield" the layer and its items
yield (layer, data_items)
def generate_group(self, name):
layer = ms.layerObj()
layer.name = name
layer.type = ms.MS_LAYER_RASTER
return layer | PypiClean |
/GooeyPy-0.2.tar.gz/GooeyPy-0.2/README | =============================
README: GooeyPy
=============================
:Author: Joey Marshall
:Contact: [email protected]
Please don't contact me about bugs in the OpenGL rendering (unless you have a fix); I already know about them!
Who is GooeyPy for?
-------------------
GooeyPy is designed for smaller SDL games that need a light GUI implemented easily and fast. It can be used for, but isn't really designed for, larger games that need large, complex GUIs.
Description
-----------
GooeyPy is the result of my need for a flexible and easy to use gui for pygame. Yes, I realize there are already guis for pygame out there. But with each one there is something I don't like. GooeyPy simply has everything the way I like it. So I'm not going to say it's the best one out there, because everyone has their own opinions. Here are some things that are different about GooeyPy:
Styling:
I didn't like the way styling worked with other guis. Maybe I'm just spoiled by CSS in web development, but wouldn't it be nice if you could have CSS-like styling for a gui? Oh whoops, now you can! With GooeyPy, all widgets are stylable with CSS-like attributes. Also, at any time during the application's run time you can change the styling dynamically (and it will update itself)! All information related to displaying widgets is stored in the styling (that includes positioning, relative or absolute).
.. sourcecode:: python
l = gui.Label("Text", x=395, y=30, font_size=25, color=(0,255,0))
Any additional arguments passed when creating a widget are taken as styling options.
Integration:
With all the guis I've tried, you have to figure out how to integrate the gui into an already-made game. Sometimes that's impossible. And even when it is possible, it's very hard to do until you figure out how the gui works. GooeyPy doesn't steal your pygame events and doesn't interfere with your blitting (well, actually it can. Setting up the surfaces can be a little hairy, but there is some nice documentation to explain it all). A rough sketch of what this looks like in a game loop follows.
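Very roughly, plugging GooeyPy into an existing game loop ends up looking something like the sketch below. Only ``app.run()`` is taken from later in this README; names like ``gui.App``, ``app.add`` and ``app.draw`` are placeholders to show the shape of the loop, so check examples/example1.py for the exact calls.

.. sourcecode:: python

    import pygame
    import gooeypy as gui

    screen = pygame.display.set_mode((640, 480))
    app = gui.App()                      # placeholder name for the top-level gui container
    app.add(gui.Label("Score: 0", x=10, y=10))

    while True:
        for event in pygame.event.get():
            if event.type == pygame.QUIT:
                raise SystemExit
        # ... run your own game logic and drawing here; GooeyPy doesn't take over the loop
        app.run()                        # recalculates widget values/dependencies each frame
        app.draw()                       # placeholder for however the gui gets blitted
        pygame.display.flip()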
Dependencies:
First of all, this isn't what you think. I don't mean other libs that GooeyPy depends on, but the dependencies within GooeyPy. Ok, that doesn't make a whole lot of sense... let me put it another way. You have a widget and all of a sudden you want to change the value. So you do widget.value = "hi". And it works! GooeyPy automatically knows when something changes that affects another part of itself. In this case, the value affects how the widget displays, so it will redraw itself. It works that way with everything! You never have to worry about redrawing a widget when it's dirty (and neither do I within GooeyPy, so that means a whole lot fewer bugs). All the dependencies are also cached.
Linking:
Another cool thing is that you can link values from one widget to another. So if the value of widget A changes, so does the value it's linked to in widget B.
.. sourcecode:: pycon
>>> l1 = gui.Label("Text")
>>> linked_label = gui.Label(l1.link("value"))
>>> linked_label.value
Text
>>> l1.value = "Something else"
>>> linked_label.value
Something else
Be careful not to do **l1.value**, as that will pass in the actual string value!
You can also link with functions!
.. sourcecode:: pycon
>>> x = 1
>>> y = 2
>>> l = gui.Label(lambda:x+y)
>>> l.value
3
>>> x = 6
>>> l.value
8
Actually you'll need to do app.run() before it will recalculate the values, since x and y aren't cellulose cells, but you'll be doing that every frame anyway. :)
Effects:
I like the ability to have cool effects applied to my widgets. Now I have them.
Documentation
-------------
You can open up ipython, import gooeypy, and find a lot of documentation in the doc strings... or you can look at docs/index.html with all the doc strings nicely organized.
If you would like somewhat of a quickstart guide, you can look at examples/example1.py. It has lots of helpful comments and the actual example program shows off a lot of the cool stuff in GooeyPy.
/Flask-Jasmine-1.4.tar.gz/Flask-Jasmine-1.4/flask_jasmine/static/jasmine/jasmine.js | var isCommonJS = typeof window == "undefined" && typeof exports == "object";
/**
* Top level namespace for Jasmine, a lightweight JavaScript BDD/spec/testing framework.
*
* @namespace
*/
var jasmine = {};
if (isCommonJS) exports.jasmine = jasmine;
/**
* @private
*/
jasmine.unimplementedMethod_ = function() {
throw new Error("unimplemented method");
};
/**
* Use <code>jasmine.undefined</code> instead of <code>undefined</code>, since <code>undefined</code> is just
* a plain old variable and may be redefined by somebody else.
*
* @private
*/
jasmine.undefined = jasmine.___undefined___;
/**
* Show diagnostic messages in the console if set to true
*
*/
jasmine.VERBOSE = false;
/**
* Default interval in milliseconds for event loop yields (e.g. to allow network activity or to refresh the screen with the HTML-based runner). Small values here may result in slow test running. Zero means no updates until all tests have completed.
*
*/
jasmine.DEFAULT_UPDATE_INTERVAL = 250;
/**
* Maximum levels of nesting that will be included when an object is pretty-printed
*/
jasmine.MAX_PRETTY_PRINT_DEPTH = 40;
/**
* Default timeout interval in milliseconds for waitsFor() blocks.
*/
jasmine.DEFAULT_TIMEOUT_INTERVAL = 5000;
/**
* By default exceptions thrown in the context of a test are caught by jasmine so that it can run the remaining tests in the suite.
* Set to false to let the exception bubble up in the browser.
*
*/
jasmine.CATCH_EXCEPTIONS = true;
jasmine.getGlobal = function() {
function getGlobal() {
return this;
}
return getGlobal();
};
/**
* Allows for bound functions to be compared. Internal use only.
*
* @ignore
* @private
* @param base {Object} bound 'this' for the function
* @param name {Function} function to find
*/
jasmine.bindOriginal_ = function(base, name) {
var original = base[name];
if (original.apply) {
return function() {
return original.apply(base, arguments);
};
} else {
// IE support
return jasmine.getGlobal()[name];
}
};
jasmine.setTimeout = jasmine.bindOriginal_(jasmine.getGlobal(), 'setTimeout');
jasmine.clearTimeout = jasmine.bindOriginal_(jasmine.getGlobal(), 'clearTimeout');
jasmine.setInterval = jasmine.bindOriginal_(jasmine.getGlobal(), 'setInterval');
jasmine.clearInterval = jasmine.bindOriginal_(jasmine.getGlobal(), 'clearInterval');
jasmine.MessageResult = function(values) {
this.type = 'log';
this.values = values;
this.trace = new Error(); // todo: test better
};
jasmine.MessageResult.prototype.toString = function() {
var text = "";
for (var i = 0; i < this.values.length; i++) {
if (i > 0) text += " ";
if (jasmine.isString_(this.values[i])) {
text += this.values[i];
} else {
text += jasmine.pp(this.values[i]);
}
}
return text;
};
jasmine.ExpectationResult = function(params) {
this.type = 'expect';
this.matcherName = params.matcherName;
this.passed_ = params.passed;
this.expected = params.expected;
this.actual = params.actual;
this.message = this.passed_ ? 'Passed.' : params.message;
var trace = (params.trace || new Error(this.message));
this.trace = this.passed_ ? '' : trace;
};
jasmine.ExpectationResult.prototype.toString = function () {
return this.message;
};
jasmine.ExpectationResult.prototype.passed = function () {
return this.passed_;
};
/**
* Getter for the Jasmine environment. Ensures one gets created
*/
jasmine.getEnv = function() {
var env = jasmine.currentEnv_ = jasmine.currentEnv_ || new jasmine.Env();
return env;
};
/**
* @ignore
* @private
* @param value
* @returns {Boolean}
*/
jasmine.isArray_ = function(value) {
return jasmine.isA_("Array", value);
};
/**
* @ignore
* @private
* @param value
* @returns {Boolean}
*/
jasmine.isString_ = function(value) {
return jasmine.isA_("String", value);
};
/**
* @ignore
* @private
* @param value
* @returns {Boolean}
*/
jasmine.isNumber_ = function(value) {
return jasmine.isA_("Number", value);
};
/**
* @ignore
* @private
* @param {String} typeName
* @param value
* @returns {Boolean}
*/
jasmine.isA_ = function(typeName, value) {
return Object.prototype.toString.apply(value) === '[object ' + typeName + ']';
};
/**
* Pretty printer for expectations. Takes any object and turns it into a human-readable string.
*
* @param value {Object} an object to be outputted
* @returns {String}
*/
jasmine.pp = function(value) {
var stringPrettyPrinter = new jasmine.StringPrettyPrinter();
stringPrettyPrinter.format(value);
return stringPrettyPrinter.string;
};
/**
* Returns true if the object is a DOM Node.
*
* @param {Object} obj object to check
* @returns {Boolean}
*/
jasmine.isDomNode = function(obj) {
return obj.nodeType > 0;
};
/**
* Returns a matchable 'generic' object of the class type. For use in expectations of type when values don't matter.
*
* @example
* // don't care about which function is passed in, as long as it's a function
* expect(mySpy).toHaveBeenCalledWith(jasmine.any(Function));
*
* @param {Class} clazz
* @returns matchable object of the type clazz
*/
jasmine.any = function(clazz) {
return new jasmine.Matchers.Any(clazz);
};
/**
* Returns a matchable subset of a JSON object. For use in expectations when you don't care about all of the
* attributes on the object.
*
* @example
* // don't care about any other attributes than foo.
* expect(mySpy).toHaveBeenCalledWith(jasmine.objectContaining({foo: "bar"}));
*
* @param sample {Object} sample
* @returns matchable object for the sample
*/
jasmine.objectContaining = function (sample) {
return new jasmine.Matchers.ObjectContaining(sample);
};
/**
* Jasmine Spies are test doubles that can act as stubs, spies, fakes or, when used in an expectation, mocks.
*
* Spies should be created in test setup, before expectations. They can then be checked, using the standard Jasmine
* expectation syntax. Spies can be checked if they were called or not and what the calling params were.
*
* A Spy has the following fields: wasCalled, callCount, mostRecentCall, and argsForCall (see docs).
*
* Spies are torn down at the end of every spec.
*
* Note: Do <b>not</b> call new jasmine.Spy() directly - a spy must be created using spyOn, jasmine.createSpy or jasmine.createSpyObj.
*
* @example
* // a stub
* var myStub = jasmine.createSpy('myStub'); // can be used anywhere
*
* // spy example
* var foo = {
* not: function(bool) { return !bool; }
* }
*
* // actual foo.not will not be called, execution stops
* spyOn(foo, 'not');
* // foo.not spied upon, execution will continue to implementation
* spyOn(foo, 'not').andCallThrough();
*
* // fake example
* var foo = {
* not: function(bool) { return !bool; }
* }
*
* // foo.not(val) will return val
* spyOn(foo, 'not').andCallFake(function(value) {return value;});
*
* // mock example
* foo.not(7 == 7);
* expect(foo.not).toHaveBeenCalled();
* expect(foo.not).toHaveBeenCalledWith(true);
*
* @constructor
* @see spyOn, jasmine.createSpy, jasmine.createSpyObj
* @param {String} name
*/
jasmine.Spy = function(name) {
/**
* The name of the spy, if provided.
*/
this.identity = name || 'unknown';
/**
* Is this Object a spy?
*/
this.isSpy = true;
/**
* The actual function this spy stubs.
*/
this.plan = function() {
};
/**
* Tracking of the most recent call to the spy.
* @example
* var mySpy = jasmine.createSpy('foo');
* mySpy(1, 2);
* mySpy.mostRecentCall.args = [1, 2];
*/
this.mostRecentCall = {};
/**
* Holds arguments for each call to the spy, indexed by call count
* @example
* var mySpy = jasmine.createSpy('foo');
* mySpy(1, 2);
* mySpy(7, 8);
* mySpy.mostRecentCall.args = [7, 8];
* mySpy.argsForCall[0] = [1, 2];
* mySpy.argsForCall[1] = [7, 8];
*/
this.argsForCall = [];
this.calls = [];
};
/**
* Tells a spy to call through to the actual implementation.
*
* @example
* var foo = {
* bar: function() { // do some stuff }
* }
*
* // defining a spy on an existing property: foo.bar
* spyOn(foo, 'bar').andCallThrough();
*/
jasmine.Spy.prototype.andCallThrough = function() {
this.plan = this.originalValue;
return this;
};
/**
* For setting the return value of a spy.
*
* @example
* // defining a spy from scratch: foo() returns 'baz'
* var foo = jasmine.createSpy('spy on foo').andReturn('baz');
*
* // defining a spy on an existing property: foo.bar() returns 'baz'
* spyOn(foo, 'bar').andReturn('baz');
*
* @param {Object} value
*/
jasmine.Spy.prototype.andReturn = function(value) {
this.plan = function() {
return value;
};
return this;
};
/**
* For throwing an exception when a spy is called.
*
* @example
* // defining a spy from scratch: foo() throws an exception w/ message 'ouch'
* var foo = jasmine.createSpy('spy on foo').andThrow('baz');
*
* // defining a spy on an existing property: foo.bar() throws an exception w/ message 'ouch'
* spyOn(foo, 'bar').andThrow('baz');
*
* @param {String} exceptionMsg
*/
jasmine.Spy.prototype.andThrow = function(exceptionMsg) {
this.plan = function() {
throw exceptionMsg;
};
return this;
};
/**
* Calls an alternate implementation when a spy is called.
*
* @example
* var baz = function() {
* // do some stuff, return something
* }
* // defining a spy from scratch: foo() calls the function baz
* var foo = jasmine.createSpy('spy on foo').andCall(baz);
*
* // defining a spy on an existing property: foo.bar() calls an anonymous function
* spyOn(foo, 'bar').andCall(function() { return 'baz';} );
*
* @param {Function} fakeFunc
*/
jasmine.Spy.prototype.andCallFake = function(fakeFunc) {
this.plan = fakeFunc;
return this;
};
/**
* Resets all of a spy's tracking variables so that it can be used again.
*
* @example
* spyOn(foo, 'bar');
*
* foo.bar();
*
* expect(foo.bar.callCount).toEqual(1);
*
* foo.bar.reset();
*
* expect(foo.bar.callCount).toEqual(0);
*/
jasmine.Spy.prototype.reset = function() {
this.wasCalled = false;
this.callCount = 0;
this.argsForCall = [];
this.calls = [];
this.mostRecentCall = {};
};
jasmine.createSpy = function(name) {
var spyObj = function() {
spyObj.wasCalled = true;
spyObj.callCount++;
var args = jasmine.util.argsToArray(arguments);
spyObj.mostRecentCall.object = this;
spyObj.mostRecentCall.args = args;
spyObj.argsForCall.push(args);
spyObj.calls.push({object: this, args: args});
return spyObj.plan.apply(this, arguments);
};
var spy = new jasmine.Spy(name);
for (var prop in spy) {
spyObj[prop] = spy[prop];
}
spyObj.reset();
return spyObj;
};
/**
* Determines whether an object is a spy.
*
* @param {jasmine.Spy|Object} putativeSpy
* @returns {Boolean}
*/
jasmine.isSpy = function(putativeSpy) {
return putativeSpy && putativeSpy.isSpy;
};
/**
* Creates a more complicated spy: an Object that has every property a function that is a spy. Used for stubbing something
* large in one call.
*
* @param {String} baseName name of spy class
* @param {Array} methodNames array of names of methods to make spies
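*
* @example
* // 'tape' and its method names are just illustrative; any names work
* var tape = jasmine.createSpyObj('tape', ['play', 'pause']);
* tape.play();
* expect(tape.play).toHaveBeenCalled();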
*/
jasmine.createSpyObj = function(baseName, methodNames) {
if (!jasmine.isArray_(methodNames) || methodNames.length === 0) {
throw new Error('createSpyObj requires a non-empty array of method names to create spies for');
}
var obj = {};
for (var i = 0; i < methodNames.length; i++) {
obj[methodNames[i]] = jasmine.createSpy(baseName + '.' + methodNames[i]);
}
return obj;
};
/**
* All parameters are pretty-printed and concatenated together, then written to the current spec's output.
*
* Be careful not to leave calls to <code>jasmine.log</code> in production code.
*/
jasmine.log = function() {
var spec = jasmine.getEnv().currentSpec;
spec.log.apply(spec, arguments);
};
/**
* Function that installs a spy on an existing object's method name. Used within a Spec to create a spy.
*
* @example
* // spy example
* var foo = {
* not: function(bool) { return !bool; }
* }
* spyOn(foo, 'not'); // actual foo.not will not be called, execution stops
*
* @see jasmine.createSpy
* @param obj
* @param methodName
* @return {jasmine.Spy} a Jasmine spy that can be chained with all spy methods
*/
var spyOn = function(obj, methodName) {
return jasmine.getEnv().currentSpec.spyOn(obj, methodName);
};
if (isCommonJS) exports.spyOn = spyOn;
/**
* Creates a Jasmine spec that will be added to the current suite.
*
* // TODO: pending tests
*
* @example
* it('should be true', function() {
* expect(true).toEqual(true);
* });
*
* @param {String} desc description of this specification
* @param {Function} func defines the preconditions and expectations of the spec
*/
var it = function(desc, func) {
return jasmine.getEnv().it(desc, func);
};
if (isCommonJS) exports.it = it;
/**
* Creates a <em>disabled</em> Jasmine spec.
*
* A convenience method that allows existing specs to be disabled temporarily during development.
*
* @param {String} desc description of this specification
* @param {Function} func defines the preconditions and expectations of the spec
*/
var xit = function(desc, func) {
return jasmine.getEnv().xit(desc, func);
};
if (isCommonJS) exports.xit = xit;
/**
* Starts a chain for a Jasmine expectation.
*
* It is passed an Object that is the actual value and should chain to one of the many
* jasmine.Matchers functions.
*
* @param {Object} actual Actual value to test against and expected value
* @return {jasmine.Matchers}
*/
var expect = function(actual) {
return jasmine.getEnv().currentSpec.expect(actual);
};
if (isCommonJS) exports.expect = expect;
/**
* Defines part of a jasmine spec. Used in combination with waits or waitsFor in asynchronous specs.
*
* @param {Function} func Function that defines part of a jasmine spec.
*/
var runs = function(func) {
jasmine.getEnv().currentSpec.runs(func);
};
if (isCommonJS) exports.runs = runs;
/**
* Waits a fixed time period before moving to the next block.
*
* @deprecated Use waitsFor() instead
* @param {Number} timeout milliseconds to wait
*/
var waits = function(timeout) {
jasmine.getEnv().currentSpec.waits(timeout);
};
if (isCommonJS) exports.waits = waits;
/**
* Waits for the latchFunction to return true before proceeding to the next block.
*
* @param {Function} latchFunction
* @param {String} optional_timeoutMessage
* @param {Number} optional_timeout
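*
* @example
* // an illustrative async spec; the flag name and 500ms delay are made up for this example
* var done = false;
* runs(function() {
*   setTimeout(function() { done = true; }, 500);
* });
* waitsFor(function() {
*   return done;
* }, "the flag to be set", 1000);
* runs(function() {
*   expect(done).toBe(true);
* });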
*/
var waitsFor = function(latchFunction, optional_timeoutMessage, optional_timeout) {
jasmine.getEnv().currentSpec.waitsFor.apply(jasmine.getEnv().currentSpec, arguments);
};
if (isCommonJS) exports.waitsFor = waitsFor;
/**
* A function that is called before each spec in a suite.
*
* Used for spec setup, including validating assumptions.
*
* @param {Function} beforeEachFunction
*/
var beforeEach = function(beforeEachFunction) {
jasmine.getEnv().beforeEach(beforeEachFunction);
};
if (isCommonJS) exports.beforeEach = beforeEach;
/**
* A function that is called after each spec in a suite.
*
* Used for restoring any state that is hijacked during spec execution.
*
* @param {Function} afterEachFunction
*/
var afterEach = function(afterEachFunction) {
jasmine.getEnv().afterEach(afterEachFunction);
};
if (isCommonJS) exports.afterEach = afterEach;
/**
* Defines a suite of specifications.
*
* Stores the description and all defined specs in the Jasmine environment as one suite of specs. Variables declared
* are accessible by calls to beforeEach, it, and afterEach. Describe blocks can be nested, allowing for specialization
* of setup in some tests.
*
* @example
* // TODO: a simple suite
*
* // TODO: a simple suite with a nested describe block
*
* @param {String} description A string, usually the class under test.
* @param {Function} specDefinitions function that defines several specs.
*/
var describe = function(description, specDefinitions) {
return jasmine.getEnv().describe(description, specDefinitions);
};
if (isCommonJS) exports.describe = describe;
/**
* Disables a suite of specifications. Used to disable some suites in a file, or files, temporarily during development.
*
* @param {String} description A string, usually the class under test.
* @param {Function} specDefinitions function that defines several specs.
*/
var xdescribe = function(description, specDefinitions) {
return jasmine.getEnv().xdescribe(description, specDefinitions);
};
if (isCommonJS) exports.xdescribe = xdescribe;
// Provide the XMLHttpRequest class for IE 5.x-6.x:
jasmine.XmlHttpRequest = (typeof XMLHttpRequest == "undefined") ? function() {
function tryIt(f) {
try {
return f();
} catch(e) {
}
return null;
}
var xhr = tryIt(function() {
return new ActiveXObject("Msxml2.XMLHTTP.6.0");
}) ||
tryIt(function() {
return new ActiveXObject("Msxml2.XMLHTTP.3.0");
}) ||
tryIt(function() {
return new ActiveXObject("Msxml2.XMLHTTP");
}) ||
tryIt(function() {
return new ActiveXObject("Microsoft.XMLHTTP");
});
if (!xhr) throw new Error("This browser does not support XMLHttpRequest.");
return xhr;
} : XMLHttpRequest;
/**
* @namespace
*/
jasmine.util = {};
/**
* Declare that a child class inherit it's prototype from the parent class.
*
* @private
* @param {Function} childClass
* @param {Function} parentClass
*/
jasmine.util.inherit = function(childClass, parentClass) {
/**
* @private
*/
var subclass = function() {
};
subclass.prototype = parentClass.prototype;
childClass.prototype = new subclass();
};
jasmine.util.formatException = function(e) {
var lineNumber;
if (e.line) {
lineNumber = e.line;
}
else if (e.lineNumber) {
lineNumber = e.lineNumber;
}
var file;
if (e.sourceURL) {
file = e.sourceURL;
}
else if (e.fileName) {
file = e.fileName;
}
var message = (e.name && e.message) ? (e.name + ': ' + e.message) : e.toString();
if (file && lineNumber) {
message += ' in ' + file + ' (line ' + lineNumber + ')';
}
return message;
};
jasmine.util.htmlEscape = function(str) {
if (!str) return str;
return str.replace(/&/g, '&')
.replace(/</g, '<')
.replace(/>/g, '>');
};
jasmine.util.argsToArray = function(args) {
var arrayOfArgs = [];
for (var i = 0; i < args.length; i++) arrayOfArgs.push(args[i]);
return arrayOfArgs;
};
jasmine.util.extend = function(destination, source) {
for (var property in source) destination[property] = source[property];
return destination;
};
/**
* Environment for Jasmine
*
* @constructor
*/
jasmine.Env = function() {
this.currentSpec = null;
this.currentSuite = null;
this.currentRunner_ = new jasmine.Runner(this);
this.reporter = new jasmine.MultiReporter();
this.updateInterval = jasmine.DEFAULT_UPDATE_INTERVAL;
this.defaultTimeoutInterval = jasmine.DEFAULT_TIMEOUT_INTERVAL;
this.lastUpdate = 0;
this.specFilter = function() {
return true;
};
this.nextSpecId_ = 0;
this.nextSuiteId_ = 0;
this.equalityTesters_ = [];
// wrap matchers
this.matchersClass = function() {
jasmine.Matchers.apply(this, arguments);
};
jasmine.util.inherit(this.matchersClass, jasmine.Matchers);
jasmine.Matchers.wrapInto_(jasmine.Matchers.prototype, this.matchersClass);
};
jasmine.Env.prototype.setTimeout = jasmine.setTimeout;
jasmine.Env.prototype.clearTimeout = jasmine.clearTimeout;
jasmine.Env.prototype.setInterval = jasmine.setInterval;
jasmine.Env.prototype.clearInterval = jasmine.clearInterval;
/**
* @returns an object containing jasmine version build info, if set.
*/
jasmine.Env.prototype.version = function () {
if (jasmine.version_) {
return jasmine.version_;
} else {
throw new Error('Version not set');
}
};
/**
* @returns string containing jasmine version build info, if set.
*/
jasmine.Env.prototype.versionString = function() {
if (!jasmine.version_) {
return "version unknown";
}
var version = this.version();
var versionString = version.major + "." + version.minor + "." + version.build;
if (version.release_candidate) {
versionString += ".rc" + version.release_candidate;
}
versionString += " revision " + version.revision;
return versionString;
};
/**
* @returns a sequential integer starting at 0
*/
jasmine.Env.prototype.nextSpecId = function () {
return this.nextSpecId_++;
};
/**
* @returns a sequential integer starting at 0
*/
jasmine.Env.prototype.nextSuiteId = function () {
return this.nextSuiteId_++;
};
/**
* Register a reporter to receive status updates from Jasmine.
* @param {jasmine.Reporter} reporter An object which will receive status updates.
*/
jasmine.Env.prototype.addReporter = function(reporter) {
this.reporter.addReporter(reporter);
};
jasmine.Env.prototype.execute = function() {
this.currentRunner_.execute();
};
jasmine.Env.prototype.describe = function(description, specDefinitions) {
var suite = new jasmine.Suite(this, description, specDefinitions, this.currentSuite);
var parentSuite = this.currentSuite;
if (parentSuite) {
parentSuite.add(suite);
} else {
this.currentRunner_.add(suite);
}
this.currentSuite = suite;
var declarationError = null;
try {
specDefinitions.call(suite);
} catch(e) {
declarationError = e;
}
if (declarationError) {
this.it("encountered a declaration exception", function() {
throw declarationError;
});
}
this.currentSuite = parentSuite;
return suite;
};
jasmine.Env.prototype.beforeEach = function(beforeEachFunction) {
if (this.currentSuite) {
this.currentSuite.beforeEach(beforeEachFunction);
} else {
this.currentRunner_.beforeEach(beforeEachFunction);
}
};
jasmine.Env.prototype.currentRunner = function () {
return this.currentRunner_;
};
jasmine.Env.prototype.afterEach = function(afterEachFunction) {
if (this.currentSuite) {
this.currentSuite.afterEach(afterEachFunction);
} else {
this.currentRunner_.afterEach(afterEachFunction);
}
};
jasmine.Env.prototype.xdescribe = function(desc, specDefinitions) {
return {
execute: function() {
}
};
};
jasmine.Env.prototype.it = function(description, func) {
var spec = new jasmine.Spec(this, this.currentSuite, description);
this.currentSuite.add(spec);
this.currentSpec = spec;
if (func) {
spec.runs(func);
}
return spec;
};
jasmine.Env.prototype.xit = function(desc, func) {
return {
id: this.nextSpecId(),
runs: function() {
}
};
};
jasmine.Env.prototype.compareRegExps_ = function(a, b, mismatchKeys, mismatchValues) {
if (a.source != b.source)
mismatchValues.push("expected pattern /" + b.source + "/ is not equal to the pattern /" + a.source + "/");
if (a.ignoreCase != b.ignoreCase)
mismatchValues.push("expected modifier i was" + (b.ignoreCase ? " " : " not ") + "set and does not equal the origin modifier");
if (a.global != b.global)
mismatchValues.push("expected modifier g was" + (b.global ? " " : " not ") + "set and does not equal the origin modifier");
if (a.multiline != b.multiline)
mismatchValues.push("expected modifier m was" + (b.multiline ? " " : " not ") + "set and does not equal the origin modifier");
if (a.sticky != b.sticky)
mismatchValues.push("expected modifier y was" + (b.sticky ? " " : " not ") + "set and does not equal the origin modifier");
return (mismatchValues.length === 0);
};
jasmine.Env.prototype.compareObjects_ = function(a, b, mismatchKeys, mismatchValues) {
if (a.__Jasmine_been_here_before__ === b && b.__Jasmine_been_here_before__ === a) {
return true;
}
a.__Jasmine_been_here_before__ = b;
b.__Jasmine_been_here_before__ = a;
var hasKey = function(obj, keyName) {
return obj !== null && obj[keyName] !== jasmine.undefined;
};
for (var property in b) {
if (!hasKey(a, property) && hasKey(b, property)) {
mismatchKeys.push("expected has key '" + property + "', but missing from actual.");
}
}
for (property in a) {
if (!hasKey(b, property) && hasKey(a, property)) {
mismatchKeys.push("expected missing key '" + property + "', but present in actual.");
}
}
for (property in b) {
if (property == '__Jasmine_been_here_before__') continue;
if (!this.equals_(a[property], b[property], mismatchKeys, mismatchValues)) {
mismatchValues.push("'" + property + "' was '" + (b[property] ? jasmine.util.htmlEscape(b[property].toString()) : b[property]) + "' in expected, but was '" + (a[property] ? jasmine.util.htmlEscape(a[property].toString()) : a[property]) + "' in actual.");
}
}
if (jasmine.isArray_(a) && jasmine.isArray_(b) && a.length != b.length) {
mismatchValues.push("arrays were not the same length");
}
delete a.__Jasmine_been_here_before__;
delete b.__Jasmine_been_here_before__;
return (mismatchKeys.length === 0 && mismatchValues.length === 0);
};
jasmine.Env.prototype.equals_ = function(a, b, mismatchKeys, mismatchValues) {
mismatchKeys = mismatchKeys || [];
mismatchValues = mismatchValues || [];
for (var i = 0; i < this.equalityTesters_.length; i++) {
var equalityTester = this.equalityTesters_[i];
var result = equalityTester(a, b, this, mismatchKeys, mismatchValues);
if (result !== jasmine.undefined) return result;
}
if (a === b) return true;
if (a === jasmine.undefined || a === null || b === jasmine.undefined || b === null) {
return (a == jasmine.undefined && b == jasmine.undefined);
}
if (jasmine.isDomNode(a) && jasmine.isDomNode(b)) {
return a === b;
}
if (a instanceof Date && b instanceof Date) {
return a.getTime() == b.getTime();
}
if (a.jasmineMatches) {
return a.jasmineMatches(b);
}
if (b.jasmineMatches) {
return b.jasmineMatches(a);
}
if (a instanceof jasmine.Matchers.ObjectContaining) {
return a.matches(b);
}
if (b instanceof jasmine.Matchers.ObjectContaining) {
return b.matches(a);
}
if (jasmine.isString_(a) && jasmine.isString_(b)) {
return (a == b);
}
if (jasmine.isNumber_(a) && jasmine.isNumber_(b)) {
return (a == b);
}
if (a instanceof RegExp && b instanceof RegExp) {
return this.compareRegExps_(a, b, mismatchKeys, mismatchValues);
}
if (typeof a === "object" && typeof b === "object") {
return this.compareObjects_(a, b, mismatchKeys, mismatchValues);
}
//Straight check
return (a === b);
};
jasmine.Env.prototype.contains_ = function(haystack, needle) {
if (jasmine.isArray_(haystack)) {
for (var i = 0; i < haystack.length; i++) {
if (this.equals_(haystack[i], needle)) return true;
}
return false;
}
return haystack.indexOf(needle) >= 0;
};
jasmine.Env.prototype.addEqualityTester = function(equalityTester) {
this.equalityTesters_.push(equalityTester);
};
/** No-op base class for Jasmine reporters.
*
* @constructor
*/
jasmine.Reporter = function() {
};
//noinspection JSUnusedLocalSymbols
jasmine.Reporter.prototype.reportRunnerStarting = function(runner) {
};
//noinspection JSUnusedLocalSymbols
jasmine.Reporter.prototype.reportRunnerResults = function(runner) {
};
//noinspection JSUnusedLocalSymbols
jasmine.Reporter.prototype.reportSuiteResults = function(suite) {
};
//noinspection JSUnusedLocalSymbols
jasmine.Reporter.prototype.reportSpecStarting = function(spec) {
};
//noinspection JSUnusedLocalSymbols
jasmine.Reporter.prototype.reportSpecResults = function(spec) {
};
//noinspection JSUnusedLocalSymbols
jasmine.Reporter.prototype.log = function(str) {
};
/**
* Blocks are functions with executable code that make up a spec.
*
* @constructor
* @param {jasmine.Env} env
* @param {Function} func
* @param {jasmine.Spec} spec
*/
jasmine.Block = function(env, func, spec) {
this.env = env;
this.func = func;
this.spec = spec;
};
jasmine.Block.prototype.execute = function(onComplete) {
if (!jasmine.CATCH_EXCEPTIONS) {
this.func.apply(this.spec);
}
else {
try {
this.func.apply(this.spec);
} catch (e) {
this.spec.fail(e);
}
}
onComplete();
};
/** JavaScript API reporter.
*
* @constructor
*/
jasmine.JsApiReporter = function() {
this.started = false;
this.finished = false;
this.suites_ = [];
this.results_ = {};
};
jasmine.JsApiReporter.prototype.reportRunnerStarting = function(runner) {
this.started = true;
var suites = runner.topLevelSuites();
for (var i = 0; i < suites.length; i++) {
var suite = suites[i];
this.suites_.push(this.summarize_(suite));
}
};
jasmine.JsApiReporter.prototype.suites = function() {
return this.suites_;
};
jasmine.JsApiReporter.prototype.summarize_ = function(suiteOrSpec) {
var isSuite = suiteOrSpec instanceof jasmine.Suite;
var summary = {
id: suiteOrSpec.id,
name: suiteOrSpec.description,
type: isSuite ? 'suite' : 'spec',
children: []
};
if (isSuite) {
var children = suiteOrSpec.children();
for (var i = 0; i < children.length; i++) {
summary.children.push(this.summarize_(children[i]));
}
}
return summary;
};
jasmine.JsApiReporter.prototype.results = function() {
return this.results_;
};
jasmine.JsApiReporter.prototype.resultsForSpec = function(specId) {
return this.results_[specId];
};
//noinspection JSUnusedLocalSymbols
jasmine.JsApiReporter.prototype.reportRunnerResults = function(runner) {
this.finished = true;
};
//noinspection JSUnusedLocalSymbols
jasmine.JsApiReporter.prototype.reportSuiteResults = function(suite) {
};
//noinspection JSUnusedLocalSymbols
jasmine.JsApiReporter.prototype.reportSpecResults = function(spec) {
this.results_[spec.id] = {
messages: spec.results().getItems(),
result: spec.results().failedCount > 0 ? "failed" : "passed"
};
};
//noinspection JSUnusedLocalSymbols
jasmine.JsApiReporter.prototype.log = function(str) {
};
jasmine.JsApiReporter.prototype.resultsForSpecs = function(specIds){
var results = {};
for (var i = 0; i < specIds.length; i++) {
var specId = specIds[i];
results[specId] = this.summarizeResult_(this.results_[specId]);
}
return results;
};
jasmine.JsApiReporter.prototype.summarizeResult_ = function(result){
var summaryMessages = [];
var messagesLength = result.messages.length;
for (var messageIndex = 0; messageIndex < messagesLength; messageIndex++) {
var resultMessage = result.messages[messageIndex];
summaryMessages.push({
text: resultMessage.type == 'log' ? resultMessage.toString() : jasmine.undefined,
passed: resultMessage.passed ? resultMessage.passed() : true,
type: resultMessage.type,
message: resultMessage.message,
trace: {
stack: resultMessage.passed && !resultMessage.passed() ? resultMessage.trace.stack : jasmine.undefined
}
});
}
return {
result : result.result,
messages : summaryMessages
};
};
/**
* @constructor
* @param {jasmine.Env} env
* @param actual
* @param {jasmine.Spec} spec
*/
jasmine.Matchers = function(env, actual, spec, opt_isNot) {
this.env = env;
this.actual = actual;
this.spec = spec;
this.isNot = opt_isNot || false;
this.reportWasCalled_ = false;
};
// todo: @deprecated as of Jasmine 0.11, remove soon [xw]
jasmine.Matchers.pp = function(str) {
throw new Error("jasmine.Matchers.pp() is no longer supported, please use jasmine.pp() instead!");
};
// todo: @deprecated Deprecated as of Jasmine 0.10. Rewrite your custom matchers to return true or false. [xw]
jasmine.Matchers.prototype.report = function(result, failing_message, details) {
throw new Error("As of jasmine 0.11, custom matchers must be implemented differently -- please see jasmine docs");
};
jasmine.Matchers.wrapInto_ = function(prototype, matchersClass) {
for (var methodName in prototype) {
if (methodName == 'report') continue;
var orig = prototype[methodName];
matchersClass.prototype[methodName] = jasmine.Matchers.matcherFn_(methodName, orig);
}
};
jasmine.Matchers.matcherFn_ = function(matcherName, matcherFunction) {
return function() {
var matcherArgs = jasmine.util.argsToArray(arguments);
var result = matcherFunction.apply(this, arguments);
if (this.isNot) {
result = !result;
}
if (this.reportWasCalled_) return result;
var message;
if (!result) {
if (this.message) {
message = this.message.apply(this, arguments);
if (jasmine.isArray_(message)) {
message = message[this.isNot ? 1 : 0];
}
} else {
var englishyPredicate = matcherName.replace(/[A-Z]/g, function(s) { return ' ' + s.toLowerCase(); });
message = "Expected " + jasmine.pp(this.actual) + (this.isNot ? " not " : " ") + englishyPredicate;
if (matcherArgs.length > 0) {
for (var i = 0; i < matcherArgs.length; i++) {
if (i > 0) message += ",";
message += " " + jasmine.pp(matcherArgs[i]);
}
}
message += ".";
}
}
var expectationResult = new jasmine.ExpectationResult({
matcherName: matcherName,
passed: result,
expected: matcherArgs.length > 1 ? matcherArgs : matcherArgs[0],
actual: this.actual,
message: message
});
this.spec.addMatcherResult(expectationResult);
return jasmine.undefined;
};
};
/**
* toBe: compares the actual to the expected using ===
* @param expected
*/
jasmine.Matchers.prototype.toBe = function(expected) {
return this.actual === expected;
};
/**
* toNotBe: compares the actual to the expected using !==
* @param expected
* @deprecated as of 1.0. Use not.toBe() instead.
*/
jasmine.Matchers.prototype.toNotBe = function(expected) {
return this.actual !== expected;
};
/**
* toEqual: compares the actual to the expected using common sense equality. Handles Objects, Arrays, etc.
*
* @param expected
*/
jasmine.Matchers.prototype.toEqual = function(expected) {
return this.env.equals_(this.actual, expected);
};
/**
* toNotEqual: compares the actual to the expected using the ! of jasmine.Matchers.toEqual
* @param expected
* @deprecated as of 1.0. Use not.toEqual() instead.
*/
jasmine.Matchers.prototype.toNotEqual = function(expected) {
return !this.env.equals_(this.actual, expected);
};
/**
* Matcher that compares the actual to the expected using a regular expression. Constructs a RegExp, so takes
* a pattern or a String.
*
* @param expected
*/
jasmine.Matchers.prototype.toMatch = function(expected) {
return new RegExp(expected).test(this.actual);
};
/**
* Matcher that compares the actual to the expected using the boolean inverse of jasmine.Matchers.toMatch
* @param expected
* @deprecated as of 1.0. Use not.toMatch() instead.
*/
jasmine.Matchers.prototype.toNotMatch = function(expected) {
return !(new RegExp(expected).test(this.actual));
};
/**
 * Matcher that passes when the actual is defined, i.e. anything other than jasmine.undefined.
*/
jasmine.Matchers.prototype.toBeDefined = function() {
return (this.actual !== jasmine.undefined);
};
/**
 * Matcher that passes when the actual is jasmine.undefined.
*/
jasmine.Matchers.prototype.toBeUndefined = function() {
return (this.actual === jasmine.undefined);
};
/**
* Matcher that compares the actual to null.
*/
jasmine.Matchers.prototype.toBeNull = function() {
return (this.actual === null);
};
/**
* Matcher that compares the actual to NaN.
*/
jasmine.Matchers.prototype.toBeNaN = function() {
this.message = function() {
return [ "Expected " + jasmine.pp(this.actual) + " to be NaN." ];
};
return (this.actual !== this.actual);
};
/**
* Matcher that boolean not-nots the actual.
*/
jasmine.Matchers.prototype.toBeTruthy = function() {
return !!this.actual;
};
/**
* Matcher that boolean nots the actual.
*/
jasmine.Matchers.prototype.toBeFalsy = function() {
return !this.actual;
};
/**
* Matcher that checks to see if the actual, a Jasmine spy, was called.
*/
jasmine.Matchers.prototype.toHaveBeenCalled = function() {
if (arguments.length > 0) {
throw new Error('toHaveBeenCalled does not take arguments, use toHaveBeenCalledWith');
}
if (!jasmine.isSpy(this.actual)) {
throw new Error('Expected a spy, but got ' + jasmine.pp(this.actual) + '.');
}
this.message = function() {
return [
"Expected spy " + this.actual.identity + " to have been called.",
"Expected spy " + this.actual.identity + " not to have been called."
];
};
return this.actual.wasCalled;
};
/** @deprecated Use expect(xxx).toHaveBeenCalled() instead */
jasmine.Matchers.prototype.wasCalled = jasmine.Matchers.prototype.toHaveBeenCalled;
/**
* Matcher that checks to see if the actual, a Jasmine spy, was not called.
*
* @deprecated Use expect(xxx).not.toHaveBeenCalled() instead
*/
jasmine.Matchers.prototype.wasNotCalled = function() {
if (arguments.length > 0) {
throw new Error('wasNotCalled does not take arguments');
}
if (!jasmine.isSpy(this.actual)) {
throw new Error('Expected a spy, but got ' + jasmine.pp(this.actual) + '.');
}
this.message = function() {
return [
"Expected spy " + this.actual.identity + " to not have been called.",
"Expected spy " + this.actual.identity + " to have been called."
];
};
return !this.actual.wasCalled;
};
/**
* Matcher that checks to see if the actual, a Jasmine spy, was called with a set of parameters.
*
* @example
*
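 * // A minimal sketch; the spy and its arguments are illustrative:
 * var fetchSpy = jasmine.createSpy('fetch');
 * fetchSpy('/users', 1);
 * expect(fetchSpy).toHaveBeenCalledWith('/users', 1);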
*/
jasmine.Matchers.prototype.toHaveBeenCalledWith = function() {
var expectedArgs = jasmine.util.argsToArray(arguments);
if (!jasmine.isSpy(this.actual)) {
throw new Error('Expected a spy, but got ' + jasmine.pp(this.actual) + '.');
}
this.message = function() {
var invertedMessage = "Expected spy " + this.actual.identity + " not to have been called with " + jasmine.pp(expectedArgs) + " but it was.";
var positiveMessage = "";
if (this.actual.callCount === 0) {
positiveMessage = "Expected spy " + this.actual.identity + " to have been called with " + jasmine.pp(expectedArgs) + " but it was never called.";
} else {
positiveMessage = "Expected spy " + this.actual.identity + " to have been called with " + jasmine.pp(expectedArgs) + " but actual calls were " + jasmine.pp(this.actual.argsForCall).replace(/^\[ | \]$/g, '')
}
return [positiveMessage, invertedMessage];
};
return this.env.contains_(this.actual.argsForCall, expectedArgs);
};
/** @deprecated Use expect(xxx).toHaveBeenCalledWith() instead */
jasmine.Matchers.prototype.wasCalledWith = jasmine.Matchers.prototype.toHaveBeenCalledWith;
/** @deprecated Use expect(xxx).not.toHaveBeenCalledWith() instead */
jasmine.Matchers.prototype.wasNotCalledWith = function() {
var expectedArgs = jasmine.util.argsToArray(arguments);
if (!jasmine.isSpy(this.actual)) {
throw new Error('Expected a spy, but got ' + jasmine.pp(this.actual) + '.');
}
this.message = function() {
return [
"Expected spy not to have been called with " + jasmine.pp(expectedArgs) + " but it was",
"Expected spy to have been called with " + jasmine.pp(expectedArgs) + " but it was"
];
};
return !this.env.contains_(this.actual.argsForCall, expectedArgs);
};
/**
* Matcher that checks that the expected item is an element in the actual Array.
*
* @param {Object} expected
*/
jasmine.Matchers.prototype.toContain = function(expected) {
return this.env.contains_(this.actual, expected);
};
/**
* Matcher that checks that the expected item is NOT an element in the actual Array.
*
* @param {Object} expected
* @deprecated as of 1.0. Use not.toContain() instead.
*/
jasmine.Matchers.prototype.toNotContain = function(expected) {
return !this.env.contains_(this.actual, expected);
};
jasmine.Matchers.prototype.toBeLessThan = function(expected) {
return this.actual < expected;
};
jasmine.Matchers.prototype.toBeGreaterThan = function(expected) {
return this.actual > expected;
};
/**
* Matcher that checks that the expected item is equal to the actual item
* up to a given level of decimal precision (default 2).
*
* @param {Number} expected
* @param {Number} precision, as number of decimal places
*/
jasmine.Matchers.prototype.toBeCloseTo = function(expected, precision) {
if (!(precision === 0)) {
precision = precision || 2;
}
return Math.abs(expected - this.actual) < (Math.pow(10, -precision) / 2);
};
/**
* Matcher that checks that the expected exception was thrown by the actual.
*
* @param {String} [expected]
*/
jasmine.Matchers.prototype.toThrow = function(expected) {
var result = false;
var exception;
if (typeof this.actual != 'function') {
throw new Error('Actual is not a function');
}
try {
this.actual();
} catch (e) {
exception = e;
}
if (exception) {
result = (expected === jasmine.undefined || this.env.equals_(exception.message || exception, expected.message || expected));
}
var not = this.isNot ? "not " : "";
this.message = function() {
if (exception && (expected === jasmine.undefined || !this.env.equals_(exception.message || exception, expected.message || expected))) {
return ["Expected function " + not + "to throw", expected ? expected.message || expected : "an exception", ", but it threw", exception.message || exception].join(' ');
} else {
return "Expected function to throw an exception.";
}
};
return result;
};
jasmine.Matchers.Any = function(expectedClass) {
this.expectedClass = expectedClass;
};
jasmine.Matchers.Any.prototype.jasmineMatches = function(other) {
if (this.expectedClass == String) {
return typeof other == 'string' || other instanceof String;
}
if (this.expectedClass == Number) {
return typeof other == 'number' || other instanceof Number;
}
if (this.expectedClass == Function) {
return typeof other == 'function' || other instanceof Function;
}
if (this.expectedClass == Object) {
return typeof other == 'object';
}
return other instanceof this.expectedClass;
};
jasmine.Matchers.Any.prototype.jasmineToString = function() {
return '<jasmine.any(' + this.expectedClass + ')>';
};
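// Usage sketch (assuming the jasmine.any() factory defined elsewhere in this file; the spy is illustrative):
//   expect(callbackSpy).toHaveBeenCalledWith(jasmine.any(Number));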
jasmine.Matchers.ObjectContaining = function (sample) {
this.sample = sample;
};
jasmine.Matchers.ObjectContaining.prototype.jasmineMatches = function(other, mismatchKeys, mismatchValues) {
mismatchKeys = mismatchKeys || [];
mismatchValues = mismatchValues || [];
var env = jasmine.getEnv();
var hasKey = function(obj, keyName) {
return obj != null && obj[keyName] !== jasmine.undefined;
};
for (var property in this.sample) {
if (!hasKey(other, property) && hasKey(this.sample, property)) {
mismatchKeys.push("expected has key '" + property + "', but missing from actual.");
}
else if (!env.equals_(this.sample[property], other[property], mismatchKeys, mismatchValues)) {
mismatchValues.push("'" + property + "' was '" + (other[property] ? jasmine.util.htmlEscape(other[property].toString()) : other[property]) + "' in expected, but was '" + (this.sample[property] ? jasmine.util.htmlEscape(this.sample[property].toString()) : this.sample[property]) + "' in actual.");
}
}
return (mismatchKeys.length === 0 && mismatchValues.length === 0);
};
jasmine.Matchers.ObjectContaining.prototype.jasmineToString = function () {
return "<jasmine.objectContaining(" + jasmine.pp(this.sample) + ")>";
};
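// Usage sketch (assuming the jasmine.objectContaining() factory defined elsewhere in this file):
//   expect({ name: 'mars', moons: 2 }).toEqual(jasmine.objectContaining({ name: 'mars' }));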
// Mock setTimeout, clearTimeout
// Contributed by Pivotal Computer Systems, www.pivotalsf.com
jasmine.FakeTimer = function() {
this.reset();
var self = this;
self.setTimeout = function(funcToCall, millis) {
self.timeoutsMade++;
self.scheduleFunction(self.timeoutsMade, funcToCall, millis, false);
return self.timeoutsMade;
};
self.setInterval = function(funcToCall, millis) {
self.timeoutsMade++;
self.scheduleFunction(self.timeoutsMade, funcToCall, millis, true);
return self.timeoutsMade;
};
self.clearTimeout = function(timeoutKey) {
self.scheduledFunctions[timeoutKey] = jasmine.undefined;
};
self.clearInterval = function(timeoutKey) {
self.scheduledFunctions[timeoutKey] = jasmine.undefined;
};
};
jasmine.FakeTimer.prototype.reset = function() {
this.timeoutsMade = 0;
this.scheduledFunctions = {};
this.nowMillis = 0;
};
jasmine.FakeTimer.prototype.tick = function(millis) {
var oldMillis = this.nowMillis;
var newMillis = oldMillis + millis;
this.runFunctionsWithinRange(oldMillis, newMillis);
this.nowMillis = newMillis;
};
jasmine.FakeTimer.prototype.runFunctionsWithinRange = function(oldMillis, nowMillis) {
var scheduledFunc;
var funcsToRun = [];
for (var timeoutKey in this.scheduledFunctions) {
scheduledFunc = this.scheduledFunctions[timeoutKey];
if (scheduledFunc != jasmine.undefined &&
scheduledFunc.runAtMillis >= oldMillis &&
scheduledFunc.runAtMillis <= nowMillis) {
funcsToRun.push(scheduledFunc);
this.scheduledFunctions[timeoutKey] = jasmine.undefined;
}
}
if (funcsToRun.length > 0) {
funcsToRun.sort(function(a, b) {
return a.runAtMillis - b.runAtMillis;
});
for (var i = 0; i < funcsToRun.length; ++i) {
try {
var funcToRun = funcsToRun[i];
this.nowMillis = funcToRun.runAtMillis;
funcToRun.funcToCall();
if (funcToRun.recurring) {
this.scheduleFunction(funcToRun.timeoutKey,
funcToRun.funcToCall,
funcToRun.millis,
true);
}
} catch(e) {
}
}
this.runFunctionsWithinRange(oldMillis, nowMillis);
}
};
jasmine.FakeTimer.prototype.scheduleFunction = function(timeoutKey, funcToCall, millis, recurring) {
this.scheduledFunctions[timeoutKey] = {
runAtMillis: this.nowMillis + millis,
funcToCall: funcToCall,
recurring: recurring,
timeoutKey: timeoutKey,
millis: millis
};
};
/**
* @namespace
*/
jasmine.Clock = {
defaultFakeTimer: new jasmine.FakeTimer(),
reset: function() {
jasmine.Clock.assertInstalled();
jasmine.Clock.defaultFakeTimer.reset();
},
tick: function(millis) {
jasmine.Clock.assertInstalled();
jasmine.Clock.defaultFakeTimer.tick(millis);
},
runFunctionsWithinRange: function(oldMillis, nowMillis) {
jasmine.Clock.defaultFakeTimer.runFunctionsWithinRange(oldMillis, nowMillis);
},
scheduleFunction: function(timeoutKey, funcToCall, millis, recurring) {
jasmine.Clock.defaultFakeTimer.scheduleFunction(timeoutKey, funcToCall, millis, recurring);
},
useMock: function() {
if (!jasmine.Clock.isInstalled()) {
var spec = jasmine.getEnv().currentSpec;
spec.after(jasmine.Clock.uninstallMock);
jasmine.Clock.installMock();
}
},
installMock: function() {
jasmine.Clock.installed = jasmine.Clock.defaultFakeTimer;
},
uninstallMock: function() {
jasmine.Clock.assertInstalled();
jasmine.Clock.installed = jasmine.Clock.real;
},
real: {
setTimeout: jasmine.getGlobal().setTimeout,
clearTimeout: jasmine.getGlobal().clearTimeout,
setInterval: jasmine.getGlobal().setInterval,
clearInterval: jasmine.getGlobal().clearInterval
},
assertInstalled: function() {
if (!jasmine.Clock.isInstalled()) {
throw new Error("Mock clock is not installed, use jasmine.Clock.useMock()");
}
},
isInstalled: function() {
return jasmine.Clock.installed == jasmine.Clock.defaultFakeTimer;
},
installed: null
};
jasmine.Clock.installed = jasmine.Clock.real;
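// Typical use of the mock clock inside a spec (a sketch; the spec body is illustrative):
//   beforeEach(function() { jasmine.Clock.useMock(); });
//   it('fires the callback once the clock is advanced', function() {
//     var fired = false;
//     setTimeout(function() { fired = true; }, 100);
//     jasmine.Clock.tick(101);
//     expect(fired).toBe(true);
//   });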
// The wrappers below use .apply when the installed clock supports it and fall back to a
// direct call for IE, whose host timer functions lack .apply.
jasmine.getGlobal().setTimeout = function(funcToCall, millis) {
if (jasmine.Clock.installed.setTimeout.apply) {
return jasmine.Clock.installed.setTimeout.apply(this, arguments);
} else {
return jasmine.Clock.installed.setTimeout(funcToCall, millis);
}
};
jasmine.getGlobal().setInterval = function(funcToCall, millis) {
if (jasmine.Clock.installed.setInterval.apply) {
return jasmine.Clock.installed.setInterval.apply(this, arguments);
} else {
return jasmine.Clock.installed.setInterval(funcToCall, millis);
}
};
jasmine.getGlobal().clearTimeout = function(timeoutKey) {
if (jasmine.Clock.installed.clearTimeout.apply) {
return jasmine.Clock.installed.clearTimeout.apply(this, arguments);
} else {
return jasmine.Clock.installed.clearTimeout(timeoutKey);
}
};
jasmine.getGlobal().clearInterval = function(timeoutKey) {
  if (jasmine.Clock.installed.clearInterval.apply) {
return jasmine.Clock.installed.clearInterval.apply(this, arguments);
} else {
return jasmine.Clock.installed.clearInterval(timeoutKey);
}
};
/**
* @constructor
*/
jasmine.MultiReporter = function() {
this.subReporters_ = [];
};
jasmine.util.inherit(jasmine.MultiReporter, jasmine.Reporter);
jasmine.MultiReporter.prototype.addReporter = function(reporter) {
this.subReporters_.push(reporter);
};
(function() {
var functionNames = [
"reportRunnerStarting",
"reportRunnerResults",
"reportSuiteResults",
"reportSpecStarting",
"reportSpecResults",
"log"
];
for (var i = 0; i < functionNames.length; i++) {
var functionName = functionNames[i];
jasmine.MultiReporter.prototype[functionName] = (function(functionName) {
return function() {
for (var j = 0; j < this.subReporters_.length; j++) {
var subReporter = this.subReporters_[j];
if (subReporter[functionName]) {
subReporter[functionName].apply(subReporter, arguments);
}
}
};
})(functionName);
}
})();
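// Sketch of fanning results out to several reporters (the concrete reporter types are illustrative):
//   var multi = new jasmine.MultiReporter();
//   multi.addReporter(new jasmine.HtmlReporter());
//   multi.addReporter(myCustomReporter);
//   jasmine.getEnv().addReporter(multi);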
/**
 * Holds results for a set of Jasmine specs. Allows the results array to hold another jasmine.NestedResults.
*
* @constructor
*/
jasmine.NestedResults = function() {
/**
* The total count of results
*/
this.totalCount = 0;
/**
* Number of passed results
*/
this.passedCount = 0;
/**
* Number of failed results
*/
this.failedCount = 0;
/**
* Was this suite/spec skipped?
*/
this.skipped = false;
/**
* @ignore
*/
this.items_ = [];
};
/**
* Roll up the result counts.
*
* @param result
*/
jasmine.NestedResults.prototype.rollupCounts = function(result) {
this.totalCount += result.totalCount;
this.passedCount += result.passedCount;
this.failedCount += result.failedCount;
};
/**
* Adds a log message.
* @param values Array of message parts which will be concatenated later.
*/
jasmine.NestedResults.prototype.log = function(values) {
this.items_.push(new jasmine.MessageResult(values));
};
/**
* Getter for the results: message & results.
*/
jasmine.NestedResults.prototype.getItems = function() {
return this.items_;
};
/**
* Adds a result, tracking counts (total, passed, & failed)
* @param {jasmine.ExpectationResult|jasmine.NestedResults} result
*/
jasmine.NestedResults.prototype.addResult = function(result) {
if (result.type != 'log') {
if (result.items_) {
this.rollupCounts(result);
} else {
this.totalCount++;
if (result.passed()) {
this.passedCount++;
} else {
this.failedCount++;
}
}
}
this.items_.push(result);
};
/**
* @returns {Boolean} True if <b>everything</b> below passed
*/
jasmine.NestedResults.prototype.passed = function() {
return this.passedCount === this.totalCount;
};
/**
 * Base class for pretty-printing expectation results.
*/
jasmine.PrettyPrinter = function() {
this.ppNestLevel_ = 0;
};
/**
* Formats a value in a nice, human-readable string.
*
* @param value
*/
jasmine.PrettyPrinter.prototype.format = function(value) {
this.ppNestLevel_++;
try {
if (value === jasmine.undefined) {
this.emitScalar('undefined');
} else if (value === null) {
this.emitScalar('null');
} else if (value === jasmine.getGlobal()) {
this.emitScalar('<global>');
} else if (value.jasmineToString) {
this.emitScalar(value.jasmineToString());
} else if (typeof value === 'string') {
this.emitString(value);
} else if (jasmine.isSpy(value)) {
this.emitScalar("spy on " + value.identity);
} else if (value instanceof RegExp) {
this.emitScalar(value.toString());
} else if (typeof value === 'function') {
this.emitScalar('Function');
} else if (typeof value.nodeType === 'number') {
this.emitScalar('HTMLNode');
} else if (value instanceof Date) {
this.emitScalar('Date(' + value + ')');
} else if (value.__Jasmine_been_here_before__) {
this.emitScalar('<circular reference: ' + (jasmine.isArray_(value) ? 'Array' : 'Object') + '>');
} else if (jasmine.isArray_(value) || typeof value == 'object') {
value.__Jasmine_been_here_before__ = true;
if (jasmine.isArray_(value)) {
this.emitArray(value);
} else {
this.emitObject(value);
}
delete value.__Jasmine_been_here_before__;
} else {
this.emitScalar(value.toString());
}
} finally {
this.ppNestLevel_--;
}
};
jasmine.PrettyPrinter.prototype.iterateObject = function(obj, fn) {
for (var property in obj) {
if (!obj.hasOwnProperty(property)) continue;
if (property == '__Jasmine_been_here_before__') continue;
fn(property, obj.__lookupGetter__ ? (obj.__lookupGetter__(property) !== jasmine.undefined &&
obj.__lookupGetter__(property) !== null) : false);
}
};
jasmine.PrettyPrinter.prototype.emitArray = jasmine.unimplementedMethod_;
jasmine.PrettyPrinter.prototype.emitObject = jasmine.unimplementedMethod_;
jasmine.PrettyPrinter.prototype.emitScalar = jasmine.unimplementedMethod_;
jasmine.PrettyPrinter.prototype.emitString = jasmine.unimplementedMethod_;
jasmine.StringPrettyPrinter = function() {
jasmine.PrettyPrinter.call(this);
this.string = '';
};
jasmine.util.inherit(jasmine.StringPrettyPrinter, jasmine.PrettyPrinter);
jasmine.StringPrettyPrinter.prototype.emitScalar = function(value) {
this.append(value);
};
jasmine.StringPrettyPrinter.prototype.emitString = function(value) {
this.append("'" + value + "'");
};
jasmine.StringPrettyPrinter.prototype.emitArray = function(array) {
if (this.ppNestLevel_ > jasmine.MAX_PRETTY_PRINT_DEPTH) {
this.append("Array");
return;
}
this.append('[ ');
for (var i = 0; i < array.length; i++) {
if (i > 0) {
this.append(', ');
}
this.format(array[i]);
}
this.append(' ]');
};
jasmine.StringPrettyPrinter.prototype.emitObject = function(obj) {
if (this.ppNestLevel_ > jasmine.MAX_PRETTY_PRINT_DEPTH) {
this.append("Object");
return;
}
var self = this;
this.append('{ ');
var first = true;
this.iterateObject(obj, function(property, isGetter) {
if (first) {
first = false;
} else {
self.append(', ');
}
self.append(property);
self.append(' : ');
if (isGetter) {
self.append('<getter>');
} else {
self.format(obj[property]);
}
});
this.append(' }');
};
jasmine.StringPrettyPrinter.prototype.append = function(value) {
this.string += value;
};
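// Example of the string form produced above (derived from emitArray/emitObject/emitString; the value is illustrative):
//   jasmine.pp([1, 'two', { a : 3 }])  // => roughly "[ 1, 'two', { a : 3 } ]"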
jasmine.Queue = function(env) {
this.env = env;
// parallel to blocks. each true value in this array means the block will
// get executed even if we abort
this.ensured = [];
this.blocks = [];
this.running = false;
this.index = 0;
this.offset = 0;
this.abort = false;
};
jasmine.Queue.prototype.addBefore = function(block, ensure) {
if (ensure === jasmine.undefined) {
ensure = false;
}
this.blocks.unshift(block);
this.ensured.unshift(ensure);
};
jasmine.Queue.prototype.add = function(block, ensure) {
if (ensure === jasmine.undefined) {
ensure = false;
}
this.blocks.push(block);
this.ensured.push(ensure);
};
jasmine.Queue.prototype.insertNext = function(block, ensure) {
if (ensure === jasmine.undefined) {
ensure = false;
}
this.ensured.splice((this.index + this.offset + 1), 0, ensure);
this.blocks.splice((this.index + this.offset + 1), 0, block);
this.offset++;
};
jasmine.Queue.prototype.start = function(onComplete) {
this.running = true;
this.onComplete = onComplete;
this.next_();
};
jasmine.Queue.prototype.isRunning = function() {
return this.running;
};
jasmine.Queue.LOOP_DONT_RECURSE = true;
jasmine.Queue.prototype.next_ = function() {
var self = this;
var goAgain = true;
while (goAgain) {
goAgain = false;
if (self.index < self.blocks.length && !(this.abort && !this.ensured[self.index])) {
var calledSynchronously = true;
var completedSynchronously = false;
var onComplete = function () {
if (jasmine.Queue.LOOP_DONT_RECURSE && calledSynchronously) {
completedSynchronously = true;
return;
}
if (self.blocks[self.index].abort) {
self.abort = true;
}
self.offset = 0;
self.index++;
var now = new Date().getTime();
if (self.env.updateInterval && now - self.env.lastUpdate > self.env.updateInterval) {
self.env.lastUpdate = now;
self.env.setTimeout(function() {
self.next_();
}, 0);
} else {
if (jasmine.Queue.LOOP_DONT_RECURSE && completedSynchronously) {
goAgain = true;
} else {
self.next_();
}
}
};
self.blocks[self.index].execute(onComplete);
calledSynchronously = false;
if (completedSynchronously) {
onComplete();
}
} else {
self.running = false;
if (self.onComplete) {
self.onComplete();
}
}
}
};
jasmine.Queue.prototype.results = function() {
var results = new jasmine.NestedResults();
for (var i = 0; i < this.blocks.length; i++) {
if (this.blocks[i].results) {
results.addResult(this.blocks[i].results());
}
}
return results;
};
/**
* Runner
*
* @constructor
* @param {jasmine.Env} env
*/
jasmine.Runner = function(env) {
var self = this;
self.env = env;
self.queue = new jasmine.Queue(env);
self.before_ = [];
self.after_ = [];
self.suites_ = [];
};
jasmine.Runner.prototype.execute = function() {
var self = this;
if (self.env.reporter.reportRunnerStarting) {
self.env.reporter.reportRunnerStarting(this);
}
self.queue.start(function () {
self.finishCallback();
});
};
jasmine.Runner.prototype.beforeEach = function(beforeEachFunction) {
beforeEachFunction.typeName = 'beforeEach';
this.before_.splice(0,0,beforeEachFunction);
};
jasmine.Runner.prototype.afterEach = function(afterEachFunction) {
afterEachFunction.typeName = 'afterEach';
this.after_.splice(0,0,afterEachFunction);
};
jasmine.Runner.prototype.finishCallback = function() {
this.env.reporter.reportRunnerResults(this);
};
jasmine.Runner.prototype.addSuite = function(suite) {
this.suites_.push(suite);
};
jasmine.Runner.prototype.add = function(block) {
if (block instanceof jasmine.Suite) {
this.addSuite(block);
}
this.queue.add(block);
};
jasmine.Runner.prototype.specs = function () {
var suites = this.suites();
var specs = [];
for (var i = 0; i < suites.length; i++) {
specs = specs.concat(suites[i].specs());
}
return specs;
};
jasmine.Runner.prototype.suites = function() {
return this.suites_;
};
jasmine.Runner.prototype.topLevelSuites = function() {
var topLevelSuites = [];
for (var i = 0; i < this.suites_.length; i++) {
if (!this.suites_[i].parentSuite) {
topLevelSuites.push(this.suites_[i]);
}
}
return topLevelSuites;
};
jasmine.Runner.prototype.results = function() {
return this.queue.results();
};
/**
* Internal representation of a Jasmine specification, or test.
*
* @constructor
* @param {jasmine.Env} env
* @param {jasmine.Suite} suite
* @param {String} description
*/
jasmine.Spec = function(env, suite, description) {
if (!env) {
throw new Error('jasmine.Env() required');
}
if (!suite) {
throw new Error('jasmine.Suite() required');
}
var spec = this;
spec.id = env.nextSpecId ? env.nextSpecId() : null;
spec.env = env;
spec.suite = suite;
spec.description = description;
spec.queue = new jasmine.Queue(env);
spec.afterCallbacks = [];
spec.spies_ = [];
spec.results_ = new jasmine.NestedResults();
spec.results_.description = description;
spec.matchersClass = null;
};
jasmine.Spec.prototype.getFullName = function() {
return this.suite.getFullName() + ' ' + this.description + '.';
};
jasmine.Spec.prototype.results = function() {
return this.results_;
};
/**
* All parameters are pretty-printed and concatenated together, then written to the spec's output.
*
* Be careful not to leave calls to <code>jasmine.log</code> in production code.
*/
jasmine.Spec.prototype.log = function() {
return this.results_.log(arguments);
};
jasmine.Spec.prototype.runs = function (func) {
var block = new jasmine.Block(this.env, func, this);
this.addToQueue(block);
return this;
};
jasmine.Spec.prototype.addToQueue = function (block) {
if (this.queue.isRunning()) {
this.queue.insertNext(block);
} else {
this.queue.add(block);
}
};
/**
* @param {jasmine.ExpectationResult} result
*/
jasmine.Spec.prototype.addMatcherResult = function(result) {
this.results_.addResult(result);
};
jasmine.Spec.prototype.expect = function(actual) {
var positive = new (this.getMatchersClass_())(this.env, actual, this);
positive.not = new (this.getMatchersClass_())(this.env, actual, this, true);
return positive;
};
/**
* Waits a fixed time period before moving to the next block.
*
* @deprecated Use waitsFor() instead
* @param {Number} timeout milliseconds to wait
*/
jasmine.Spec.prototype.waits = function(timeout) {
var waitsFunc = new jasmine.WaitsBlock(this.env, timeout, this);
this.addToQueue(waitsFunc);
return this;
};
/**
* Waits for the latchFunction to return true before proceeding to the next block.
*
* @param {Function} latchFunction
* @param {String} optional_timeoutMessage
* @param {Number} optional_timeout
*/
jasmine.Spec.prototype.waitsFor = function(latchFunction, optional_timeoutMessage, optional_timeout) {
var latchFunction_ = null;
var optional_timeoutMessage_ = null;
var optional_timeout_ = null;
for (var i = 0; i < arguments.length; i++) {
var arg = arguments[i];
switch (typeof arg) {
case 'function':
latchFunction_ = arg;
break;
case 'string':
optional_timeoutMessage_ = arg;
break;
case 'number':
optional_timeout_ = arg;
break;
}
}
var waitsForFunc = new jasmine.WaitsForBlock(this.env, optional_timeout_, latchFunction_, optional_timeoutMessage_, this);
this.addToQueue(waitsForFunc);
return this;
};
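// Classic runs()/waitsFor() pattern that this block supports (a sketch; the async work is illustrative):
//   it('polls until the latch function returns true', function() {
//     var done = false;
//     runs(function() { setTimeout(function() { done = true; }, 50); });
//     waitsFor(function() { return done; }, 'the flag to be set', 500);
//     runs(function() { expect(done).toBeTruthy(); });
//   });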
jasmine.Spec.prototype.fail = function (e) {
var expectationResult = new jasmine.ExpectationResult({
passed: false,
message: e ? jasmine.util.formatException(e) : 'Exception',
trace: { stack: e.stack }
});
this.results_.addResult(expectationResult);
};
jasmine.Spec.prototype.getMatchersClass_ = function() {
return this.matchersClass || this.env.matchersClass;
};
jasmine.Spec.prototype.addMatchers = function(matchersPrototype) {
var parent = this.getMatchersClass_();
var newMatchersClass = function() {
parent.apply(this, arguments);
};
jasmine.util.inherit(newMatchersClass, parent);
jasmine.Matchers.wrapInto_(matchersPrototype, newMatchersClass);
this.matchersClass = newMatchersClass;
};
jasmine.Spec.prototype.finishCallback = function() {
this.env.reporter.reportSpecResults(this);
};
jasmine.Spec.prototype.finish = function(onComplete) {
this.removeAllSpies();
this.finishCallback();
if (onComplete) {
onComplete();
}
};
jasmine.Spec.prototype.after = function(doAfter) {
if (this.queue.isRunning()) {
this.queue.add(new jasmine.Block(this.env, doAfter, this), true);
} else {
this.afterCallbacks.unshift(doAfter);
}
};
jasmine.Spec.prototype.execute = function(onComplete) {
var spec = this;
if (!spec.env.specFilter(spec)) {
spec.results_.skipped = true;
spec.finish(onComplete);
return;
}
this.env.reporter.reportSpecStarting(this);
spec.env.currentSpec = spec;
spec.addBeforesAndAftersToQueue();
spec.queue.start(function () {
spec.finish(onComplete);
});
};
jasmine.Spec.prototype.addBeforesAndAftersToQueue = function() {
var runner = this.env.currentRunner();
var i;
for (var suite = this.suite; suite; suite = suite.parentSuite) {
for (i = 0; i < suite.before_.length; i++) {
this.queue.addBefore(new jasmine.Block(this.env, suite.before_[i], this));
}
}
for (i = 0; i < runner.before_.length; i++) {
this.queue.addBefore(new jasmine.Block(this.env, runner.before_[i], this));
}
for (i = 0; i < this.afterCallbacks.length; i++) {
this.queue.add(new jasmine.Block(this.env, this.afterCallbacks[i], this), true);
}
for (suite = this.suite; suite; suite = suite.parentSuite) {
for (i = 0; i < suite.after_.length; i++) {
this.queue.add(new jasmine.Block(this.env, suite.after_[i], this), true);
}
}
for (i = 0; i < runner.after_.length; i++) {
this.queue.add(new jasmine.Block(this.env, runner.after_[i], this), true);
}
};
jasmine.Spec.prototype.explodes = function() {
throw 'explodes function should not have been called';
};
jasmine.Spec.prototype.spyOn = function(obj, methodName, ignoreMethodDoesntExist) {
if (obj == jasmine.undefined) {
throw "spyOn could not find an object to spy upon for " + methodName + "()";
}
if (!ignoreMethodDoesntExist && obj[methodName] === jasmine.undefined) {
throw methodName + '() method does not exist';
}
if (!ignoreMethodDoesntExist && obj[methodName] && obj[methodName].isSpy) {
throw new Error(methodName + ' has already been spied upon');
}
var spyObj = jasmine.createSpy(methodName);
this.spies_.push(spyObj);
spyObj.baseObj = obj;
spyObj.methodName = methodName;
spyObj.originalValue = obj[methodName];
obj[methodName] = spyObj;
return spyObj;
};
jasmine.Spec.prototype.removeAllSpies = function() {
for (var i = 0; i < this.spies_.length; i++) {
var spy = this.spies_[i];
spy.baseObj[spy.methodName] = spy.originalValue;
}
this.spies_ = [];
};
/**
* Internal representation of a Jasmine suite.
*
* @constructor
* @param {jasmine.Env} env
* @param {String} description
* @param {Function} specDefinitions
* @param {jasmine.Suite} parentSuite
*/
jasmine.Suite = function(env, description, specDefinitions, parentSuite) {
var self = this;
self.id = env.nextSuiteId ? env.nextSuiteId() : null;
self.description = description;
self.queue = new jasmine.Queue(env);
self.parentSuite = parentSuite;
self.env = env;
self.before_ = [];
self.after_ = [];
self.children_ = [];
self.suites_ = [];
self.specs_ = [];
};
jasmine.Suite.prototype.getFullName = function() {
var fullName = this.description;
for (var parentSuite = this.parentSuite; parentSuite; parentSuite = parentSuite.parentSuite) {
fullName = parentSuite.description + ' ' + fullName;
}
return fullName;
};
jasmine.Suite.prototype.finish = function(onComplete) {
this.env.reporter.reportSuiteResults(this);
this.finished = true;
if (typeof(onComplete) == 'function') {
onComplete();
}
};
jasmine.Suite.prototype.beforeEach = function(beforeEachFunction) {
beforeEachFunction.typeName = 'beforeEach';
this.before_.unshift(beforeEachFunction);
};
jasmine.Suite.prototype.afterEach = function(afterEachFunction) {
afterEachFunction.typeName = 'afterEach';
this.after_.unshift(afterEachFunction);
};
jasmine.Suite.prototype.results = function() {
return this.queue.results();
};
jasmine.Suite.prototype.add = function(suiteOrSpec) {
this.children_.push(suiteOrSpec);
if (suiteOrSpec instanceof jasmine.Suite) {
this.suites_.push(suiteOrSpec);
this.env.currentRunner().addSuite(suiteOrSpec);
} else {
this.specs_.push(suiteOrSpec);
}
this.queue.add(suiteOrSpec);
};
jasmine.Suite.prototype.specs = function() {
return this.specs_;
};
jasmine.Suite.prototype.suites = function() {
return this.suites_;
};
jasmine.Suite.prototype.children = function() {
return this.children_;
};
jasmine.Suite.prototype.execute = function(onComplete) {
var self = this;
this.queue.start(function () {
self.finish(onComplete);
});
};
jasmine.WaitsBlock = function(env, timeout, spec) {
this.timeout = timeout;
jasmine.Block.call(this, env, null, spec);
};
jasmine.util.inherit(jasmine.WaitsBlock, jasmine.Block);
jasmine.WaitsBlock.prototype.execute = function (onComplete) {
if (jasmine.VERBOSE) {
this.env.reporter.log('>> Jasmine waiting for ' + this.timeout + ' ms...');
}
this.env.setTimeout(function () {
onComplete();
}, this.timeout);
};
/**
* A block which waits for some condition to become true, with timeout.
*
* @constructor
* @extends jasmine.Block
* @param {jasmine.Env} env The Jasmine environment.
* @param {Number} timeout The maximum time in milliseconds to wait for the condition to become true.
* @param {Function} latchFunction A function which returns true when the desired condition has been met.
* @param {String} message The message to display if the desired condition hasn't been met within the given time period.
* @param {jasmine.Spec} spec The Jasmine spec.
*/
jasmine.WaitsForBlock = function(env, timeout, latchFunction, message, spec) {
this.timeout = timeout || env.defaultTimeoutInterval;
this.latchFunction = latchFunction;
this.message = message;
this.totalTimeSpentWaitingForLatch = 0;
jasmine.Block.call(this, env, null, spec);
};
jasmine.util.inherit(jasmine.WaitsForBlock, jasmine.Block);
jasmine.WaitsForBlock.TIMEOUT_INCREMENT = 10;
jasmine.WaitsForBlock.prototype.execute = function(onComplete) {
if (jasmine.VERBOSE) {
this.env.reporter.log('>> Jasmine waiting for ' + (this.message || 'something to happen'));
}
var latchFunctionResult;
try {
latchFunctionResult = this.latchFunction.apply(this.spec);
} catch (e) {
this.spec.fail(e);
onComplete();
return;
}
if (latchFunctionResult) {
onComplete();
} else if (this.totalTimeSpentWaitingForLatch >= this.timeout) {
var message = 'timed out after ' + this.timeout + ' msec waiting for ' + (this.message || 'something to happen');
this.spec.fail({
name: 'timeout',
message: message
});
this.abort = true;
onComplete();
} else {
this.totalTimeSpentWaitingForLatch += jasmine.WaitsForBlock.TIMEOUT_INCREMENT;
var self = this;
this.env.setTimeout(function() {
self.execute(onComplete);
}, jasmine.WaitsForBlock.TIMEOUT_INCREMENT);
}
};
jasmine.version_= {
"major": 1,
"minor": 3,
"build": 1,
"revision": 1354556913
}; | PypiClean |
/GoDaddyPy-2.3.4.tar.gz/GoDaddyPy-2.3.4/README.rst | |downloads| |travis| |climate|
.. |downloads| image:: https://img.shields.io/pypi/dm/godaddypy.svg
:target: https://pypi.python.org/pypi/godaddypy
.. |travis| image:: https://travis-ci.org/eXamadeus/godaddypy.svg?branch=master
:target: https://travis-ci.org/eXamadeus/godaddypy
.. |climate| image:: https://codeclimate.com/github/eXamadeus/godaddypy/badges/gpa.svg
:target: https://codeclimate.com/github/eXamadeus/godaddypy
GoDaddyPy
==========
A Python library for updating DNS settings through the GoDaddy v1 API.
Source located @ https://github.com/eXamadeus/godaddypy
Migrated from @ https://github.com/eXamadeus-zz/godaddypy
This concept was spawned from observerss' pygodaddy @ https://github.com/observerss/pygodaddy.
Setup
--------
First, go to https://developer.godaddy.com/keys/ and request a production API key and secret.
*Note: Occasionally a production API key doesn't work correctly. If that happens, delete the key and request a new one.*
Second, install GoDaddyPy with pip.
.. code-block:: bash
$ pip install godaddypy
..
Examples
--------
.. code-block:: python
>>> from godaddypy import Client, Account
>>>
>>> my_acct = Account(api_key='PUBLIC_KEY', api_secret='SECRET_KEY')
>>> delegate_acct = Account(api_key='PUBLIC_KEY', api_secret='SECRET_KEY', delegate='DELEGATE_ID')
>>> client = Client(my_acct)
>>> delegate_client = Client(delegate_acct)
>>>
>>> client.get_domains()
['domain1.example', 'domain2.example']
>>>
>>> client.get_records('domain1.example', record_type='A')
[{'name': 'dynamic', 'ttl': 3600, 'data': '1.1.1.1', 'type': 'A'}]
>>>
>>> client.update_ip('2.2.2.2', domains=['domain1.example'])
True
>>>
>>> client.get_records('domain1.example')
[{'name': 'dynamic', 'ttl': 3600, 'data': '2.2.2.2', 'type': 'A'}, {'name': 'dynamic', 'ttl': 3600, 'data': '::1',
'type': 'AAAA'},]
>>>
>>> client.get_records('apple.com', record_type='A', name='@')
[{u'data': u'1.2.3.4', u'type': u'A', u'name': u'@', u'ttl': 3600}]
>>>
>>> client.update_record_ip('3.3.3.3', 'domain1.example', 'dynamic', 'A')
True
>>>
>>> client.add_record('apple.com', {'data':'1.2.3.4','name':'test','ttl':3600, 'type':'A'})
True
>>>
>>> client.delete_records('apple.com', name='test')
True
..
| PypiClean |
/INGInious-0.8.7.tar.gz/INGInious-0.8.7/inginious/common/babel.py | from inginious.common import custom_yaml
from inginious.common.tasks_problems import CodeProblem, CodeSingleLineProblem, MultipleChoiceProblem, MatchProblem, FileProblem
def import_class(name):
m = name.split('.')
mod = __import__(m[0])
for comp in m[1:]:
mod = getattr(mod, comp)
return mod
def get_strings(content, fields):
    # If fields is an empty list or dict, take all the elements and stop there
    if isinstance(fields, dict) and not len(fields):
        for key, val in content.items():
            yield val, key
        return
    elif isinstance(fields, list) and not len(fields):
        for val in content:
            yield val, ""
        return
for key, val in fields.items():
if isinstance(val, dict):
yield from get_strings(content.get(key, {}), val)
elif isinstance(val, list):
for elem in content.get(key, []):
yield from get_strings(elem, val[0])
else:
result = content.get(key, "")
if result:
yield result, key
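# Hypothetical Babel mapping entries that would route task/course YAML files through the
# extract_yaml() extractor below (dotted-path method names are resolved by Babel; the file
# patterns are illustrative):
#   [inginious.common.babel.extract_yaml: **/task.yaml]
#   [inginious.common.babel.extract_yaml: **/course.yaml]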
def extract_yaml(fileobj, keywords, comment_tags, options):
task_problem_types = {"code": CodeProblem, "code_single_line": CodeSingleLineProblem,
"file": FileProblem, "multiple_choice": MultipleChoiceProblem,
"match": MatchProblem}
problems = options["problems"].split() if "problems" in options else []
for problem in problems:
problem_class = import_class(problem)
task_problem_types[problem_class.get_type()] = problem_class
source = fileobj.read().decode(options.get('encoding', 'utf-8'))
content = custom_yaml.load(source)
if "task.yaml" in fileobj.name:
keys = ["author", "context", "name"]
for key in keys:
yield 0, "", content.get(key, ""), [key]
        for problem_id, problem_content in content.get("problems", {}).items():
            fields = task_problem_types.get(problem_content.get('type', "")).get_text_fields()
            for string, strkey in get_strings(problem_content, fields):
                yield 0, "", string, [key + ", " + problem_id + ", " + strkey]
elif "course.yaml" in fileobj.name:
yield 0, "", content.get("name", ""), ["name"] | PypiClean |
/LabtoolSuite-0.1.3.tar.gz/LabtoolSuite-0.1.3/Labtools/docs/docs/_build/html/_static/sidebar.js | $(function() {
// global elements used by the functions.
// the 'sidebarbutton' element is defined as global after its
// creation, in the add_sidebar_button function
var bodywrapper = $('.bodywrapper');
var sidebar = $('.sphinxsidebar');
var sidebarwrapper = $('.sphinxsidebarwrapper');
// if the document has no sidebar for some reason, bail out to avoid errors
if (!sidebar.length) return;
// original margin-left of the bodywrapper and width of the sidebar
// with the sidebar expanded
var bw_margin_expanded = bodywrapper.css('margin-left');
var ssb_width_expanded = sidebar.width();
// margin-left of the bodywrapper and width of the sidebar
// with the sidebar collapsed
var bw_margin_collapsed = '.8em';
var ssb_width_collapsed = '.8em';
// colors used by the current theme
var dark_color = $('.related').css('background-color');
var light_color = $('.document').css('background-color');
function sidebar_is_collapsed() {
return sidebarwrapper.is(':not(:visible)');
}
function toggle_sidebar() {
if (sidebar_is_collapsed())
expand_sidebar();
else
collapse_sidebar();
}
function collapse_sidebar() {
sidebarwrapper.hide();
sidebar.css('width', ssb_width_collapsed);
bodywrapper.css('margin-left', bw_margin_collapsed);
sidebarbutton.css({
'margin-left': '0',
'height': bodywrapper.height()
});
sidebarbutton.find('span').text('»');
sidebarbutton.attr('title', _('Expand sidebar'));
document.cookie = 'sidebar=collapsed';
}
function expand_sidebar() {
bodywrapper.css('margin-left', bw_margin_expanded);
sidebar.css('width', ssb_width_expanded);
sidebarwrapper.show();
sidebarbutton.css({
'margin-left': ssb_width_expanded-12,
'height': bodywrapper.height()
});
sidebarbutton.find('span').text('«');
sidebarbutton.attr('title', _('Collapse sidebar'));
document.cookie = 'sidebar=expanded';
}
function add_sidebar_button() {
sidebarwrapper.css({
'float': 'left',
'margin-right': '0',
'width': ssb_width_expanded - 28
});
// create the button
sidebar.append(
'<div id="sidebarbutton"><span>«</span></div>'
);
var sidebarbutton = $('#sidebarbutton');
light_color = sidebarbutton.css('background-color');
// find the height of the viewport to center the '<<' in the page
var viewport_height;
if (window.innerHeight)
viewport_height = window.innerHeight;
else
viewport_height = $(window).height();
sidebarbutton.find('span').css({
'display': 'block',
'margin-top': (viewport_height - sidebar.position().top - 20) / 2
});
sidebarbutton.click(toggle_sidebar);
sidebarbutton.attr('title', _('Collapse sidebar'));
sidebarbutton.css({
'color': '#FFFFFF',
'border-left': '1px solid ' + dark_color,
'font-size': '1.2em',
'cursor': 'pointer',
'height': bodywrapper.height(),
'padding-top': '1px',
'margin-left': ssb_width_expanded - 12
});
sidebarbutton.hover(
function () {
$(this).css('background-color', dark_color);
},
function () {
$(this).css('background-color', light_color);
}
);
}
function set_position_from_cookie() {
if (!document.cookie)
return;
var items = document.cookie.split(';');
for(var k=0; k<items.length; k++) {
var key_val = items[k].split('=');
var key = key_val[0].replace(/ /, ""); // strip leading spaces
if (key == 'sidebar') {
var value = key_val[1];
if ((value == 'collapsed') && (!sidebar_is_collapsed()))
collapse_sidebar();
else if ((value == 'expanded') && (sidebar_is_collapsed()))
expand_sidebar();
}
}
}
add_sidebar_button();
var sidebarbutton = $('#sidebarbutton');
set_position_from_cookie();
}); | PypiClean |
/chatglm6bpkg-0.0.1.tar.gz/chatglm6bpkg-0.0.1/api.py | from fastapi import FastAPI, Request
from transformers import AutoTokenizer, AutoModel
import uvicorn, json, datetime
import torch
DEVICE = "cuda"
DEVICE_ID = "0"
CUDA_DEVICE = f"{DEVICE}:{DEVICE_ID}" if DEVICE_ID else DEVICE
api_model, api_tokenizer = None, None
def torch_gc():
if torch.cuda.is_available():
with torch.cuda.device(CUDA_DEVICE):
torch.cuda.empty_cache()
torch.cuda.ipc_collect()
app = FastAPI()
@app.post("/")
async def create_item(request: Request):
global api_model, api_tokenizer
json_post_raw = await request.json()
json_post = json.dumps(json_post_raw)
json_post_list = json.loads(json_post)
prompt = json_post_list.get('prompt')
history = json_post_list.get('history')
max_length = json_post_list.get('max_length')
top_p = json_post_list.get('top_p')
temperature = json_post_list.get('temperature')
response, history = api_model.chat(api_tokenizer,
prompt,
history=history,
max_length=max_length if max_length else 2048,
top_p=top_p if top_p else 0.7,
temperature=temperature if temperature else 0.95)
now = datetime.datetime.now()
time = now.strftime("%Y-%m-%d %H:%M:%S")
answer = {
"response": response,
"history": history,
"status": 200,
"time": time
}
log = "[" + time + "] " + '", prompt:"' + prompt + '", response:"' + repr(response) + '"'
print(log)
torch_gc()
return answer
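# Example request against the endpoint above (a sketch, assuming the default host/port used by
# launch_server below):
#   curl -X POST http://127.0.0.1:8000/ \
#        -H "Content-Type: application/json" \
#        -d '{"prompt": "Hello", "history": []}'
# The JSON reply contains "response", "history", "status" and "time".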
def launch_server(
model_name_or_path="THUDM/chatglm-6b",
trust_remote_code=True,
model=None,
tokenizer=None,
host="0.0.0.0",
port=8000,
workers=1,
):
global api_model, api_tokenizer
if tokenizer is None:
api_tokenizer = AutoTokenizer.from_pretrained(model_name_or_path, trust_remote_code=trust_remote_code)
else:
api_tokenizer = tokenizer
if model is None:
api_model = AutoModel.from_pretrained(model_name_or_path, trust_remote_code=trust_remote_code).half().cuda()
api_model = api_model.eval()
else:
api_model = model
uvicorn.run(app, host=host, port=port, workers=workers)
if __name__ == '__main__':
model_name_or_path = "THUDM/chatglm-6b"
tokenizer = AutoTokenizer.from_pretrained(model_name_or_path, trust_remote_code=True)
model = AutoModel.from_pretrained(model_name_or_path, trust_remote_code=True).float()
model = model.eval()
launch_server(model=model, tokenizer=tokenizer) | PypiClean |
/LitleSdkPython3-9.3.1b0.tar.gz/LitleSdkPython3-9.3.1b0/litleSdkPythonTest/functional/TestBatchTcp.py |
import os, sys
lib_path = os.path.abspath('../all')
sys.path.append(lib_path)
from SetupTest import *
import unittest
import tempfile
import shutil
import copy
class TestBatchTcp(unittest.TestCase):
def setUp(self):
self.merchantId = '0180'
def testSendToLitle_WithFileConfig(self):
requestFileName = "litleSdk-testBatchFile-fileConfig.xml"
request = litleBatchFileRequest(requestFileName)
self.assertTrue(os.path.exists(request.requestFile.name))
configFromFile = request.config
self.assertEqual('prelive.litle.com', configFromFile.batchHost)
self.assertEqual('15000', configFromFile.batchPort)
self.prepareTestRequest(request)
response = request.sendRequestTCP()
self.assertPythonApi(request, response)
requestDir = configFromFile.batchRequestFolder
responseDir = configFromFile.batchResponseFolder
self.assertGeneratedFiles(requestDir, responseDir, requestFileName, request)
def testSendToLitle_WithConfigOverrides(self):
requestDir = tempfile.gettempdir() + '/' + 'request'
responseDir = tempfile.gettempdir() + '/' + 'response'
configOverrides = Configuration()
configOverrides.batchHost = 'prelive.litle.com'
configOverrides.batchPort = '15000'
configOverrides.batchRequestFolder = requestDir
configOverrides.batchResponseFolder = responseDir
requestFileName = "litleSdk-testBatchFile-fileConfig.xml"
request = litleBatchFileRequest(requestFileName, configOverrides)
self.assertTrue(os.path.exists(request.requestFile.name))
self.prepareTestRequest(request)
response = request.sendRequestTCP()
self.assertPythonApi(request, response)
self.assertGeneratedFiles(requestDir, responseDir, requestFileName, request)
def testMechantBatchAndProcess(self):
requestFileName = "litleSdk-testBatchFile-MECHA.xml"
for i in range(0, 3):
request = litleBatchFileRequest(requestFileName)
configFromFile = request.config
self.assertEqual('prelive.litle.com', configFromFile.batchHost)
self.assertEqual('15000', configFromFile.batchPort)
batch = request.createBatch()
# card
card = litleXmlFields.cardType()
card.number = '4100000000000001'
card.expDate = '1210'
card.type = 'VI'
# echeck
echeck = litleXmlFields.echeck()
echeck.accNum = '1092969901'
echeck.accType = 'Corporate'
echeck.routingNum = '011075150'
# billto address
contact = litleXmlFields.contact()
contact.name = 'PreNote Sale Corporate'
contact.firstName = 'unavailable'
contact.lastName = 'unavailable'
contact.companyName = 'PreNote Sale Corporate'
contact.addressLine1 = '1 Lowell Street'
contact.addressLine2 = 'Tower 2'
contact.city = 'lowell'
contact.state = 'MA'
contact.zip = '01850'
contact.phone = '1234567890'
contact.email = '[email protected]'
# auth
auth = litleXmlFields.authorization()
auth.reportGroup = 'Planets'
auth.orderId = '12344'
auth.amount = 106
auth.orderSource = 'ecommerce'
auth.card = card
batch.addTransaction(auth)
sale = litleXmlFields.sale()
sale.reportGroup = 'Planets'
sale.orderId = '12344'
sale.amount = 6000
sale.orderSource = 'ecommerce'
sale.card = card
batch.addTransaction(sale)
credit = litleXmlFields.credit()
credit.reportGroup = 'Planets'
credit.orderId = '12344'
credit.amount = 106
credit.orderSource = 'ecommerce'
credit.card = card
batch.addTransaction(credit)
authReversal = litleXmlFields.authReversal()
authReversal.reportGroup = 'Planets'
authReversal.litleTxnId = 12345678000
authReversal.amount = 106
authReversal.payPalNotes = 'Notes'
batch.addTransaction(authReversal)
registerTokenRequestType = litleXmlFields.registerTokenRequest()
registerTokenRequestType.reportGroup = 'Planets'
registerTokenRequestType.orderId = '12344'
registerTokenRequestType.accountNumber = '1233456789103801'
batch.addTransaction(registerTokenRequestType)
cardValidationNumOnToken = litleXmlFields.updateCardValidationNumOnToken()
cardValidationNumOnToken.reportGroup = 'Planets'
cardValidationNumOnToken.id = '12345'
cardValidationNumOnToken.customerId = '0987'
cardValidationNumOnToken.orderId = '12344'
cardValidationNumOnToken.litleToken = '1233456789103801'
cardValidationNumOnToken.cardValidationNum = '123'
batch.addTransaction(cardValidationNumOnToken)
forceCapture = litleXmlFields.forceCapture()
forceCapture.reportGroup = 'Planets'
forceCapture.id = '123456'
forceCapture.orderId = '12344'
forceCapture.amount = 106
forceCapture.orderSource = 'ecommerce'
forceCapture.card = card
batch.addTransaction(forceCapture)
capture = litleXmlFields.capture()
capture.reportGroup = 'Planets'
capture.litleTxnId = 123456000
capture.amount = 106
batch.addTransaction(capture)
captureGivenAuth = litleXmlFields.captureGivenAuth()
captureGivenAuth.reportGroup = 'Planets'
captureGivenAuth.orderId = '12344'
captureGivenAuth.amount = 106
authInformation = litleXmlFields.authInformation()
authInformation.authDate = pyxb.binding.datatypes.date(2002, 10, 9)
authInformation.authAmount = 12345
authInformation.authCode = '543216'
captureGivenAuth.authInformation = authInformation
captureGivenAuth.orderSource = 'ecommerce'
captureGivenAuth.card = card
batch.addTransaction(captureGivenAuth)
echeckVerification = litleXmlFields.echeckVerification()
echeckVerification.reportGroup = 'Planets'
echeckVerification.amount = 123456
echeckVerification.orderId = '12345'
echeckVerification.orderSource = 'ecommerce'
echeckVerification.billToAddress = contact
echeckVerification.echeckOrEcheckToken = echeck
batch.addTransaction(echeckVerification)
echeckCredit = litleXmlFields.echeckCredit()
echeckCredit.reportGroup = 'Planets'
echeckCredit.litleTxnId = 1234567890
echeckCredit.amount = 12
batch.addTransaction(echeckCredit)
echeckRedeposit = litleXmlFields.echeckRedeposit()
echeckRedeposit.reportGroup = 'Planets'
echeckRedeposit.litleTxnId = 124321341412
batch.addTransaction(echeckRedeposit)
echeckSale = litleXmlFields.echeckSale()
echeckSale.reportGroup = 'Planets'
echeckSale.amount = 123456
echeckSale.orderId = '12345'
echeckSale.orderSource = 'ecommerce'
echeckSale.billToAddress = contact
echeckSale.echeckOrEcheckToken = echeck
echeckSale.verify = True
batch.addTransaction(echeckSale)
echeckPreNoteSale = litleXmlFields.echeckPreNoteSale();
echeckPreNoteSale.orderId = "12344"
echeckPreNoteSale.billToAddress = contact
echeckPreNoteSale.echeck = echeck
echeckPreNoteSale.orderSource = 'ecommerce'
transactionCount = batch.numOfTxn
fileResponse = request.sendRequestTCP()
batchResponse = fileResponse.getNextBatchResponse()
txns = 0
nextTransaction = True
while nextTransaction:
try:
batchResponse.getNextTransaction()
txns += 1
except:
nextTransaction = False
if txns == transactionCount:
break
self.assertEqual(transactionCount, txns)
def testGiftCardTransactions(self):
requestFileName = "litleSdk-testBatchFile-GIFTCARD.xml"
request = litleBatchFileRequest(requestFileName)
configFromFile = request.config
self.assertEqual('prelive.litle.com', configFromFile.batchHost)
self.assertEqual('15000', configFromFile.batchPort)
batch = request.createBatch()
giftCard = litleXmlFields.cardType()
giftCard.type = 'GC'
giftCard.expDate = '1218'
giftCard.number = '4100000000000001'
activate = litleXmlFields.activate()
activate.reportGroup = 'Planets'
activate.orderSource = 'ecommerce'
activate.amount = 100
activate.orderId = 'abc'
activate.card = giftCard
batch.addTransaction(activate)
deactivate = litleXmlFields.deactivate()
deactivate.reportGroup = 'Planets'
deactivate.orderId = 'def'
deactivate.orderSource = 'ecommerce'
deactivate.card = giftCard
batch.addTransaction(deactivate)
load = litleXmlFields.load()
load.reportGroup = 'Planets'
load.orderId = 'ghi'
load.amount = 100
load.orderSource = 'ecommerce'
load.card = giftCard
batch.addTransaction(load)
unload = litleXmlFields.unload()
unload.reportGroup = 'Planets'
unload.orderId = 'jkl'
unload.amount = 100
unload.orderSource = 'ecommerce'
unload.card = giftCard
batch.addTransaction(unload)
balanceInquiry = litleXmlFields.balanceInquiry()
balanceInquiry.reportGroup = 'Planets'
balanceInquiry.orderId = 'mno'
balanceInquiry.orderSource = 'ecommerce'
balanceInquiry.card = giftCard
batch.addTransaction(balanceInquiry)
fileResponse = request.sendRequestTCP()
batchResponse = fileResponse.getNextBatchResponse()
txns = 0
nextTransaction = True
while nextTransaction:
try:
txn = batchResponse.getNextTransaction()
self.assertNotEqual(None, txn.litleTxnId)
txns += 1
except NoTransactionException:
nextTransaction = False
self.assertEqual(5, txns)
def testMechaBatchAndProcess_Recurring(self):
requestFileName = "litleSdk-testBatchFile-RECURRING.xml"
request = litleBatchFileRequest(requestFileName)
configFromFile = request.config
self.assertEqual('prelive.litle.com', configFromFile.batchHost)
self.assertEqual('15000', configFromFile.batchPort)
batch = request.createBatch()
cancelSubscription = litleXmlFields.cancelSubscription()
cancelSubscription.subscriptionId = 12345
batch.addTransaction(cancelSubscription)
updateSubscription = litleXmlFields.updateSubscription()
updateSubscription.subscriptionId = 12345
batch.addTransaction(updateSubscription)
createPlan = litleXmlFields.createPlan()
createPlan.planCode = 'abc'
createPlan.name = 'name'
createPlan.intervalType = 'ANNUAL'
createPlan.amount = 100
batch.addTransaction(createPlan)
updatePlan = litleXmlFields.updatePlan()
updatePlan.planCode = 'def'
updatePlan.active = True
batch.addTransaction(updatePlan)
fileResponse = request.sendRequestTCP()
batchResponse = fileResponse.getNextBatchResponse()
txns = 0
nextTransaction = True
while nextTransaction:
try:
txn = batchResponse.getNextTransaction()
if isinstance(txn, litleXmlFields.updateSubscriptionResponse.typeDefinition()):
self.assertEqual(12345, txn.subscriptionId)
elif isinstance(txn, litleXmlFields.cancelSubscriptionResponse.typeDefinition()):
self.assertEqual(12345, txn.subscriptionId)
elif isinstance(txn, litleXmlFields.createPlanResponse.typeDefinition()):
self.assertEqual('abc', txn.planCode)
elif isinstance(txn, litleXmlFields.createPlanResponse.typeDefinition()):
self.assertEqual('def', txn.planCode)
txns += 1
except NoTransactionException:
nextTransaction = False
self.assertEqual(4, txns)
def testBatch_AU(self):
requestFileName = "litleSdk-testBatchFile-AU.xml"
request = litleBatchFileRequest(requestFileName)
configFromFile = request.config
self.assertEqual('prelive.litle.com', configFromFile.batchHost)
self.assertEqual('15000', configFromFile.batchPort)
batch = request.createBatch()
# card
card = litleXmlFields.card()
card.number = '4100000000000001'
card.expDate = '1210'
card.type = 'VI'
accountUpdate = litleXmlFields.accountUpdate()
accountUpdate.reportGroup = 'Planets'
accountUpdate.id = '12345'
accountUpdate.customerId = '0987'
accountUpdate.orderId = '1234'
accountUpdate.cardOrToken = card
batch.addTransaction(accountUpdate)
fileResponse = request.sendRequestTCP()
batchResponse = fileResponse.getNextBatchResponse()
txns = 0
nextTransaction = True
while nextTransaction:
try:
txn = batchResponse.getNextTransaction()
self.assertEqual('Planets', txn.reportGroup)
self.assertEqual('12345', txn.id)
self.assertEqual('0987', txn.customerId)
self.assertEqual('1234', txn.orderId)
txns += 1
except NoTransactionException:
nextTransaction = False
self.assertEqual(1, txns)
def testEcheckPreNoteAll(self):
requestFileName = "litleSdk-testBatchFile-EchecPreNoteAll.xml"
request = litleBatchFileRequest(requestFileName)
configFromFile = request.config
self.assertEqual('prelive.litle.com', configFromFile.batchHost)
self.assertEqual('15000', configFromFile.batchPort)
batch = request.createBatch()
# echeckSuccess
echeckSuccess = litleXmlFields.echeck()
echeckSuccess.accNum = '1092969901'
echeckSuccess.accType = 'Corporate'
echeckSuccess.routingNum = '011075150'
# echeckErr1
echeckRoutErr = litleXmlFields.echeck()
echeckRoutErr.accNum = '6099999992'
echeckRoutErr.accType = 'Checking'
echeckRoutErr.routingNum = '053133052'
# echeckErr2l
echeckAccErr = litleXmlFields.echeck()
echeckAccErr.accNum = '10@2969901'
echeckAccErr.accType = 'Savings'
echeckAccErr.routingNum = '011100012'
# billto address
contact = litleXmlFields.contact()
contact.name = 'PreNote Sale Corporate'
contact.firstName = 'unavailable'
contact.lastName = 'unavailable'
contact.companyName = 'PreNote Sale Corporate'
contact.addressLine1 = '1 Lowell Street'
contact.addressLine2 = 'Tower 2'
contact.city = 'lowell'
contact.state = 'MA'
contact.zip = '01850'
contact.phone = '1234567890'
contact.email = '[email protected]'
echeckPreNoteSale = litleXmlFields.echeckPreNoteSale();
echeckPreNoteSale.orderId = "000"
echeckPreNoteSale.billToAddress = contact
echeckPreNoteSale.echeck = echeckSuccess
echeckPreNoteSale.orderSource = 'ecommerce'
batch.addTransaction(echeckPreNoteSale)
echeckPreNoteCredit = litleXmlFields.echeckPreNoteCredit();
echeckPreNoteCredit.orderId = "000"
echeckPreNoteCredit.billToAddress = contact
echeckPreNoteCredit.echeck = echeckSuccess
echeckPreNoteCredit.orderSource = 'ecommerce'
batch.addTransaction(echeckPreNoteCredit)
echeckPreNoteSale = litleXmlFields.echeckPreNoteSale();
echeckPreNoteSale.orderId = "900"
echeckPreNoteSale.billToAddress = contact
echeckPreNoteSale.echeck = echeckRoutErr
echeckPreNoteSale.orderSource = 'ecommerce'
batch.addTransaction(echeckPreNoteSale)
echeckPreNoteCredit = litleXmlFields.echeckPreNoteCredit();
echeckPreNoteCredit.orderId = "900"
echeckPreNoteCredit.billToAddress = contact
echeckPreNoteCredit.echeck = echeckRoutErr
echeckPreNoteCredit.orderSource = 'ecommerce'
batch.addTransaction(echeckPreNoteCredit)
        echeckPreNoteSale = litleXmlFields.echeckPreNoteSale()
echeckPreNoteSale.orderId = "301"
echeckPreNoteSale.billToAddress = contact
echeckPreNoteSale.echeck = echeckAccErr
echeckPreNoteSale.orderSource = 'ecommerce'
batch.addTransaction(echeckPreNoteSale)
        echeckPreNoteCredit = litleXmlFields.echeckPreNoteCredit()
echeckPreNoteCredit.orderId = "301"
echeckPreNoteCredit.billToAddress = contact
echeckPreNoteCredit.echeck = echeckAccErr
echeckPreNoteCredit.orderSource = 'ecommerce'
batch.addTransaction(echeckPreNoteCredit)
transactionCount = batch.numOfTxn
fileResponse = request.sendRequestTCP()
batchResponse = fileResponse.getNextBatchResponse()
txns = 0
nextTransaction = True
while nextTransaction:
try:
responseTxn = batchResponse.getNextTransaction()
self.assertEqual(responseTxn.response, responseTxn.orderId)
                txns += 1
except NoTransactionException:
nextTransaction = False
self.assertEqual(transactionCount, txns)
def testPFIFInstructionTxn(self):
requestFileName = "litleSdk-testBatchFile-PFIF.xml"
request = litleBatchFileRequest(requestFileName)
configFromFile = request.config
self.assertEqual('prelive.litle.com', configFromFile.batchHost)
self.assertEqual('15000', configFromFile.batchPort)
batch = request.createBatch()
# echeck
echeck = litleXmlFields.echeck()
echeck.accNum = '1092969901'
echeck.accType = 'Corporate'
echeck.routingNum = '011075150'
        submerchantCredit = litleXmlFields.submerchantCredit()
submerchantCredit.fundingSubmerchantId = "12347"
submerchantCredit.submerchantName = "001"
submerchantCredit.fundsTransferId = "123456"
submerchantCredit.amount = "100"
submerchantCredit.accountInfo = echeck
batch.addTransaction(submerchantCredit)
        submerchantDebit = litleXmlFields.submerchantDebit()
submerchantDebit.fundingSubmerchantId = "12347"
submerchantDebit.submerchantName = "001"
submerchantDebit.fundsTransferId = "123456"
submerchantDebit.amount = "100"
submerchantDebit.accountInfo = echeck
batch.addTransaction(submerchantDebit)
        payFacCredit = litleXmlFields.payFacCredit()
payFacCredit.fundingSubmerchantId = "12347"
payFacCredit.fundsTransferId = "123456"
payFacCredit.amount = "100"
batch.addTransaction(payFacCredit)
        payFacDebit = litleXmlFields.payFacDebit()
payFacDebit.fundingSubmerchantId = "12347"
payFacDebit.fundsTransferId = "123456"
payFacDebit.amount = "100"
batch.addTransaction(payFacDebit)
        reserveCredit = litleXmlFields.reserveCredit()
reserveCredit.fundingSubmerchantId = "12347"
reserveCredit.fundsTransferId = "123456"
reserveCredit.amount = "100"
batch.addTransaction(reserveCredit)
        reserveDebit = litleXmlFields.reserveDebit()
reserveDebit.fundingSubmerchantId = "12347"
reserveDebit.fundsTransferId = "123456"
reserveDebit.amount = "100"
batch.addTransaction(reserveDebit)
        vendorCredit = litleXmlFields.vendorCredit()
vendorCredit.fundingSubmerchantId = "12347"
vendorCredit.vendorName = "001"
vendorCredit.fundsTransferId = "123456"
vendorCredit.amount = "100"
vendorCredit.accountInfo = echeck
batch.addTransaction(vendorCredit)
        vendorDebit = litleXmlFields.vendorDebit()
vendorDebit.fundingSubmerchantId = "12347"
vendorDebit.vendorName = "001"
vendorDebit.fundsTransferId = "123456"
vendorDebit.amount = "100"
vendorDebit.accountInfo = echeck
batch.addTransaction(vendorDebit)
        physicalCheckCredit = litleXmlFields.physicalCheckCredit()
physicalCheckCredit.fundingSubmerchantId = "12347"
physicalCheckCredit.fundsTransferId = "123456"
physicalCheckCredit.amount = "100"
batch.addTransaction(physicalCheckCredit)
        physicalCheckDebit = litleXmlFields.physicalCheckDebit()
physicalCheckDebit.fundingSubmerchantId = "12347"
physicalCheckDebit.fundsTransferId = "123456"
physicalCheckDebit.amount = "100"
batch.addTransaction(physicalCheckDebit)
transactionCount = batch.numOfTxn
fileResponse = request.sendRequestTCP()
batchResponse = fileResponse.getNextBatchResponse()
txns = 0
nextTransaction = True
while nextTransaction:
try:
batchResponse.getNextTransaction()
txns += 1
            except NoTransactionException:
nextTransaction = False
self.assertEqual(transactionCount, txns)
def assertPythonApi(self, request, response):
self.assertNotEqual(None, response)
self.assertNotEqual(None, response.litleResponse.litleSessionId)
self.assertEqual('0', response.litleResponse.response)
self.assertEqual('Valid Format', response.litleResponse.message)
self.assertEqual('9.3', response.litleResponse.version)
batchResponse = response.getNextBatchResponse()
self.assertNotEqual(None, response)
self.assertNotEqual(None, batchResponse.batchResponse.litleBatchId)
self.assertEqual(self.merchantId, batchResponse.batchResponse.merchantId)
saleResponse = batchResponse.getNextTransaction()
self.assertEqual('000', saleResponse.response)
self.assertEqual('Approved', saleResponse.message)
self.assertNotEqual(None, saleResponse.litleTxnId)
self.assertEqual('orderId11', saleResponse.orderId)
self.assertEqual('reportGroup11', saleResponse.reportGroup)
def prepareTestRequest(self, request):
batchRequest = request.createBatch()
sale = litleXmlFields.sale()
sale.reportGroup = 'reportGroup11'
sale.orderId = 'orderId11'
sale.amount = 1099
sale.orderSource = 'ecommerce'
card = litleXmlFields.cardType()
card.type = 'VI'
card.number = "4457010000000009"
card.expDate = "0114"
sale.card = card
batchRequest.addTransaction(sale)
def assertGeneratedFiles(self, requestDir, responseDir, requestFileName, request):
requestPath = requestDir + '/' + requestFileName
responsePath = responseDir + '/' + requestFileName
fRequest = os.path.abspath(request.requestFile.name)
fResponse = os.path.abspath(request.responseFile.name)
self.assertEqual(requestPath, fRequest)
self.assertEqual(responsePath, fResponse)
self.assertTrue(os.path.exists(fRequest))
self.assertTrue(os.path.exists(fResponse))
self.assertTrue(os.path.getsize(fRequest) > 0)
self.assertTrue(os.path.getsize(fResponse) > 0)
responseFromFile = litleBatchFileResponse(fResponse)
self.assertPythonApi(request, responseFromFile)
def suite():
    suite = unittest.TestLoader().loadTestsFromTestCase(TestBatchTcp)
return suite
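# --- Illustrative helper (added for clarity; not part of the original test
# module). Every test above drains a batch response with the same
# while/try/except-NoTransactionException idiom; a small generator like this
# expresses that loop once. It is only a sketch and is not used by the tests.
def _iter_batch_transactions(batchResponse):  # pragma: no cover
    """Yield transactions from ``batchResponse`` until none are left."""
    while True:
        try:
            yield batchResponse.getNextTransaction()
        except NoTransactionException:
            return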
if __name__ == '__main__':
unittest.main() | PypiClean |
/Mako-1.2.4.tar.gz/Mako-1.2.4/mako/parsetree.py | import re
from mako import ast
from mako import exceptions
from mako import filters
from mako import util
class Node:
"""base class for a Node in the parse tree."""
def __init__(self, source, lineno, pos, filename):
self.source = source
self.lineno = lineno
self.pos = pos
self.filename = filename
@property
def exception_kwargs(self):
return {
"source": self.source,
"lineno": self.lineno,
"pos": self.pos,
"filename": self.filename,
}
def get_children(self):
return []
def accept_visitor(self, visitor):
def traverse(node):
for n in node.get_children():
n.accept_visitor(visitor)
method = getattr(visitor, "visit" + self.__class__.__name__, traverse)
method(self)
class TemplateNode(Node):
"""a 'container' node that stores the overall collection of nodes."""
def __init__(self, filename):
super().__init__("", 0, 0, filename)
self.nodes = []
self.page_attributes = {}
def get_children(self):
return self.nodes
def __repr__(self):
return "TemplateNode(%s, %r)" % (
util.sorted_dict_repr(self.page_attributes),
self.nodes,
)
class ControlLine(Node):
"""defines a control line, a line-oriented python line or end tag.
e.g.::
% if foo:
(markup)
% endif
"""
has_loop_context = False
def __init__(self, keyword, isend, text, **kwargs):
super().__init__(**kwargs)
self.text = text
self.keyword = keyword
self.isend = isend
self.is_primary = keyword in ["for", "if", "while", "try", "with"]
self.nodes = []
if self.isend:
self._declared_identifiers = []
self._undeclared_identifiers = []
else:
code = ast.PythonFragment(text, **self.exception_kwargs)
self._declared_identifiers = code.declared_identifiers
self._undeclared_identifiers = code.undeclared_identifiers
def get_children(self):
return self.nodes
def declared_identifiers(self):
return self._declared_identifiers
def undeclared_identifiers(self):
return self._undeclared_identifiers
def is_ternary(self, keyword):
"""return true if the given keyword is a ternary keyword
for this ControlLine"""
cases = {
"if": {"else", "elif"},
"try": {"except", "finally"},
"for": {"else"},
}
return keyword in cases.get(self.keyword, set())
def __repr__(self):
return "ControlLine(%r, %r, %r, %r)" % (
self.keyword,
self.text,
self.isend,
(self.lineno, self.pos),
)
class Text(Node):
"""defines plain text in the template."""
def __init__(self, content, **kwargs):
super().__init__(**kwargs)
self.content = content
def __repr__(self):
return "Text(%r, %r)" % (self.content, (self.lineno, self.pos))
class Code(Node):
"""defines a Python code block, either inline or module level.
e.g.::
inline:
<%
x = 12
%>
module level:
<%!
import logger
%>
"""
def __init__(self, text, ismodule, **kwargs):
super().__init__(**kwargs)
self.text = text
self.ismodule = ismodule
self.code = ast.PythonCode(text, **self.exception_kwargs)
def declared_identifiers(self):
return self.code.declared_identifiers
def undeclared_identifiers(self):
return self.code.undeclared_identifiers
def __repr__(self):
return "Code(%r, %r, %r)" % (
self.text,
self.ismodule,
(self.lineno, self.pos),
)
class Comment(Node):
"""defines a comment line.
# this is a comment
"""
def __init__(self, text, **kwargs):
super().__init__(**kwargs)
self.text = text
def __repr__(self):
return "Comment(%r, %r)" % (self.text, (self.lineno, self.pos))
class Expression(Node):
"""defines an inline expression.
${x+y}
"""
def __init__(self, text, escapes, **kwargs):
super().__init__(**kwargs)
self.text = text
self.escapes = escapes
self.escapes_code = ast.ArgumentList(escapes, **self.exception_kwargs)
self.code = ast.PythonCode(text, **self.exception_kwargs)
def declared_identifiers(self):
return []
def undeclared_identifiers(self):
# TODO: make the "filter" shortcut list configurable at parse/gen time
return self.code.undeclared_identifiers.union(
self.escapes_code.undeclared_identifiers.difference(
filters.DEFAULT_ESCAPES
)
).difference(self.code.declared_identifiers)
def __repr__(self):
return "Expression(%r, %r, %r)" % (
self.text,
self.escapes_code.args,
(self.lineno, self.pos),
)
class _TagMeta(type):
"""metaclass to allow Tag to produce a subclass according to
its keyword"""
_classmap = {}
def __init__(cls, clsname, bases, dict_):
if getattr(cls, "__keyword__", None) is not None:
cls._classmap[cls.__keyword__] = cls
super().__init__(clsname, bases, dict_)
def __call__(cls, keyword, attributes, **kwargs):
if ":" in keyword:
ns, defname = keyword.split(":")
return type.__call__(
CallNamespaceTag, ns, defname, attributes, **kwargs
)
try:
cls = _TagMeta._classmap[keyword]
except KeyError:
raise exceptions.CompileException(
"No such tag: '%s'" % keyword,
source=kwargs["source"],
lineno=kwargs["lineno"],
pos=kwargs["pos"],
filename=kwargs["filename"],
)
return type.__call__(cls, keyword, attributes, **kwargs)
class Tag(Node, metaclass=_TagMeta):
"""abstract base class for tags.
e.g.::
<%sometag/>
<%someothertag>
stuff
</%someothertag>
"""
__keyword__ = None
def __init__(
self,
keyword,
attributes,
expressions,
nonexpressions,
required,
**kwargs,
):
r"""construct a new Tag instance.
this constructor not called directly, and is only called
by subclasses.
:param keyword: the tag keyword
:param attributes: raw dictionary of attribute key/value pairs
:param expressions: a set of identifiers that are legal attributes,
which can also contain embedded expressions
:param nonexpressions: a set of identifiers that are legal
attributes, which cannot contain embedded expressions
:param \**kwargs:
other arguments passed to the Node superclass (lineno, pos)
"""
super().__init__(**kwargs)
self.keyword = keyword
self.attributes = attributes
self._parse_attributes(expressions, nonexpressions)
missing = [r for r in required if r not in self.parsed_attributes]
if len(missing):
raise exceptions.CompileException(
(
"Missing attribute(s): %s"
% ",".join(repr(m) for m in missing)
),
**self.exception_kwargs,
)
self.parent = None
self.nodes = []
def is_root(self):
return self.parent is None
def get_children(self):
return self.nodes
def _parse_attributes(self, expressions, nonexpressions):
undeclared_identifiers = set()
self.parsed_attributes = {}
for key in self.attributes:
if key in expressions:
expr = []
for x in re.compile(r"(\${.+?})", re.S).split(
self.attributes[key]
):
m = re.compile(r"^\${(.+?)}$", re.S).match(x)
if m:
code = ast.PythonCode(
m.group(1).rstrip(), **self.exception_kwargs
)
# we aren't discarding "declared_identifiers" here,
# which we do so that list comprehension-declared
# variables aren't counted. As yet can't find a
# condition that requires it here.
undeclared_identifiers = undeclared_identifiers.union(
code.undeclared_identifiers
)
expr.append("(%s)" % m.group(1))
elif x:
expr.append(repr(x))
self.parsed_attributes[key] = " + ".join(expr) or repr("")
elif key in nonexpressions:
if re.search(r"\${.+?}", self.attributes[key]):
raise exceptions.CompileException(
"Attribute '%s' in tag '%s' does not allow embedded "
"expressions" % (key, self.keyword),
**self.exception_kwargs,
)
self.parsed_attributes[key] = repr(self.attributes[key])
else:
raise exceptions.CompileException(
"Invalid attribute for tag '%s': '%s'"
% (self.keyword, key),
**self.exception_kwargs,
)
self.expression_undeclared_identifiers = undeclared_identifiers
def declared_identifiers(self):
return []
def undeclared_identifiers(self):
return self.expression_undeclared_identifiers
def __repr__(self):
return "%s(%r, %s, %r, %r)" % (
self.__class__.__name__,
self.keyword,
util.sorted_dict_repr(self.attributes),
(self.lineno, self.pos),
self.nodes,
)
class IncludeTag(Tag):
__keyword__ = "include"
def __init__(self, keyword, attributes, **kwargs):
super().__init__(
keyword,
attributes,
("file", "import", "args"),
(),
("file",),
**kwargs,
)
self.page_args = ast.PythonCode(
"__DUMMY(%s)" % attributes.get("args", ""), **self.exception_kwargs
)
def declared_identifiers(self):
return []
def undeclared_identifiers(self):
identifiers = self.page_args.undeclared_identifiers.difference(
{"__DUMMY"}
).difference(self.page_args.declared_identifiers)
return identifiers.union(super().undeclared_identifiers())
class NamespaceTag(Tag):
__keyword__ = "namespace"
def __init__(self, keyword, attributes, **kwargs):
super().__init__(
keyword,
attributes,
("file",),
("name", "inheritable", "import", "module"),
(),
**kwargs,
)
self.name = attributes.get("name", "__anon_%s" % hex(abs(id(self))))
if "name" not in attributes and "import" not in attributes:
raise exceptions.CompileException(
"'name' and/or 'import' attributes are required "
"for <%namespace>",
**self.exception_kwargs,
)
if "file" in attributes and "module" in attributes:
raise exceptions.CompileException(
"<%namespace> may only have one of 'file' or 'module'",
**self.exception_kwargs,
)
def declared_identifiers(self):
return []
class TextTag(Tag):
__keyword__ = "text"
def __init__(self, keyword, attributes, **kwargs):
        super().__init__(keyword, attributes, (), ("filter",), (), **kwargs)
self.filter_args = ast.ArgumentList(
attributes.get("filter", ""), **self.exception_kwargs
)
def undeclared_identifiers(self):
return self.filter_args.undeclared_identifiers.difference(
filters.DEFAULT_ESCAPES.keys()
).union(self.expression_undeclared_identifiers)
class DefTag(Tag):
__keyword__ = "def"
def __init__(self, keyword, attributes, **kwargs):
expressions = ["buffered", "cached"] + [
c for c in attributes if c.startswith("cache_")
]
super().__init__(
keyword,
attributes,
expressions,
("name", "filter", "decorator"),
("name",),
**kwargs,
)
name = attributes["name"]
if re.match(r"^[\w_]+$", name):
raise exceptions.CompileException(
"Missing parenthesis in %def", **self.exception_kwargs
)
self.function_decl = ast.FunctionDecl(
"def " + name + ":pass", **self.exception_kwargs
)
self.name = self.function_decl.funcname
self.decorator = attributes.get("decorator", "")
self.filter_args = ast.ArgumentList(
attributes.get("filter", ""), **self.exception_kwargs
)
is_anonymous = False
is_block = False
@property
def funcname(self):
return self.function_decl.funcname
def get_argument_expressions(self, **kw):
return self.function_decl.get_argument_expressions(**kw)
def declared_identifiers(self):
return self.function_decl.allargnames
def undeclared_identifiers(self):
res = []
for c in self.function_decl.defaults:
res += list(
ast.PythonCode(
c, **self.exception_kwargs
).undeclared_identifiers
)
return (
set(res)
.union(
self.filter_args.undeclared_identifiers.difference(
filters.DEFAULT_ESCAPES.keys()
)
)
.union(self.expression_undeclared_identifiers)
.difference(self.function_decl.allargnames)
)
class BlockTag(Tag):
__keyword__ = "block"
def __init__(self, keyword, attributes, **kwargs):
expressions = ["buffered", "cached", "args"] + [
c for c in attributes if c.startswith("cache_")
]
super().__init__(
keyword,
attributes,
expressions,
("name", "filter", "decorator"),
(),
**kwargs,
)
name = attributes.get("name")
if name and not re.match(r"^[\w_]+$", name):
raise exceptions.CompileException(
"%block may not specify an argument signature",
**self.exception_kwargs,
)
if not name and attributes.get("args", None):
raise exceptions.CompileException(
"Only named %blocks may specify args", **self.exception_kwargs
)
self.body_decl = ast.FunctionArgs(
attributes.get("args", ""), **self.exception_kwargs
)
self.name = name
self.decorator = attributes.get("decorator", "")
self.filter_args = ast.ArgumentList(
attributes.get("filter", ""), **self.exception_kwargs
)
is_block = True
@property
def is_anonymous(self):
return self.name is None
@property
def funcname(self):
return self.name or "__M_anon_%d" % (self.lineno,)
def get_argument_expressions(self, **kw):
return self.body_decl.get_argument_expressions(**kw)
def declared_identifiers(self):
return self.body_decl.allargnames
def undeclared_identifiers(self):
return (
self.filter_args.undeclared_identifiers.difference(
filters.DEFAULT_ESCAPES.keys()
)
).union(self.expression_undeclared_identifiers)
class CallTag(Tag):
__keyword__ = "call"
def __init__(self, keyword, attributes, **kwargs):
super().__init__(
keyword, attributes, ("args"), ("expr",), ("expr",), **kwargs
)
self.expression = attributes["expr"]
self.code = ast.PythonCode(self.expression, **self.exception_kwargs)
self.body_decl = ast.FunctionArgs(
attributes.get("args", ""), **self.exception_kwargs
)
def declared_identifiers(self):
return self.code.declared_identifiers.union(self.body_decl.allargnames)
def undeclared_identifiers(self):
return self.code.undeclared_identifiers.difference(
self.code.declared_identifiers
)
class CallNamespaceTag(Tag):
def __init__(self, namespace, defname, attributes, **kwargs):
super().__init__(
namespace + ":" + defname,
attributes,
tuple(attributes.keys()) + ("args",),
(),
(),
**kwargs,
)
self.expression = "%s.%s(%s)" % (
namespace,
defname,
",".join(
"%s=%s" % (k, v)
for k, v in self.parsed_attributes.items()
if k != "args"
),
)
self.code = ast.PythonCode(self.expression, **self.exception_kwargs)
self.body_decl = ast.FunctionArgs(
attributes.get("args", ""), **self.exception_kwargs
)
def declared_identifiers(self):
return self.code.declared_identifiers.union(self.body_decl.allargnames)
def undeclared_identifiers(self):
return self.code.undeclared_identifiers.difference(
self.code.declared_identifiers
)
class InheritTag(Tag):
__keyword__ = "inherit"
def __init__(self, keyword, attributes, **kwargs):
super().__init__(
keyword, attributes, ("file",), (), ("file",), **kwargs
)
class PageTag(Tag):
__keyword__ = "page"
def __init__(self, keyword, attributes, **kwargs):
expressions = [
"cached",
"args",
"expression_filter",
"enable_loop",
] + [c for c in attributes if c.startswith("cache_")]
super().__init__(keyword, attributes, expressions, (), (), **kwargs)
self.body_decl = ast.FunctionArgs(
attributes.get("args", ""), **self.exception_kwargs
)
self.filter_args = ast.ArgumentList(
attributes.get("expression_filter", ""), **self.exception_kwargs
)
def declared_identifiers(self):
return self.body_decl.allargnames | PypiClean |
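# --- Illustrative usage sketch (added for clarity; not part of the original
# module). In normal use these nodes are produced by mako.lexer.Lexer rather
# than by hand; the file names and contents below are made up for the example.
def _example_parse_tree():  # pragma: no cover
    """Build a tiny tree by hand, show tag dispatch, and walk the tree."""
    template = TemplateNode("example.html")
    text = Text(
        "hello world",
        source="hello world",
        lineno=1,
        pos=0,
        filename="example.html",
    )
    template.nodes.append(text)
    # _TagMeta dispatches on the keyword, so this call returns an IncludeTag.
    tag = Tag(
        "include",
        {"file": "layout.html"},
        source="", lineno=1, pos=0, filename="example.html",
    )
    template.nodes.append(tag)
    seen = []
    class _Visitor:
        def visitText(self, node):
            seen.append(node.content)
    template.accept_visitor(_Visitor())
    return seen, isinstance(tag, IncludeTag)  # -> (["hello world"], True)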
/Cartopy-0.22.0-cp310-cp310-macosx_11_0_arm64.whl/cartopy/io/__init__.py | import collections
from pathlib import Path
import string
from urllib.request import urlopen
import warnings
from cartopy import config
def fh_getter(fh, mode='r', needs_filename=False):
"""
Convenience function for opening files.
Parameters
----------
fh
File handle, filename or (file handle, filename) tuple.
mode: optional
Open mode. Defaults to "r".
needs_filename: optional
Defaults to False
Returns
-------
file handle, filename
Opened in the given mode.
"""
if mode != 'r':
raise ValueError('Only mode "r" currently supported.')
if isinstance(fh, str):
filename = fh
fh = open(fh, mode)
elif isinstance(fh, tuple):
fh, filename = fh
if filename is None:
try:
filename = fh.name
except AttributeError: # does this occur?
if needs_filename:
raise ValueError('filename cannot be determined')
else:
filename = ''
return fh, filename
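# --- Illustrative sketch (added for clarity; not part of the original
# module). ``fh_getter`` accepts a filename, an open handle, or a
# (handle, name) tuple; the path below is a made-up placeholder.
def _example_fh_getter(path="some_file.txt"):  # pragma: no cover
    """Open ``path`` via fh_getter and return the resolved filename."""
    fh, filename = fh_getter(path)
    fh.close()
    return filename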
class DownloadWarning(Warning):
"""Issued when a file is being downloaded by a :class:`Downloader`."""
pass
class Downloader:
"""
Represents a resource, that can be configured easily, which knows
how to acquire itself (perhaps via HTTP).
The key interface method is :meth:`path` - typically *all* external calls
will be made to that method. To get hold of an appropriate
:class:`Downloader` instance the :func:`Downloader.from_config` static
method should be considered.
Parameters
----------
url_template
The template of the full URL representing this
resource.
target_path_template
The template of the full path to the file
that this Downloader represents. Typically the path will be a
subdirectory of ``config['data_dir']``, but this is not a strict
requirement. If the file does not exist when calling
:meth:`Downloader.path` it will be downloaded to this location.
pre_downloaded_path_template: optional
The template of a full path of a file which has been downloaded
outside of this Downloader which should be used as the file that
this resource represents. If the file does not exist when
:meth:`Downloader.path` is called it will not be downloaded
to this location (unlike the ``target_path_template`` argument).
Note
----
All ``*_template`` arguments should be formattable using the
standard :meth:`string.format` rules. The formatting itself
is not done until a call to a subsequent method (such as
:meth:`Downloader.path`).
"""
FORMAT_KEYS = ('config',)
"""
The minimum keys which should be provided in the ``format_dict``
argument for the ``path``, ``url``, ``target_path``,
``pre_downloaded_path`` and ``acquire_resource`` methods.
"""
def __init__(self, url_template, target_path_template,
pre_downloaded_path_template=''):
self.url_template = url_template
self.target_path_template = target_path_template
self.pre_downloaded_path_template = pre_downloaded_path_template
# define a formatter which will process the templates. Subclasses
# may override the standard ``''.format`` formatting by defining
# their own formatter subclass here.
self._formatter = string.Formatter()
def url(self, format_dict):
"""
The full URL that this resource represents.
Parameters
----------
format_dict
The dictionary which is used to replace certain
template variables. Subclasses should document which keys are
expected as a minimum in their ``FORMAT_KEYS`` class attribute.
"""
return self._formatter.format(self.url_template, **format_dict)
def target_path(self, format_dict):
"""
The path on disk of the file that this resource represents, must
either exist, or be writable by the current user. This method
does not check either of these conditions.
Parameters
----------
format_dict
The dictionary which is used to replace certain
template variables. Subclasses should document which keys are
expected as a minimum in their ``FORMAT_KEYS`` class attribute.
"""
return Path(self._formatter.format(self.target_path_template,
**format_dict))
def pre_downloaded_path(self, format_dict):
"""
        The path on disk of the file that this resource represents. If it
        does not exist, no further action will be taken with this path, and
        all further processing will be done using :meth:`target_path` instead.
Parameters
----------
format_dict
The dictionary which is used to replace certain
template variables. Subclasses should document which keys are
expected as a minimum in their ``FORMAT_KEYS`` class attribute.
"""
p = self._formatter.format(self.pre_downloaded_path_template,
**format_dict)
return None if p == '' else Path(p)
def path(self, format_dict):
"""
Returns the path to a file on disk that this resource represents.
If the file doesn't exist in :meth:`pre_downloaded_path` then it
will check whether it exists in :meth:`target_path`, otherwise
        the resource will be downloaded via :meth:`acquire_resource` from
:meth:`url` to :meth:`target_path`.
Typically, this is the method that most applications will call,
allowing implementors of new Downloaders to specialise
:meth:`acquire_resource`.
Parameters
----------
format_dict
The dictionary which is used to replace certain
template variables. Subclasses should document which keys are
expected as a minimum in their ``FORMAT_KEYS`` class attribute.
"""
pre_downloaded_path = self.pre_downloaded_path(format_dict)
target_path = self.target_path(format_dict)
if pre_downloaded_path is not None and pre_downloaded_path.exists():
result_path = pre_downloaded_path
elif target_path.exists():
result_path = target_path
else:
# we need to download the file
result_path = self.acquire_resource(target_path, format_dict)
return result_path
def acquire_resource(self, target_path, format_dict):
"""
Download, via HTTP, the file that this resource represents.
Subclasses will typically override this method.
Parameters
----------
format_dict
The dictionary which is used to replace certain
template variables. Subclasses should document which keys are
expected as a minimum in their ``FORMAT_KEYS`` class attribute.
"""
target_path = Path(target_path)
target_dir = target_path.parent
target_dir.mkdir(parents=True, exist_ok=True)
url = self.url(format_dict)
# try getting the resource (no exception handling, just let it raise)
response = self._urlopen(url)
with open(target_path, 'wb') as fh:
fh.write(response.read())
return target_path
def _urlopen(self, url):
"""
Returns a file handle to the given HTTP resource URL.
Caller should close the file handle when finished with it.
"""
warnings.warn(f'Downloading: {url}', DownloadWarning)
return urlopen(url)
@staticmethod
def from_config(specification, config_dict=None):
"""
The ``from_config`` static method implements the logic for acquiring a
Downloader (sub)class instance from the config dictionary.
Parameters
----------
specification
Should be iterable, as it will be traversed in
reverse order to find the most appropriate Downloader instance
for this specification. An example specification is
``('shapefiles', 'natural_earth')`` for the Natural Earth
shapefiles.
config_dict: optional
typically this is left as None to use the
default ``cartopy.config`` "downloaders" dictionary.
Examples
--------
>>> from cartopy.io import Downloader
>>>
>>> dnldr = Downloader('https://example.com/{name}', './{name}.txt')
>>> config = {('level_1', 'level_2'): dnldr}
>>> d1 = Downloader.from_config(('level_1', 'level_2', 'level_3'),
... config_dict=config)
>>> print(d1.url_template)
https://example.com/{name}
>>> print(d1.url({'name': 'item_name'}))
https://example.com/item_name
"""
spec_depth = len(specification)
if config_dict is None:
downloaders = config['downloaders']
else:
downloaders = config_dict
result_downloader = None
for i in range(spec_depth, 0, -1):
lookup = specification[:i]
downloadable_item = downloaders.get(lookup, None)
if downloadable_item is not None:
result_downloader = downloadable_item
break
if result_downloader is None:
# should never really happen, but could if the user does
# some strange things like not having any downloaders defined
# in the config...
raise ValueError('No generic downloadable item in the config '
f'dictionary for {specification}')
return result_downloader
class LocatedImage(collections.namedtuple('LocatedImage', 'image, extent')):
"""
Define an image and associated extent in the form:
``image, (min_x, max_x, min_y, max_y)``
"""
class RasterSource:
"""
Define the cartopy raster fetching interface.
A :class:`RasterSource` instance is able to supply images and
associated extents (as a sequence of :class:`LocatedImage` instances)
through its :meth:`~RasterSource.fetch_raster` method.
As a result, further interfacing classes, such as
:class:`cartopy.mpl.slippy_image_artist.SlippyImageArtist`, can then
make use of the interface for functionality such as interactive image
retrieval with pan and zoom functionality.
.. _raster-source-interface:
"""
def validate_projection(self, projection):
"""
Raise an error if this raster source cannot provide images in the
specified projection.
Parameters
----------
projection: :class:`cartopy.crs.Projection`
The desired projection of the image.
"""
raise NotImplementedError()
def fetch_raster(self, projection, extent, target_resolution):
"""
Return a sequence of images with extents given some constraining
information.
Parameters
----------
projection: :class:`cartopy.crs.Projection`
The desired projection of the image.
extent: iterable of length 4
The extent of the requested image in projected coordinates.
The resulting image may not be defined exactly by these extents,
and so the extent of the resulting image is also returned. The
extents must be defined in the form
``(min_x, max_x, min_y, max_y)``.
target_resolution: iterable of length 2
The desired resolution of the image as ``(width, height)`` in
pixels.
Returns
-------
images
A sequence of :class:`LocatedImage` instances.
"""
raise NotImplementedError()
class RasterSourceContainer(RasterSource):
"""
A container which simply calls the appropriate methods on the
contained :class:`RasterSource`.
"""
def __init__(self, contained_source):
"""
Parameters
----------
contained_source: :class:`RasterSource` instance.
The source of the raster that this container is wrapping.
"""
self._source = contained_source
def fetch_raster(self, projection, extent, target_resolution):
return self._source.fetch_raster(projection, extent,
target_resolution)
def validate_projection(self, projection):
return self._source.validate_projection(projection)
class PostprocessedRasterSource(RasterSourceContainer):
"""
A :class:`RasterSource` which wraps another, an then applies a
post-processing step on the raster fetched from the contained source.
"""
def __init__(self, contained_source, img_post_process):
"""
Parameters
----------
contained_source: :class:`RasterSource` instance.
The source of the raster that this container is wrapping.
img_post_process: callable
Called after each `fetch_raster` call which yields a non-None
image result. The callable must accept the :class:`LocatedImage`
from the contained fetch_raster as its only argument, and must
return a single LocatedImage.
"""
super().__init__(contained_source)
self._post_fetch_fn = img_post_process
def fetch_raster(self, *args, **kwargs):
fetch_raster = super().fetch_raster
located_imgs = fetch_raster(*args, **kwargs)
if located_imgs:
located_imgs = [self._post_fetch_fn(img) for img in located_imgs]
return located_imgs | PypiClean |
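# --- Illustrative usage sketch (added for clarity; not part of the original
# module). The URL and path templates below are hypothetical placeholders;
# real templates are registered in cartopy's ``config['downloaders']``.
# Nothing here touches the network -- it only formats the templates.
def _example_downloader_templates():  # pragma: no cover
    """Show how a :class:`Downloader` expands its templates."""
    dnldr = Downloader(
        "https://example.com/{category}/{name}.zip",
        "{config[data_dir]}/{category}/{name}.zip",
    )
    format_dict = {"config": config, "category": "shapefiles", "name": "coastline"}
    url = dnldr.url(format_dict)  # e.g. https://example.com/shapefiles/coastline.zip
    target = dnldr.target_path(format_dict)  # a pathlib.Path under config['data_dir']
    return url, target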
/Louie-latest-1.3.1.tar.gz/Louie-latest-1.3.1/louie/dispatcher.py | import os
import weakref
try:
set
except NameError:
from sets import Set as set, ImmutableSet as frozenset
from louie import error
from louie import robustapply
from louie import saferef
from louie.sender import Any, Anonymous
from louie.signal import All
try:
dict.iteritems
except AttributeError:
#python 3
def itervalues(d):
return iter(d.values())
def iteritems(d):
return iter(d.items())
else:
#python 2
def itervalues(d):
return d.itervalues()
def iteritems(d):
return d.iteritems()
# Support for statistics.
if __debug__:
connects = 0
disconnects = 0
sends = 0
def print_stats():
print('\n'
'Louie connects: {}\n'
'Louie disconnects: {}\n'
'Louie sends: {}\n'
'\n'.format(connects, disconnects, sends))
if 'PYDISPATCH_STATS' in os.environ:
import atexit
atexit.register(print_stats)
WEAKREF_TYPES = (weakref.ReferenceType, saferef.BoundMethodWeakref)
connections = {}
senders = {}
senders_back = {}
plugins = []
def reset():
"""Reset the state of Louie.
Useful during unit testing. Should be avoided otherwise.
"""
global connections, senders, senders_back, plugins
connections = {}
senders = {}
senders_back = {}
plugins = []
def connect(receiver, signal=All, sender=Any, weak=True):
"""Connect ``receiver`` to ``sender`` for ``signal``.
- ``receiver``: A callable Python object which is to receive
messages/signals/events. Receivers must be hashable objects.
If weak is ``True``, then receiver must be weak-referencable (more
precisely ``saferef.safe_ref()`` must be able to create a
reference to the receiver).
Receivers are fairly flexible in their specification, as the
machinery in the ``robustapply`` module takes care of most of the
details regarding figuring out appropriate subsets of the sent
arguments to apply to a given receiver.
Note: If ``receiver`` is itself a weak reference (a callable), it
will be de-referenced by the system's machinery, so *generally*
weak references are not suitable as receivers, though some use
might be found for the facility whereby a higher-level library
passes in pre-weakrefed receiver references.
- ``signal``: The signal to which the receiver should respond.
If ``All``, receiver will receive all signals from the indicated
sender (which might also be ``All``, but is not necessarily
``All``).
Otherwise must be a hashable Python object other than ``None``
(``DispatcherError`` raised on ``None``).
- ``sender``: The sender to which the receiver should respond.
If ``Any``, receiver will receive the indicated signals from any
sender.
If ``Anonymous``, receiver will only receive indicated signals
from ``send``/``send_exact`` which do not specify a sender, or
specify ``Anonymous`` explicitly as the sender.
Otherwise can be any python object.
- ``weak``: Whether to use weak references to the receiver.
By default, the module will attempt to use weak references to
the receiver objects. If this parameter is ``False``, then strong
references will be used.
Returns ``None``, may raise ``DispatcherTypeError``.
"""
if signal is None:
raise error.DispatcherTypeError(
'Signal cannot be None (receiver={0!r} sender={1!r})'
.format(receiver, sender))
if weak:
receiver = saferef.safe_ref(receiver, on_delete=_remove_receiver)
senderkey = id(sender)
if senderkey in connections:
signals = connections[senderkey]
else:
connections[senderkey] = signals = {}
# Keep track of senders for cleanup.
# Is Anonymous something we want to clean up?
if sender not in (None, Anonymous, Any):
def remove(object, senderkey=senderkey):
_remove_sender(senderkey=senderkey)
# Skip objects that can not be weakly referenced, which means
# they won't be automatically cleaned up, but that's too bad.
try:
weak_sender = weakref.ref(sender, remove)
senders[senderkey] = weak_sender
except Exception:
pass
receiver_id = id(receiver)
# get current set, remove any current references to
# this receiver in the set, including back-references
if signal in signals:
receivers = signals[signal]
_remove_old_back_refs(senderkey, signal, receiver, receivers)
else:
receivers = signals[signal] = []
try:
current = senders_back.get(receiver_id)
if current is None:
senders_back[receiver_id] = current = []
if senderkey not in current:
current.append(senderkey)
except Exception:
pass
receivers.append(receiver)
# Update stats.
if __debug__:
global connects
connects += 1
def disconnect(receiver, signal=All, sender=Any, weak=True):
"""Disconnect ``receiver`` from ``sender`` for ``signal``.
- ``receiver``: The registered receiver to disconnect.
- ``signal``: The registered signal to disconnect.
- ``sender``: The registered sender to disconnect.
- ``weak``: The weakref state to disconnect.
``disconnect`` reverses the process of ``connect``, the semantics for
the individual elements are logically equivalent to a tuple of
``(receiver, signal, sender, weak)`` used as a key to be deleted
from the internal routing tables. (The actual process is slightly
more complex but the semantics are basically the same).
Note: Using ``disconnect`` is not required to cleanup routing when
an object is deleted; the framework will remove routes for deleted
objects automatically. It's only necessary to disconnect if you
want to stop routing to a live object.
Returns ``None``, may raise ``DispatcherTypeError`` or
``DispatcherKeyError``.
"""
if signal is None:
raise error.DispatcherTypeError(
'Signal cannot be None (receiver={0!r} sender={1!r})'
.format(receiver, sender))
if weak:
receiver = saferef.safe_ref(receiver)
senderkey = id(sender)
try:
signals = connections[senderkey]
receivers = signals[signal]
except KeyError:
raise error.DispatcherKeyError(
'No receivers found for signal {0!r} from sender {1!r}'
.format(signal, sender))
try:
# also removes from receivers
_remove_old_back_refs(senderkey, signal, receiver, receivers)
except ValueError:
raise error.DispatcherKeyError(
'No connection to receiver {0!r} '
'for signal {1!r} from sender {2!r}'
.format(receiver, signal, sender))
_cleanup_connections(senderkey, signal)
# Update stats.
if __debug__:
global disconnects
disconnects += 1
def get_receivers(sender=Any, signal=All):
"""Get list of receivers from global tables.
This function allows you to retrieve the raw list of receivers
from the connections table for the given sender and signal pair.
Note: There is no guarantee that this is the actual list stored in
the connections table, so the value should be treated as a simple
iterable/truth value rather than, for instance a list to which you
might append new records.
Normally you would use ``live_receivers(get_receivers(...))`` to
retrieve the actual receiver objects as an iterable object.
"""
try:
return connections[id(sender)][signal]
except KeyError:
return []
def live_receivers(receivers):
"""Filter sequence of receivers to get resolved, live receivers.
This is a generator which will iterate over the passed sequence,
checking for weak references and resolving them, then returning
all live receivers.
"""
for receiver in receivers:
if isinstance(receiver, WEAKREF_TYPES):
# Dereference the weak reference.
receiver = receiver()
if receiver is not None:
# Check installed plugins to make sure this receiver is
# live.
live = True
for plugin in plugins:
if not plugin.is_live(receiver):
live = False
break
if live:
yield receiver
def get_all_receivers(sender=Any, signal=All):
"""Get list of all receivers from global tables.
This gets all receivers which should receive the given signal from
sender, each receiver should be produced only once by the
resulting generator.
"""
yielded = set()
for receivers in (
# Get receivers that receive *this* signal from *this* sender.
get_receivers(sender, signal),
# Add receivers that receive *all* signals from *this* sender.
get_receivers(sender, All),
# Add receivers that receive *this* signal from *any* sender.
get_receivers(Any, signal),
# Add receivers that receive *all* signals from *any* sender.
get_receivers(Any, All),
):
# Make a copy of each list so it's immutable within the context
# of this function, even if a receiver calls disconnect() or any
# other function that changes a list of receivers.
for receiver in list(receivers):
if receiver: # filter out dead instance-method weakrefs
try:
if not receiver in yielded:
yielded.add(receiver)
yield receiver
except TypeError:
# dead weakrefs raise TypeError on hash...
pass
def send(signal=All, sender=Anonymous, *arguments, **named):
"""Send ``signal`` from ``sender`` to all connected receivers.
- ``signal``: (Hashable) signal value; see ``connect`` for details.
- ``sender``: The sender of the signal.
If ``Any``, only receivers registered for ``Any`` will receive the
message.
If ``Anonymous``, only receivers registered to receive messages
from ``Anonymous`` or ``Any`` will receive the message.
Otherwise can be any Python object (normally one registered with
a connect if you actually want something to occur).
- ``arguments``: Positional arguments which will be passed to *all*
receivers. Note that this may raise ``TypeError`` if the receivers
do not allow the particular arguments. Note also that arguments
are applied before named arguments, so they should be used with
care.
- ``named``: Named arguments which will be filtered according to the
parameters of the receivers to only provide those acceptable to
the receiver.
Return a list of tuple pairs ``[(receiver, response), ...]``
If any receiver raises an error, the error propagates back through
send, terminating the dispatch loop, so it is quite possible to
    not have all receivers called if a receiver raises an error.
"""
# Call each receiver with whatever arguments it can accept.
# Return a list of tuple pairs [(receiver, response), ... ].
responses = []
for receiver in live_receivers(get_all_receivers(sender, signal)):
# Wrap receiver using installed plugins.
original = receiver
for plugin in plugins:
receiver = plugin.wrap_receiver(receiver)
response = robustapply.robust_apply(
receiver, original,
signal=signal,
sender=sender,
*arguments,
**named
)
responses.append((receiver, response))
# Update stats.
if __debug__:
global sends
sends += 1
return responses
def send_minimal(signal=All, sender=Anonymous, *arguments, **named):
"""Like ``send``, but does not attach ``signal`` and ``sender``
arguments to the call to the receiver."""
# Call each receiver with whatever arguments it can accept.
# Return a list of tuple pairs [(receiver, response), ... ].
responses = []
for receiver in live_receivers(get_all_receivers(sender, signal)):
# Wrap receiver using installed plugins.
original = receiver
for plugin in plugins:
receiver = plugin.wrap_receiver(receiver)
response = robustapply.robust_apply(
receiver, original,
*arguments,
**named
)
responses.append((receiver, response))
# Update stats.
if __debug__:
global sends
sends += 1
return responses
def send_exact(signal=All, sender=Anonymous, *arguments, **named):
"""Send ``signal`` only to receivers registered for exact message.
``send_exact`` allows for avoiding ``Any``/``Anonymous`` registered
handlers, sending only to those receivers explicitly registered
for a particular signal on a particular sender.
"""
responses = []
for receiver in live_receivers(get_receivers(sender, signal)):
# Wrap receiver using installed plugins.
original = receiver
for plugin in plugins:
receiver = plugin.wrap_receiver(receiver)
response = robustapply.robust_apply(
receiver, original,
signal=signal,
sender=sender,
*arguments,
**named
)
responses.append((receiver, response))
return responses
def send_robust(signal=All, sender=Anonymous, *arguments, **named):
"""Send ``signal`` from ``sender`` to all connected receivers catching
errors
- ``signal``: (Hashable) signal value, see connect for details
- ``sender``: The sender of the signal.
If ``Any``, only receivers registered for ``Any`` will receive the
message.
If ``Anonymous``, only receivers registered to receive messages
from ``Anonymous`` or ``Any`` will receive the message.
Otherwise can be any Python object (normally one registered with
a connect if you actually want something to occur).
- ``arguments``: Positional arguments which will be passed to *all*
receivers. Note that this may raise ``TypeError`` if the receivers
do not allow the particular arguments. Note also that arguments
are applied before named arguments, so they should be used with
care.
- ``named``: Named arguments which will be filtered according to the
parameters of the receivers to only provide those acceptable to
the receiver.
Return a list of tuple pairs ``[(receiver, response), ... ]``
If any receiver raises an error (specifically, any subclass of
``Exception``), the error instance is returned as the result for
that receiver.
"""
# Call each receiver with whatever arguments it can accept.
# Return a list of tuple pairs [(receiver, response), ... ].
responses = []
for receiver in live_receivers(get_all_receivers(sender, signal)):
original = receiver
for plugin in plugins:
receiver = plugin.wrap_receiver(receiver)
try:
response = robustapply.robust_apply(
receiver, original,
signal=signal,
sender=sender,
*arguments,
**named
)
except Exception as err:
responses.append((receiver, err))
else:
responses.append((receiver, response))
return responses
def _remove_receiver(receiver):
"""Remove ``receiver`` from connections."""
if not senders_back:
# During module cleanup the mapping will be replaced with None.
return False
backKey = id(receiver)
for senderkey in senders_back.get(backKey, ()):
try:
signals = connections[senderkey].keys()
except KeyError:
pass
else:
for signal in list(signals):
try:
receivers = connections[senderkey][signal]
except KeyError:
pass
else:
try:
receivers.remove(receiver)
except Exception:
pass
_cleanup_connections(senderkey, signal)
try:
del senders_back[backKey]
except KeyError:
pass
def _cleanup_connections(senderkey, signal):
"""Delete empty signals for ``senderkey``. Delete ``senderkey`` if
empty."""
try:
receivers = connections[senderkey][signal]
except Exception:
pass
else:
if not receivers:
# No more connected receivers. Therefore, remove the signal.
try:
signals = connections[senderkey]
except KeyError:
pass
else:
del signals[signal]
if not signals:
# No more signal connections. Therefore, remove the sender.
_remove_sender(senderkey)
def _remove_sender(senderkey):
"""Remove ``senderkey`` from connections."""
_remove_back_refs(senderkey)
try:
del connections[senderkey]
except KeyError:
pass
# Senderkey will only be in senders dictionary if sender
# could be weakly referenced.
try:
del senders[senderkey]
except Exception:
pass
def _remove_back_refs(senderkey):
"""Remove all back-references to this ``senderkey``."""
try:
signals = connections[senderkey]
except KeyError:
signals = None
else:
for signal, receivers in iteritems(signals):
for receiver in receivers:
_kill_back_ref(receiver, senderkey)
def _remove_old_back_refs(senderkey, signal, receiver, receivers):
"""Kill old ``senders_back`` references from ``receiver``.
This guards against multiple registration of the same receiver for
a given signal and sender leaking memory as old back reference
records build up.
Also removes old receiver instance from receivers.
"""
try:
index = receivers.index(receiver)
# need to scan back references here and remove senderkey
except ValueError:
return False
else:
old_receiver = receivers[index]
del receivers[index]
found = 0
signals = connections.get(signal)
if signals is not None:
for sig, recs in iteritems(connections.get(signal, {})):
if sig != signal:
for rec in recs:
if rec is old_receiver:
found = 1
break
if not found:
_kill_back_ref(old_receiver, senderkey)
return True
return False
def _kill_back_ref(receiver, senderkey):
"""Do actual removal of back reference from ``receiver`` to
``senderkey``."""
receiverkey = id(receiver)
senders = senders_back.get(receiverkey, ())
while senderkey in senders:
try:
senders.remove(senderkey)
except Exception:
break
if not senders:
try:
del senders_back[receiverkey]
except KeyError:
pass
return True | PypiClean |
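# --- Illustrative usage sketch (added for clarity; not part of the original
# module). Shows the basic connect/send/disconnect round trip; the signal
# name and handler below are made up for the example. ``weak=False`` is used
# because the handler is a local function that would otherwise be collected.
def _example_connect_send():  # pragma: no cover
    """Connect a handler, send it a signal, then disconnect it."""
    received = []
    def on_greet(sender, message=""):
        received.append((sender, message))
        return "ok"
    connect(on_greet, signal="greet", weak=False)
    responses = send(signal="greet", sender="demo", message="hello")
    # ``responses`` is a list of (receiver, response) pairs, e.g. [(on_greet, "ok")]
    disconnect(on_greet, signal="greet", weak=False)
    return received, responses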
/Neodroid-0.4.9-py36-none-any.whl/samples/generation/dataset/tf_record_generator.py | import json
from neodroid import PROJECT_APP_PATH
from neodroid.environments.droid_environment import connect
__author__ = "Christian Heider Nielsen"
__doc__ = ""
import tensorflow as tf
tf.enable_eager_execution()
class TFFeature:
@staticmethod
def listify(value):
if isinstance(value, list):
return value
return [value]
@staticmethod
def bytes(value):
"""Returns a bytes_list from a string / byte."""
return tf.train.Feature(
bytes_list=tf.train.BytesList(value=TFFeature.listify(value))
)
@staticmethod
def float(value):
"""Returns a float_list from a float / double."""
return tf.train.Feature(
float_list=tf.train.FloatList(value=TFFeature.listify(value))
)
@staticmethod
def int64(value):
"""Returns an int64_list from a bool / enum / int / uint."""
return tf.train.Feature(
int64_list=tf.train.Int64List(value=TFFeature.listify(value))
)
TFF = TFFeature
def neodroid_tf_example(image_string, label, image_shape, bounding_box):
"""
:param bounding_box:
:param image_shape:
:param image_string:
:param label:
:return:
"""
feature = {
"height": TFF.int64([image_shape[0]]),
"width": TFF.int64([image_shape[1]]),
"depth": TFF.int64([image_shape[2]]),
"label": TFF.int64([int(label)]),
"bb_x": TFF.float([bounding_box["x"]]),
"bb_y": TFF.float([bounding_box["y"]]),
"bb_w": TFF.float([bounding_box["w"]]),
"bb_h": TFF.float([bounding_box["h"]]),
"image_raw": TFF.bytes([image_string.tobytes()]),
}
return tf.train.Example(features=tf.train.Features(feature=feature))
def write_tf_record_file(data_tuples, file_name="neodroid_bb_images.tfr"):
"""
"""
with tf.python_io.TFRecordWriter(file_name) as writer:
for data_tuple in data_tuples:
tensorflow_example = neodroid_tf_example(*data_tuple)
writer.write(tensorflow_example.SerializeToString())
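# --- Illustrative decoding sketch (added for clarity; not part of the original
# script). Shows how one parsed example, such as those produced in the
# __main__ block below, could be turned back into an image tensor. Assumes
# uint8 pixels; adjust ``out_type`` if the sensor delivers another dtype.
def _decode_example(image_features):  # pragma: no cover
    """Rebuild the raw image tensor from one parsed example."""
    flat = tf.decode_raw(image_features["image_raw"], tf.uint8)
    shape = tf.stack(
        [
            image_features["height"],
            image_features["width"],
            image_features["depth"],
        ]
    )
    return tf.reshape(flat, shape)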
if __name__ == "__main__":
generate_num = 10
output_file_name = PROJECT_APP_PATH.user_data / "neodroid_bb_images.tfr"
if generate_num > 0:
dt = []
with connect() as env:
for i, state in enumerate(env):
if i >= generate_num:
break
state = state[list(state.keys())[0]]
label = state.sensor("Class").value
bb = state.sensor("BoundingBox").value
image_data = state.sensor("RGB").value
dt.append((image_data, label, (256, 256, 4), json.loads(bb)))
write_tf_record_file(dt, file_name=output_file_name)
raw_image_dataset = tf.data.TFRecordDataset(output_file_name)
image_feature_description = {
"height": tf.FixedLenFeature([], tf.int64),
"width": tf.FixedLenFeature([], tf.int64),
"depth": tf.FixedLenFeature([], tf.int64),
"label": tf.FixedLenFeature([], tf.int64),
"bb_x": tf.FixedLenFeature([], tf.float32),
"bb_y": tf.FixedLenFeature([], tf.float32),
"bb_w": tf.FixedLenFeature([], tf.float32),
"bb_h": tf.FixedLenFeature([], tf.float32),
"image_raw": tf.FixedLenFeature([], tf.string),
} # Create a dictionary describing the features.
def _parse_image_function(example_proto):
# Parse the input tf.Example proto using the dictionary above.
return tf.parse_single_example(example_proto, image_feature_description)
parsed_image_dataset = raw_image_dataset.map(_parse_image_function)
for image_features in parsed_image_dataset:
print(image_features["bb_x"])
print(image_features["bb_y"])
print(image_features["bb_w"])
print(image_features["bb_h"]) | PypiClean |
/Flask-CKEditor-0.4.6.tar.gz/Flask-CKEditor-0.4.6/flask_ckeditor/static/full/lang/fa.js | /*
Copyright (c) 2003-2020, CKSource - Frederico Knabben. All rights reserved.
For licensing, see LICENSE.md or https://ckeditor.com/license
*/
CKEDITOR.lang['fa']={"editor":"ویرایشگر متن غنی","editorPanel":"پنل ویرایشگر متن غنی","common":{"editorHelp":"کلید Alt+0 را برای راهنمایی بفشارید","browseServer":"فهرستنمایی سرور","url":"URL","protocol":"قرارداد","upload":"بالاگذاری","uploadSubmit":"به سرور بفرست","image":"تصویر","flash":"فلش","form":"فرم","checkbox":"چکباکس","radio":"دکمهی رادیویی","textField":"فیلد متنی","textarea":"ناحیهٴ متنی","hiddenField":"فیلد پنهان","button":"دکمه","select":"فیلد انتخاب چند گزینهای","imageButton":"دکمهی تصویری","notSet":"<تعییننشده>","id":"شناسه","name":"نام","langDir":"جهت زبان","langDirLtr":"چپ به راست","langDirRtl":"راست به چپ","langCode":"کد زبان","longDescr":"URL توصیف طولانی","cssClass":"کلاسهای شیوهنامه (Stylesheet)","advisoryTitle":"عنوان کمکی","cssStyle":"سبک","ok":"پذیرش","cancel":"انصراف","close":"بستن","preview":"پیشنمایش","resize":"تغییر اندازه","generalTab":"عمومی","advancedTab":"پیشرفته","validateNumberFailed":"این مقدار یک عدد نیست.","confirmNewPage":"هر تغییر ایجاد شدهی ذخیره نشده از بین خواهد رفت. آیا اطمینان دارید که قصد بارگیری صفحه جدیدی را دارید؟","confirmCancel":"برخی از گزینهها تغییر کردهاند. آیا واقعا قصد بستن این پنجره را دارید؟","options":"گزینهها","target":"مقصد","targetNew":"پنجره جدید","targetTop":"بالاترین پنجره","targetSelf":"همان پنجره","targetParent":"پنجره والد","langDirLTR":"چپ به راست","langDirRTL":"راست به چپ","styles":"سبک","cssClasses":"کلاسهای سبکنامه","width":"عرض","height":"طول","align":"چینش","left":"چپ","right":"راست","center":"وسط","justify":"بلوک چین","alignLeft":"چپ چین","alignRight":"راست چین","alignCenter":"مرکز قرار بده","alignTop":"بالا","alignMiddle":"میانه","alignBottom":"پائین","alignNone":"هیچ","invalidValue":"مقدار نامعتبر.","invalidHeight":"ارتفاع باید یک عدد باشد.","invalidWidth":"عرض باید یک عدد باشد.","invalidLength":"عدد تعیین شده برای فیلد \"%1\" باید یک عدد مثبت با یا بدون یک واحد اندازه گیری معتبر (\"%2\") باشد.","invalidCssLength":"عدد تعیین شده برای فیلد \"%1\" باید یک عدد مثبت با یا بدون یک واحد اندازه گیری CSS معتبر باشد (px, %, in, cm, mm, em, ex, pt, or pc).","invalidHtmlLength":"عدد تعیین شده برای فیلد \"%1\" باید یک عدد مثبت با یا بدون یک واحد اندازه گیری HTML معتبر باشد (px or %).","invalidInlineStyle":"عدد تعیین شده برای سبک درونخطی -Inline Style- باید دارای یک یا چند چندتایی با شکلی شبیه \"name : value\" که باید با یک \";\" از هم جدا شوند.","cssLengthTooltip":"یک عدد برای یک مقدار بر حسب پیکسل و یا یک عدد با یک واحد CSS معتبر وارد کنید (px, %, in, cm, mm, em, ex, pt, or pc).","unavailable":"%1<span class=\"cke_accessibility\">، غیر قابل دسترس</span>","keyboard":{"8":"عقبگرد","13":"ورود","16":"تعویض","17":"کنترل","18":"دگرساز","32":"فاصله","35":"پایان","36":"خانه","46":"حذف","112":"F1","113":"F2","114":"F3","115":"F4","116":"F5","117":"F6","118":"F7","119":"F8","120":"F9","121":"F10","122":"F11","123":"F12","124":"F13","125":"F14","126":"F15","127":"F16","128":"F17","129":"F18","130":"F19","131":"F20","132":"F21","133":"F22","134":"F23","135":"F24","224":"فرمان"},"keyboardShortcut":"میانبر صفحه کلید","optionDefault":"پیش فرض"},"about":{"copy":"حق نشر © $1. 
کلیه حقوق محفوظ است.","dlgTitle":"درباره CKEditor","moreInfo":"برای کسب اطلاعات مجوز لطفا به وب سایت ما مراجعه کنید:"},"basicstyles":{"bold":"درشت","italic":"خمیده","strike":"خطخورده","subscript":"زیرنویس","superscript":"بالانویس","underline":"زیرخطدار"},"bidi":{"ltr":"جهت متن از چپ به راست","rtl":"جهت متن از راست به چپ"},"blockquote":{"toolbar":"بلوک نقل قول"},"notification":{"closed":"آگاهسازی بسته شد"},"toolbar":{"toolbarCollapse":"بستن نوار ابزار","toolbarExpand":"بازکردن نوار ابزار","toolbarGroups":{"document":"سند","clipboard":"حافظه موقت/برگشت","editing":"در حال ویرایش","forms":"فرمها","basicstyles":"سبکهای پایه","paragraph":"بند","links":"پیوندها","insert":"ورود","styles":"سبکها","colors":"رنگها","tools":"ابزارها"},"toolbars":"نوار ابزارهای ویرایشگر"},"clipboard":{"copy":"رونوشت","copyError":"تنظیمات امنیتی مرورگر شما اجازه نمیدهد که ویرایشگر به طور خودکار عملکردهای کپی کردن را انجام دهد. لطفا با دکمههای صفحه کلید این کار را انجام دهید (Ctrl/Cmd+C).","cut":"برش","cutError":"تنظیمات امنیتی مرورگر شما اجازه نمیدهد که ویرایشگر به طور خودکار عملکردهای برش را انجام دهد. لطفا با دکمههای صفحه کلید این کار را انجام دهید (Ctrl/Cmd+X).","paste":"چسباندن","pasteNotification":"1% را فشاردهید تا قرار داده شود. مرورگر شما از قراردهی با دکمه نوارابزار یا گزینه منوی زمینه پشتیبانی نمیکند","pasteArea":"محل چسباندن","pasteMsg":"محتوای خود را در ناحیه زیر قرار دهید و OK را فشار دهید"},"colorbutton":{"auto":"خودکار","bgColorTitle":"رنگ پسزمینه","colors":{"000":"سیاه","800000":"خرمایی","8B4513":"قهوهای شکلاتی","2F4F4F":"ارغوانی مایل به خاکستری","008080":"آبی مایل به خاکستری","000080":"آبی سیر","4B0082":"نیلی","696969":"خاکستری تیره","B22222":"آتش آجری","A52A2A":"قهوهای","DAA520":"میلهی طلایی","006400":"سبز تیره","40E0D0":"فیروزهای","0000CD":"آبی روشن","800080":"ارغوانی","808080":"خاکستری","F00":"قرمز","FF8C00":"نارنجی پررنگ","FFD700":"طلایی","008000":"سبز","0FF":"آبی مایل به سبز","00F":"آبی","EE82EE":"بنفش","A9A9A9":"خاکستری مات","FFA07A":"صورتی کدر روشن","FFA500":"نارنجی","FFFF00":"زرد","00FF00":"فسفری","AFEEEE":"فیروزهای رنگ پریده","ADD8E6":"آبی کمرنگ","DDA0DD":"آلویی","D3D3D3":"خاکستری روشن","FFF0F5":"بنفش کمرنگ","FAEBD7":"عتیقه سفید","FFFFE0":"زرد روشن","F0FFF0":"عسلی","F0FFFF":"لاجوردی","F0F8FF":"آبی براق","E6E6FA":"بنفش کمرنگ","FFF":"سفید","1ABC9C":"فیروزه ای پررنگ","2ECC71":"سبز زمردی","3498DB":"آبی روشن","9B59B6":"ارغوانی","4E5F70":"آبی خاکستری","F1C40F":"زرد تازه","16A085":"فیروزه ای تیره","27AE60":"سبز زمردی تیره","2980B9":"آبی پر رنگ","8E44AD":"بنفش تیره","2C3E50":"آبی اشباع شده","F39C12":"نارنجی","E67E22":"هویجی","E74C3C":"قرمز روشن","ECF0F1":"نقره ای روشن","95A5A6":"آبی خاکستری روشن","DDD":"خاکستری روشن","D35400":"کدو حلوایی","C0392B":"قرمز پررنگ","BDC3C7":"نقره ای","7F8C8D":"فیروزه ای خاکستری","999":"خاکستری تیره"},"more":"رنگهای بیشتر...","panelTitle":"رنگها","textColorTitle":"رنگ متن"},"colordialog":{"clear":"پاک کردن","highlight":"متمایز","options":"گزینههای رنگ","selected":"رنگ انتخاب شده","title":"انتخاب رنگ"},"templates":{"button":"الگوها","emptyListMsg":"(الگوئی تعریف نشده است)","insertOption":"محتویات کنونی جایگزین شوند","options":"گزینههای الگو","selectPromptMsg":"لطفاً الگوی مورد نظر را برای بازکردن در ویرایشگر انتخاب کنید","title":"الگوهای محتویات"},"contextmenu":{"options":"گزینههای منوی زمینه"},"copyformatting":{"label":"Copy Formatting","notification":{"copied":"Formatting copied","applied":"Formatting applied","canceled":"Formatting canceled","failed":"Formatting failed. 
You cannot apply styles without copying them first."}},"div":{"IdInputLabel":"شناسه","advisoryTitleInputLabel":"عنوان مشاوره","cssClassInputLabel":"کلاسهای شیوهنامه","edit":"ویرایش Div","inlineStyleInputLabel":"سبک درونخطی(Inline Style)","langDirLTRLabel":"چپ به راست (LTR)","langDirLabel":"جهت نوشتاری زبان","langDirRTLLabel":"راست به چپ (RTL)","languageCodeInputLabel":" کد زبان","remove":"حذف Div","styleSelectLabel":"سبک","title":"ایجاد یک محل DIV","toolbar":"ایجاد یک محل DIV"},"elementspath":{"eleLabel":"مسیر عناصر","eleTitle":"%1 عنصر"},"filetools":{"loadError":"هنگام خواندن فایل، خطایی رخ داد.","networkError":"هنگام آپلود فایل خطای شبکه رخ داد.","httpError404":"هنگام آپلود فایل خطای HTTP رخ داد (404: فایل یافت نشد).","httpError403":"هنگام آپلود فایل، خطای HTTP رخ داد (403: ممنوع).","httpError":"خطای HTTP در آپلود فایل رخ داده است (وضعیت خطا: %1).","noUrlError":"آدرس آپلود تعریف نشده است.","responseError":"پاسخ نادرست سرور."},"find":{"find":"جستجو","findOptions":"گزینههای جستجو","findWhat":"چه چیز را مییابید:","matchCase":"همسانی در بزرگی و کوچکی نویسهها","matchCyclic":"همسانی با چرخه","matchWord":"همسانی با واژهٴ کامل","notFoundMsg":"متن موردنظر یافت نشد.","replace":"جایگزینی","replaceAll":"جایگزینی همهٴ یافتهها","replaceSuccessMsg":"%1 رخداد جایگزین شد.","replaceWith":"جایگزینی با:","title":"جستجو و جایگزینی"},"fakeobjects":{"anchor":"لنگر","flash":"انیمشن فلش","hiddenfield":"فیلد پنهان","iframe":"IFrame","unknown":"شیء ناشناخته"},"flash":{"access":"دسترسی به اسکریپت","accessAlways":"همیشه","accessNever":"هرگز","accessSameDomain":"همان دامنه","alignAbsBottom":"پائین مطلق","alignAbsMiddle":"وسط مطلق","alignBaseline":"خط پایه","alignTextTop":"متن بالا","bgcolor":"رنگ پسزمینه","chkFull":"اجازه تمام صفحه","chkLoop":"اجرای پیاپی","chkMenu":"در دسترس بودن منوی فلش","chkPlay":"آغاز خودکار","flashvars":"مقادیر برای فلش","hSpace":"فاصلهٴ افقی","properties":"ویژگیهای فلش","propertiesTab":"ویژگیها","quality":"کیفیت","qualityAutoHigh":"بالا - خودکار","qualityAutoLow":"پایین - خودکار","qualityBest":"بهترین","qualityHigh":"بالا","qualityLow":"پایین","qualityMedium":"متوسط","scale":"مقیاس","scaleAll":"نمایش همه","scaleFit":"جایگیری کامل","scaleNoBorder":"بدون کران","title":"ویژگیهای فلش","vSpace":"فاصلهٴ عمودی","validateHSpace":"مقدار فاصله گذاری افقی باید یک عدد باشد.","validateSrc":"لطفا URL پیوند را بنویسید","validateVSpace":"مقدار فاصله گذاری عمودی باید یک عدد باشد.","windowMode":"حالت پنجره","windowModeOpaque":"مات","windowModeTransparent":"شفاف","windowModeWindow":"پنجره"},"font":{"fontSize":{"label":"اندازه","voiceLabel":"اندازه قلم","panelTitle":"اندازه قلم"},"label":"قلم","panelTitle":"نام قلم","voiceLabel":"قلم"},"forms":{"button":{"title":"ویژگیهای دکمه","text":"متن (مقدار)","type":"نوع","typeBtn":"دکمه","typeSbm":"ثبت","typeRst":"بازنشانی (Reset)"},"checkboxAndRadio":{"checkboxTitle":"ویژگیهای خانهٴ گزینهای","radioTitle":"ویژگیهای دکمهٴ رادیویی","value":"مقدار","selected":"برگزیده","required":"ضروری"},"form":{"title":"ویژگیهای فرم","menu":"ویژگیهای فرم","action":"رویداد","method":"متد","encoding":"رمزنگاری"},"hidden":{"title":"ویژگیهای فیلد پنهان","name":"نام","value":"مقدار"},"select":{"title":"ویژگیهای فیلد چندگزینهای","selectInfo":"اطلاعات","opAvail":"گزینههای دردسترس","value":"مقدار","size":"اندازه","lines":"خطوط","chkMulti":"گزینش چندگانه فراهم باشد","required":"ضروری","opText":"متن","opValue":"مقدار","btnAdd":"افزودن","btnModify":"ویرایش","btnUp":"بالا","btnDown":"پائین","btnSetValue":"تنظیم به عنوان مقدار برگزیده","btnDelete":"پاککردن"},"textarea":{"title":"ویژگیهای ناحیهٴ 
متنی","cols":"ستونها","rows":"سطرها"},"textfield":{"title":"ویژگیهای فیلد متنی","name":"نام","value":"مقدار","charWidth":"پهنای نویسه","maxChars":"حداکثر کارکتر","required":"ضروری","type":"نوع","typeText":"متن","typePass":"گذرواژه","typeEmail":"ایمیل","typeSearch":"جستجو","typeTel":"شماره تلفن","typeUrl":"URL"}},"format":{"label":"قالب","panelTitle":"قالب بند","tag_address":"نشانی","tag_div":"بند","tag_h1":"سرنویس ۱","tag_h2":"سرنویس ۲","tag_h3":"سرنویس ۳","tag_h4":"سرنویس ۴","tag_h5":"سرنویس ۵","tag_h6":"سرنویس ۶","tag_p":"معمولی","tag_pre":"قالبدار"},"horizontalrule":{"toolbar":"گنجاندن خط افقی"},"iframe":{"border":"نمایش خطوط frame","noUrl":"لطفا مسیر URL iframe را درج کنید","scrolling":"نمایش خطکشها","title":"ویژگیهای IFrame","toolbar":"IFrame"},"image":{"alt":"متن جایگزین","border":"لبه","btnUpload":"به سرور بفرست","button2Img":"آیا مایلید از یک تصویر ساده روی دکمه تصویری انتخاب شده استفاده کنید؟","hSpace":"فاصلهٴ افقی","img2Button":"آیا مایلید از یک دکمه تصویری روی تصویر انتخاب شده استفاده کنید؟","infoTab":"اطلاعات تصویر","linkTab":"پیوند","lockRatio":"قفل کردن نسبت","menu":"ویژگیهای تصویر","resetSize":"بازنشانی اندازه","title":"ویژگیهای تصویر","titleButton":"ویژگیهای دکمهٴ تصویری","upload":"انتقال به سرور","urlMissing":"آدرس URL اصلی تصویر یافت نشد.","vSpace":"فاصلهٴ عمودی","validateBorder":"مقدار خطوط باید یک عدد باشد.","validateHSpace":"مقدار فاصله گذاری افقی باید یک عدد باشد.","validateVSpace":"مقدار فاصله گذاری عمودی باید یک عدد باشد."},"indent":{"indent":"افزایش تورفتگی","outdent":"کاهش تورفتگی"},"smiley":{"options":"گزینههای خندانک","title":"گنجاندن خندانک","toolbar":"خندانک"},"language":{"button":"تعیین زبان","remove":"حذف زبان"},"link":{"acccessKey":"کلید دستیابی","advanced":"پیشرفته","advisoryContentType":"نوع محتوای کمکی","advisoryTitle":"عنوان کمکی","anchor":{"toolbar":"گنجاندن/ویرایش لنگر","menu":"ویژگیهای لنگر","title":"ویژگیهای لنگر","name":"نام لنگر","errorName":"لطفا نام لنگر را بنویسید","remove":"حذف لنگر"},"anchorId":"با شناسهٴ المان","anchorName":"با نام لنگر","charset":"نویسهگان منبع پیوند شده","cssClasses":"کلاسهای شیوهنامه(Stylesheet)","download":"Force Download","displayText":"نمایش متن","emailAddress":"نشانی پست الکترونیکی","emailBody":"متن پیام","emailSubject":"موضوع پیام","id":"شناسه","info":"اطلاعات پیوند","langCode":"جهتنمای زبان","langDir":"جهتنمای زبان","langDirLTR":"چپ به راست (LTR)","langDirRTL":"راست به چپ (RTL)","menu":"ویرایش پیوند","name":"نام","noAnchors":"(در این سند لنگری دردسترس نیست)","noEmail":"لطفا نشانی پست الکترونیکی را بنویسید","noUrl":"لطفا URL پیوند را بنویسید","noTel":"Please type the phone number","other":"<سایر>","phoneNumber":"Phone number","popupDependent":"وابسته (Netscape)","popupFeatures":"ویژگیهای پنجرهٴ پاپاپ","popupFullScreen":"تمام صفحه (IE)","popupLeft":"موقعیت چپ","popupLocationBar":"نوار موقعیت","popupMenuBar":"نوار منو","popupResizable":"قابل تغییر اندازه","popupScrollBars":"میلههای پیمایش","popupStatusBar":"نوار وضعیت","popupToolbar":"نوار ابزار","popupTop":"موقعیت بالا","rel":"وابستگی","selectAnchor":"یک لنگر برگزینید","styles":"شیوه (style)","tabIndex":"نمایهٴ دسترسی با برگه","target":"مقصد","targetFrame":"<فریم>","targetFrameName":"نام فریم مقصد","targetPopup":"<پنجرهٴ پاپاپ>","targetPopupName":"نام پنجرهٴ پاپاپ","title":"پیوند","toAnchor":"لنگر در همین صفحه","toEmail":"پست الکترونیکی","toUrl":"URL","toPhone":"Phone","toolbar":"گنجاندن/ویرایش پیوند","type":"نوع پیوند","unlink":"برداشتن پیوند","upload":"انتقال به سرور"},"list":{"bulletedlist":"فهرست نقطهای","numberedlist":"فهرست 
شمارهدار"},"liststyle":{"bulletedTitle":"خصوصیات فهرست نقطهای","circle":"دایره","decimal":"دهدهی (۱، ۲، ۳، ...)","disc":"صفحه گرد","lowerAlpha":"پانویس الفبایی (a, b, c, d, e, etc.)","lowerRoman":"پانویس رومی (i, ii, iii, iv, v, etc.)","none":"هیچ","notset":"<تنظیم نشده>","numberedTitle":"ویژگیهای فهرست شمارهدار","square":"چهارگوش","start":"شروع","type":"نوع","upperAlpha":"بالانویس الفبایی (A, B, C, D, E, etc.)","upperRoman":"بالانویس رومی (I, II, III, IV, V, etc.)","validateStartNumber":"فهرست شماره شروع باید یک عدد صحیح باشد."},"magicline":{"title":"قرار دادن بند در اینجا"},"maximize":{"maximize":"بیشنه کردن","minimize":"کمینه کردن"},"newpage":{"toolbar":"برگهٴ تازه"},"pagebreak":{"alt":"شکستن صفحه","toolbar":"گنجاندن شکستگی پایان برگه"},"pastetext":{"button":"چسباندن به عنوان متن ساده","pasteNotification":"Press %1 to paste. Your browser doesn‘t support pasting with the toolbar button or context menu option.","title":"چسباندن به عنوان متن ساده"},"pastefromword":{"confirmCleanup":"متنی که میخواهید بچسبانید به نظر میرسد که از Word کپی شده است. آیا میخواهید قبل از چسباندن آن را پاکسازی کنید؟","error":"به دلیل بروز خطای داخلی امکان پاکسازی اطلاعات بازنشانی شده وجود ندارد.","title":"چسباندن از Word","toolbar":"چسباندن از Word"},"preview":{"preview":"پیشنمایش"},"print":{"toolbar":"چاپ"},"removeformat":{"toolbar":"برداشتن فرمت"},"save":{"toolbar":"ذخیره"},"selectall":{"toolbar":"گزینش همه"},"showblocks":{"toolbar":"نمایش بلوکها"},"sourcearea":{"toolbar":"منبع"},"specialchar":{"options":"گزینههای نویسههای ویژه","title":"گزینش نویسهی ویژه","toolbar":"گنجاندن نویسهی ویژه"},"scayt":{"btn_about":"درباره SCAYT","btn_dictionaries":"دیکشنریها","btn_disable":"غیرفعالسازی SCAYT","btn_enable":"فعالسازی SCAYT","btn_langs":"زبانها","btn_options":"گزینهها","text_title":"بررسی املای تایپ شما"},"stylescombo":{"label":"سبک","panelTitle":"سبکهای قالببندی","panelTitle1":"سبکهای بلوک","panelTitle2":"سبکهای درونخطی","panelTitle3":"سبکهای شیء"},"table":{"border":"اندازهٴ لبه","caption":"عنوان","cell":{"menu":"سلول","insertBefore":"افزودن سلول قبل از","insertAfter":"افزودن سلول بعد از","deleteCell":"حذف سلولها","merge":"ادغام سلولها","mergeRight":"ادغام به راست","mergeDown":"ادغام به پایین","splitHorizontal":"جدا کردن افقی سلول","splitVertical":"جدا کردن عمودی سلول","title":"ویژگیهای سلول","cellType":"نوع سلول","rowSpan":"محدوده ردیفها","colSpan":"محدوده ستونها","wordWrap":"شکستن کلمه","hAlign":"چینش افقی","vAlign":"چینش عمودی","alignBaseline":"خط مبنا","bgColor":"رنگ زمینه","borderColor":"رنگ خطوط","data":"اطلاعات","header":"سرنویس","yes":"بله","no":"خیر","invalidWidth":"عرض سلول باید یک عدد باشد.","invalidHeight":"ارتفاع سلول باید عدد باشد.","invalidRowSpan":"مقدار محدوده ردیفها باید یک عدد باشد.","invalidColSpan":"مقدار محدوده ستونها باید یک عدد باشد.","chooseColor":"انتخاب"},"cellPad":"فاصلهٴ پرشده در سلول","cellSpace":"فاصلهٴ میان سلولها","column":{"menu":"ستون","insertBefore":"افزودن ستون قبل از","insertAfter":"افزودن ستون بعد از","deleteColumn":"حذف ستونها"},"columns":"ستونها","deleteTable":"پاک کردن جدول","headers":"سرنویسها","headersBoth":"هردو","headersColumn":"اولین ستون","headersNone":"هیچ","headersRow":"اولین ردیف","heightUnit":"height unit","invalidBorder":"مقدار اندازه خطوط باید یک عدد باشد.","invalidCellPadding":"بالشتک سلول باید یک عدد باشد.","invalidCellSpacing":"مقدار فاصلهگذاری سلول باید یک عدد باشد.","invalidCols":"تعداد ستونها باید یک عدد بزرگتر از 0 باشد.","invalidHeight":"مقدار ارتفاع جدول باید یک عدد باشد.","invalidRows":"تعداد ردیفها باید یک عدد بزرگتر از 0 باشد.","invalidWidth":"مقدار 
پهنای جدول باید یک عدد باشد.","menu":"ویژگیهای جدول","row":{"menu":"سطر","insertBefore":"افزودن سطر قبل از","insertAfter":"افزودن سطر بعد از","deleteRow":"حذف سطرها"},"rows":"سطرها","summary":"خلاصه","title":"ویژگیهای جدول","toolbar":"جدول","widthPc":"درصد","widthPx":"پیکسل","widthUnit":"واحد پهنا"},"undo":{"redo":"بازچیدن","undo":"واچیدن"},"widget":{"move":"کلیک و کشیدن برای جابجایی","label":"ابزارک %1"},"uploadwidget":{"abort":"بارگذاری توسط کاربر لغو شد.","doneOne":"فایل با موفقیت بارگذاری شد.","doneMany":"%1 از فایلها با موفقیت بارگذاری شد.","uploadOne":"بارگذاری فایل ({percentage}%)...","uploadMany":"بارگذاری فایلها, {current} از {max} انجام شده ({percentage}%)..."},"wsc":{"btnIgnore":"چشمپوشی","btnIgnoreAll":"چشمپوشی همه","btnReplace":"جایگزینی","btnReplaceAll":"جایگزینی همه","btnUndo":"واچینش","changeTo":"تغییر به","errorLoading":"خطا در بارگیری برنامه خدمات میزبان: %s.","ieSpellDownload":"بررسی کنندهٴ املا نصب نشده است. آیا میخواهید آن را هماکنون دریافت کنید؟","manyChanges":"بررسی املا انجام شد. %1 واژه تغییر یافت","noChanges":"بررسی املا انجام شد. هیچ واژهای تغییر نیافت","noMispell":"بررسی املا انجام شد. هیچ غلط املائی یافت نشد","noSuggestions":"- پیشنهادی نیست -","notAvailable":"با عرض پوزش خدمات الان در دسترس نیستند.","notInDic":"در واژه~نامه یافت نشد","oneChange":"بررسی املا انجام شد. یک واژه تغییر یافت","progress":"بررسی املا در حال انجام...","title":"بررسی املا","toolbar":"بررسی املا"}}; | PypiClean |
/Gates-v1.0.0.beta.tar.gz/Gates-v1.0.0.beta/src/gates/core/application.py | import webob
import cgi
import inspect
import logging
import urlparse
import threading
from traceback import print_exc
from routes import route
from routes.mapper import Mapper
from .exceptions import status_map, HTTPInternalServerError, HTTPException, HTTPNotFound
__version__ = "v1.0.0.beta"
__all__ = ["Application", "Resource", "Path", "Route", "abort", "run", ]
HTTP_METHODS = ["HEAD", "GET", "PUT", "DELETE", "POST",]
__all__.extend(HTTP_METHODS)
logger = logging.getLogger('gates.logger')
###############################################################################
# Exceptions ##################################################################
###############################################################################
class DuplicatePathError(Exception):
'''Another Request handler is already bound to this path'''
pass
###############################################################################
# HTTP Routing Tools ##########################################################
###############################################################################
'''
RouteBuilder:
This class uses the builder pattern to configure and create
routes.Route objects which will be added to the global mapper.
This is necessary because Gates' decorators configure routes
on many different levels, so we need a consistent interface
for creating routes.Route objects.
Sample code:
builder = RouteBuilder()
builder = builder.route("/{home}/edit/{action: save|discard}") #Add this route to this builder.
builder = builder.controller("DocumentController").httpverb("PUT")
builder = builder.action("do")
route = builder.build() #Create the route object finally.
'''
class RouteBuilder(object):
'''Make an instance routes.Route using the builder pattern'''
def __init__(self):
'''An empty constructor'''
self.__method = set()
self.__route = None
def route(self, route):
        '''Set the route path on this ``RouteBuilder`` and return it'''
assert route is not None, "Routes cannot be None"
assert route.startswith('/'), "Routes must begin with a '/'"
self.__route = route
return self
def controller(self, name):
'''Set the controller for the current route'''
self.__controller = name
return self
def hasHTTPVerb(self):
        '''Checks if an HTTP verb has been set on this route'''
return bool(self.__method)
def hasRoute(self):
'''Checks if there is a route set on this ``RouteBuilder``'''
return bool(self.__route)
def action(self, action):
'''Set the action for the current controller'''
self.__action = action
return self
def httpverb(self, method):
'''Set the HTTP method for the ``RouteBuilder``'''
assert method in HTTP_METHODS, "Invalid HTTP Method please use one of: %s" % HTTP_METHODS
self.__method.add(method)
return self
def build(self):
'''Create a route.Route object from this ``RouteBuilder`` and return it'''
return route.Route(None, routepath=self.__route, controller=self.__controller,
action=self.__action, conditions=dict(method=list(self.__method)))
###############################################################################
# Decorators ##################################################################
###############################################################################
'''
Path:
A Decorator that hooks a ``Resource`` with the internal
HTTP Routing mechanism.
Sample use:
@Path('/home')
class HomeHandler(Resource):
pass
Implementation Notes:
What we want here is for the @Path decorator to register the handler
with the framework's ``Registry``. It makes sure that every handler
is mapped to a unique path within the registry.
'''
def Path(base):
'''The @Path Decorator'''
#Create and Register the route if it doesn't exist
def inner(clasz):
'''Wrapper for the function '''
assert issubclass(clasz, Resource), "@Path can only be used on Resources"
assert base.startswith("/"), "URLs used in route should start with /"
logger.info('Wrapping Class: %s' % clasz.__name__)
Registry.addHandler(base, clasz)
return clasz
return inner
'''
Route:
Used to bind a method in a ``Resource`` to a URL Route;
'''
def Route(route):
'''The @Route Decorator'''
def inner(action):
'''Wrapper for the action '''
assert route.startswith("/"), "All routes must start with '/'"
logger.info('Configuring Routes and Action for method: %s' % action.__name__)
builder = Registry.builderFor(action)
builder.route(route).action(action)
if not builder.hasHTTPVerb():
GET(action)
return action
return inner
###############################################################################
# HTTP Verbs ##################################################################
###############################################################################
'''
GET:
Binds a method to the HTTP GET Verb;
'''
def GET(action):
'''Binds a method and route to the HTTP Verb "GET"'''
logger.info("Configuring HTTP GET Verb for method: %s" % action.__name__)
HEAD(action)
builder = Registry.builderFor(action)
builder.httpverb("GET")
return action
'''
POST:
Binds a method to the HTTP POST Verb;
'''
def POST(action):
'''Binds a method and route to the HTTP Verb "POST"'''
logger.info("Configuring HTTP POST Verb for method: %s" % action.__name__)
HEAD(action)
builder = Registry.builderFor(action)
builder.httpverb("POST")
return action
'''
PUT:
Binds a method to the HTTP PUT Verb;
'''
def PUT(action):
'''Binds a method and route to the HTTP Verb "PUT"'''
logger.info("Configuring HTTP PUT Verb for method: %s" % action.__name__)
HEAD(action)
builder = Registry.builderFor(action)
builder.httpverb("PUT")
return action
'''
DELETE:
Binds a method to the HTTP DELETE Verb;
'''
def DELETE(action):
'''Binds a method and route to the HTTP Verb "DELETE"'''
logger.info("Configuring HTTP PUT Verb for method: %s" % action.__name__)
HEAD(action)
builder = Registry.builderFor(action)
builder.httpverb("DELETE")
return action
'''
HEAD:
Binds a method to the HTTP HEAD Verb;
'''
def HEAD(action):
'''Binds a method and route to the HTTP Verb "HEAD"'''
logger.info("Configuring HTTP HEAD Verb for method: %s" % action.__name__)
builder = Registry.builderFor(action)
builder.httpverb("HEAD")
if not builder.hasRoute():
do = Route("/")
do(action) # Make every method marked with @HEAD have a default route '/'
return action
###############################################################################
# HTTP Core Tools #############################################################
###############################################################################
'''
Application:
Primary used for grouping and running``Resource`` objects, it dispatches
HTTP requests to the appropriate handler at run time.
'''
class Application(object):
'''Used for grouping request handlers and running them'''
def __init__(self, base="/", resources=[]):
'''Initialize this application'''
        self.router = Registry.mapperFor(base, *resources) # Create a route mapper.
def before(self, request):
'''Override this to customize all requests'''
return request
def after(self, response):
'''Override this to customize all responses'''
return response
def __call__(self, environ, start_response):
'''WSGI entry point'''
request = Request(environ)
response = Response(conditional=True)
try:
result = self.router.match(environ=environ, url=request.path)
if result is None:
raise HTTPNotFound("The requested Resource [%s] was not found this server; " % request.path)
request = self.before(request) #Invoke the global request pre-processor
clasz, method = result.pop('controller'), result.pop('action')
instance = clasz(self, request, response)
instance.before() #Invoke the local request pre-processor
method(instance, **result)
instance.after() #Invoke the local response post-processor
response = self.after(response) #Invoke the global response post-processor
except HTTPException as e:
response = Response(impl=e)
response.contenttype = "text/plain"
except Exception as e:
e = HTTPInternalServerError("Internal Server Error, We'll check this out in a few hours")
response = Response(impl=e)
response.contenttype = "text/plain"
return response(environ, start_response)
'''
Registry:
Global storage for tracking ``Resource`` objects and Routes.
'''
class Registry(threading.local):
'''Global Configuration object'''
paths = {} #Maps {'/path': Resource}
handlers = {} #Maps {Resource : '/path'}
builders = {} #Cached RouteBuilders' { function : RouteBuilder} }
@classmethod
def clear(cls):
'''Clears all the entries from the Registry'''
cls.paths.clear()
cls.handlers.clear()
cls.builders.clear()
@classmethod
def addHandler(cls, base, handler):
'''Add particular handler to this base url'''
assert base.startswith("/") and handler is not None, "Bad Arguments to addHandler"
assert issubclass(handler, Resource), "@handler must be a Resource"
if base in cls.paths:
raise DuplicatePathError("Another Handler is already bound to Path: %s" % base)
logger.info("Adding %s to route => %s" % (handler.__name__, base))
cls.paths[base] = handler
cls.handlers[handler] = base
@classmethod
def handlerFor(cls, base):
'''Returns the handler for a particular path'''
return cls.paths[base]
@classmethod
def pathFor(cls, handler):
'''Returns the path for a particular Resource'''
return cls.handlers[handler]
@classmethod
def routesFor(cls, handler):
'''Returns all the ``routes.Route`` objects for a particular handler, building them on the fly'''
assert handler in cls.handlers, "Unregistered Handler Error"
for func in handler.__dict__.values():
if inspect.isfunction(func) and func in cls.builders:
builder = cls.builders[func]
yield builder.build()
@classmethod
def mapperFor(cls, base, *handlers):
'''Returns a new mapper that will match all the methods in @handlers'''
base = base.rstrip(" /")
mapper = Mapper()
for handler in handlers:
path = cls.pathFor(handler)
if base:
path = base + path
created = list(cls.routesFor(handler))
mapper.extend(created, path)
return mapper
@classmethod
def builderFor(cls, function):
'''
Returns the RouteBuilder for a particular bound method,
if no builder exists it creates one and returns it, every RouteBuilder
returned always has a Controller that is a Resource
'''
assert inspect.isfunction(function), "you can only create builder for a function"
builder = cls.builders.setdefault(function, RouteBuilder())
return builder
"""
__scan__:
searches the class for any unbound methods and creates a route builder for
each of those methods, effectively pre-registring them with ``Registry``
and setting their controllers, which will eventually allow all http bound
methods (Methods decorated with a http verb e.g. POST) to discover their
controllers (Their parent Resources).
This is useful because, at the point when function decorators on classes are
evaluated the callables they decorate are not 'class bound methods yet', so
its not possible to know the class which a decorated method is bound to at
class creation time.
"""
class __scan__(type):
def __new__(cls, name, bases, dict):
'''A meta-callable that is used to do basic registration of http bound methods in a Resource.'''
clasz = type.__new__(cls, name, bases, dict)
for v in dict.values():
if inspect.isfunction(v):
builder = Registry.builderFor(v)
builder.controller(clasz)
return clasz
'''
Resource:
The threadsafe base class for all classes that will handle HTTP requests.
'''
class Resource(threading.local):
'''The Base class for all HTTP Request Handlers'''
__metaclass__ = __scan__
def __init__(self, application, request, response):
'''Initialize the Request Handler'''
self.request = request
self.response = response
self.application = application
def redirect(self, location, code=None, permanent=True, stop=True):
'''Redirect to @location, if stop is true it breaks execution'''
if location.startswith(('.', '/')):
location = str(urlparse.urljoin(self.request.url, location))
if code is None:
code = 301 if permanent else 302
assert code in (301, 302, 303, 305, 307),'Invalid redirect status code.'
if stop:
abort(code, headers=[('Location', location)])
self.response.headers['Location'] = location
self.response.statuscode = code
def before(self):
'''Request pre-processor'''
pass
def after(self):
'''Response post-processor'''
pass
'''
Request:
    An object that encapsulates incoming HTTP requests. It provides
    convenient methods that can be used to access the properties
of the Request.
'''
class Request(object):
'''Abstracts the WSGI environ into a Request object'''
def __init__(self, environ):
'''Initialize the Environ'''
assert environ != None, "You must provide a non-null WSGI environ dict"
self.impl = impl = webob.Request(environ)
## REMAKE PROPERTIES AND RE-ASSIGN THEM TO SELF ##
makeProperty(self, "body", "impl", "body")
makeProperty(self, "host", "impl", "host")
makeProperty(self, "pathinfo", "impl", "path_info")
makeProperty(self, "scheme", "impl", "scheme")
makeProperty(self, "scriptname", "impl", "script_name")
makeProperty(self, "accept", "impl", "accept")
makeProperty(self, "headers", "impl", "headers")
makeProperty(self, "method", "impl", "method")
makeProperty(self, "charset", "impl", "charset")
makeProperty(self, "query", "impl", "query_string")
makeProperty(self, "url", "impl", "url")
makeProperty(self, "cookies", "impl", "cookies")
makeProperty(self, "accept", "impl", "accept")
makeProperty(self, "params", "impl", "params")
makeProperty(self, "path", "impl", "path")
makeProperty(self, "contenttype","impl", "content_type")
makeProperty(self, "contentlength", "impl", "content_length")
def get(self, name, default=None):
'''Returns the query or POST argument with the given name'''
params = self.getall(name)
if len(params) > 0:
return params[0]
else:
return default
def getall(self, name, default=None):
        '''Returns all the query or POST arguments with the given name'''
if self.charset:
name = name.encode(self.charset)
params = self.params.getall(name)
if params is None or len(params) == 0:
return default
for i in xrange(len(params)):
if isinstance(params[i], cgi.FieldStorage):
params[i] = params[i].value
return params
def arguments(self):
"""Returns a list of the arguments provided in the query and/or POST."""
return list(set(self.params.keys()))
def getResponse(self, application, catchExceptions=False):
'''Return a `Response` after invoking @application'''
impl = self.impl.get_response(application=application, catch_exc_info=catchExceptions)
return Response(impl=impl)
@classmethod
def blank(cls, path):
'''Useful for creating empty requests that are useful for testing.'''
impl = webob.Request.blank(path)
request = Request(impl.environ.copy())
return request
'''
Response:
    Abstracts an HTTP Response. Basically what I did here
is to make the webob API conform to our coding style
after removing deprecated components.
'''
class Response(object):
'''Represents a Response object'''
def __init__(self, body=None, status=None, conditional=None, impl=None):
'''Creates a new Response object'''
if not impl:
new = webob.Response(body=body, status=status,conditional_response=conditional)
self.impl = impl = new
else:
self.impl = impl
## REMAKE PROPERTIES AND RE-ASSIGN THEM TO SELF##
makeProperty(self, "body", "impl", "body")
makeProperty(self, "text", "impl", "text")
makeProperty(self, "status", "impl", "status")
makeProperty(self, "headers", "impl", "headers")
makeProperty(self, "bodyfile", "impl", "body_file")
makeProperty(self, "charset", "impl", "charset")
makeProperty(self, "expires","impl", "expires")
makeProperty(self, "headerlist","impl", "headerlist")
makeProperty(self, "contenttype","impl", "content_type")
makeProperty(self, "statuscode", "impl", "status_int")
makeProperty(self, "contentlength", "impl", "content_length")
makeProperty(self, "vary", "impl", "vary")
makeProperty(self, "lastmodified", "impl", "last_modified")
makeProperty(self, "date", "impl", "date")
makeProperty(self, "retryafter", "impl", "retry_after")
makeProperty(self, "location", "impl", "location")
makeProperty(self, "age", "impl", "age")
## RENAME AND REWIRE OK METHODS ##
self.write = impl.write
self.addCookie = impl.set_cookie
self.removeCookie = impl.unset_cookie
self.encodeContent = impl.encode_content
self.decodeContent = impl.decode_content
self.deleteCookieFromClient = impl.delete_cookie
    def __call__(self, environ, start_response):
        '''Every response is a WSGI compliant app also'''
        return self.impl(environ, start_response)
def clear(self):
'''Clears the body of the Response'''
self.body = ''
def md5ETag(self):
'''Generates an Md5 E-Tag for the response object'''
        self.impl.md5_etag(set_content_md5=True)
###############################################################################
# Runtime Tools. ####################################################
###############################################################################
'''
ServerAdapter:
Base class that all servers will have to conform to.
'''
class ServerAdapter(object):
quiet = False
def __init__(self, host='127.0.0.1', port=8080, **config):
self.options = config
self.host = host
self.port = int(port)
def run(self, handler): # pragma: no cover
pass
def __repr__(self):
args = ', '.join(['%s=%s'%(k,repr(v)) for k, v in self.options.items()])
return "%s(%s)" % (self.__class__.__name__, args)
'''
WSGIRefServer:
    Uses Python's built-in WSGI reference server to run Gates applications.
'''
class WSGIRefServer(ServerAdapter):
'''Single threaded WSGI server that is useful for testing'''
def run(self, handler):
from wsgiref.simple_server import make_server, WSGIRequestHandler
if self.quiet:
class QuietHandler(WSGIRequestHandler):
def log_request(*args, **kw): pass
self.options['handler_class'] = QuietHandler
srv = make_server(self.host, self.port, handler, **self.options)
srv.serve_forever()
'''
MeinheldServer:
An adapter that uses Meinheld to run a Gates application
'''
class MeinheldServer(ServerAdapter):
'''Server adapter for Meinheld'''
def run(self, handler):
'''Runs the Gates application in Meinheld'''
from meinheld import server
server.listen((self.host, self.port))
server.run(handler)
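'''
GeventServer:
    An illustrative sketch, not shipped with Gates, of how another WSGI
    server could be plugged in through the same ``ServerAdapter`` interface;
    it assumes the third-party `gevent` package is available.
'''
class GeventServer(ServerAdapter):
    '''Server adapter sketch for gevent's pywsgi server'''
    def run(self, handler):
        '''Runs the Gates application in gevent's WSGIServer'''
        from gevent.pywsgi import WSGIServer
        WSGIServer((self.host, self.port), handler).serve_forever()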
###############################################################################
# Public Helper Functions. ####################################################
###############################################################################
'''
abort:
Invoke abort to break execution and raise a valid HTTP Exception;
'''
def abort(code=500, detail="We're sorry you have to see this, we'll fix it soon", **arguments):
'''A helper that is used to raise a valid HTTPException at any point'''
cls = status_map.get(code)
if not cls:
raise KeyError('No exception is defined for code %r.' % code)
raise cls(detail=detail, **arguments)
'''
run:
A helper method that is used to run a Gates application in a particular
    server. This method blocks until the server terminates.
    @application : The Gates Application.
    @server : The Server you want to run the application in.
    @arguments: A list of other arguments you may want to pass to the Server.
    @port : The port to run the application at.
    @host : The IP address to bind the application to.
    @quiet : If true, suppress the server's request logging.
'''
def run(application=None, server=WSGIRefServer, host="127.0.0.1", port=8080, quiet=False, **arguments):
'''A helper method used to run the application'''
try:
server = server(host=host, port=port, **arguments)
server.quiet = quiet
print("\nBill Gates Invented the Web...")
print("Gates %s server starting up (using %s)..." % (__version__, repr(server)))
print("Listening on http://%s:%d/" % (server.host, server.port))
print("Hit Ctrl-C to quit.")
server.run(application)
except KeyboardInterrupt:
print("\n")
print("Closing Gates...")
###############################################################################
# Internal Helper functions ###################################################
###############################################################################
"""
makeProperty:
Creates a new property from an already defined property.
    This is used in the `Response` object to rewire defined properties
in webob.Response.
@owner: The class or instance to set the property on
@member: A str that represents the name of the new property
@instance: The member variable you want to copy the property from
@name: The name of the property in @instance.
@doc: The documentation for the new property.
Doesn't work with hidden variables.
"""
def makeProperty(owner, member, instance, name, doc=""):
'''Creates a new property from an existing property'''
    for attr in (member, instance, name):
        if attr.startswith("_"):
            raise AssertionError("No support for hidden variables")
if hasattr(owner, member):
logger.info("Property seems to exist already, skipping call")
return
fget = lambda self: getattr(getattr(self, instance), name)
fdel = lambda self: delattr(getattr(self, instance), name)
fset = lambda self, value: setattr(getattr(self, instance), name, value)
new = property(fget, fset, fdel, doc)
cls = owner if isinstance(owner, type) else owner.__class__
setattr(cls, member, new) | PypiClean |
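###############################################################################
# Usage Sketch. ###############################################################
###############################################################################
'''
The block below is an illustrative sketch, not part of the framework: the
resource name, route and messages are invented for demonstration, and the
request is exercised in-process with `Request.blank` instead of starting a
server (calling `run(demo)` would serve it over HTTP instead).
'''
if __name__ == "__main__":
    @Path('/hello')
    class HelloResource(Resource):
        '''A minimal resource defined only for this demonstration'''
        @GET
        @Route('/{name}')
        def greet(self, name):
            '''Respond to GET /hello/<name> with a plain-text greeting'''
            if name == 'forbidden':
                abort(403, detail="This name is not allowed")
            self.response.contenttype = "text/plain"
            self.response.write("Hello, %s!" % name)
    demo = Application(resources=[HelloResource])
    reply = Request.blank('/hello/world').getResponse(demo)
    print(reply.status)
    print(reply.body)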
/Beat_ML1-0.13.1.tar.gz/Beat_ML1-0.13.1/econml/orf/_causal_tree.py | import numpy as np
from sklearn.utils import check_random_state
class Node:
"""Building block of :class:`CausalTree` class.
Parameters
----------
sample_inds : array-like, shape (n, )
Indices defining the sample that the split criterion will be computed on.
estimate_inds : array-like, shape (n, )
Indices defining the sample used for calculating balance criteria.
"""
def __init__(self, sample_inds, estimate_inds):
self.feature = -1
self.threshold = np.inf
self.split_sample_inds = sample_inds
self.est_sample_inds = estimate_inds
self.left = None
self.right = None
def find_tree_node(self, value):
"""
Recursively find and return the node of the causal tree that corresponds
to the input feature vector.
Parameters
----------
value : array-like, shape (d_x,)
Feature vector whose node we want to find.
"""
if self.feature == -1:
return self
elif value[self.feature] < self.threshold:
return self.left.find_tree_node(value)
else:
return self.right.find_tree_node(value)
class CausalTree:
"""Base class for growing an OrthoForest.
Parameters
----------
nuisance_estimator : method
Method that estimates the nuisances at each node.
Takes in (Y, T, X, W) and returns nuisance estimates.
parameter_estimator : method
Method that estimates the parameter of interest at each node.
Takes in (Y, T, nuisance_estimates) and returns the parameter estimate.
moment_and_mean_gradient_estimator : method
Method that estimates the moments and mean moment gradient at each node.
Takes in (Y, T, X, W, nuisance_estimates, parameter_estimate) and returns
the moments and the mean moment gradient.
min_leaf_size : integer, optional (default=10)
The minimum number of samples in a leaf.
max_depth : integer, optional (default=10)
The maximum number of splits to be performed when expanding the tree.
n_proposals : int, optional (default=1000)
Number of split proposals to be considered. A smaller number will improve
execution time, but might reduce accuracy of prediction.
balancedness_tol : float, optional (default=.3)
Tolerance for balance between child nodes in a split. A smaller value
will result in an unbalanced tree prone to overfitting. Has to lie
between 0 and .5 as it is used to control both directions of imbalancedness.
With the default value we guarantee that each child of a split contains
at least 20% and at most 80% of the data of the parent node.
random_state : int, :class:`~numpy.random.mtrand.RandomState` instance or None, optional (default=None)
If int, random_state is the seed used by the random number generator;
If :class:`~numpy.random.mtrand.RandomState` instance, random_state is the random number generator;
If None, the random number generator is the :class:`~numpy.random.mtrand.RandomState` instance used
by :mod:`np.random<numpy.random>`.
"""
def __init__(self,
min_leaf_size=10,
max_depth=10,
n_proposals=1000,
balancedness_tol=.3,
random_state=None):
# Causal tree parameters
self.min_leaf_size = min_leaf_size
self.max_depth = max_depth
self.balancedness_tol = balancedness_tol
self.n_proposals = n_proposals
self.random_state = check_random_state(random_state)
# Tree structure
self.tree = None
def create_splits(self, Y, T, X, W,
nuisance_estimator, parameter_estimator, moment_and_mean_gradient_estimator):
"""
Recursively build a causal tree.
Parameters
----------
Y : array-like, shape (n, d_y)
Outcome for the treatment policy.
T : array-like, shape (n, d_t)
Treatment policy.
X : array-like, shape (n, d_x)
Feature vector that captures heterogeneity.
W : array-like, shape (n, d_w) or None (default=None)
High-dimensional controls.
"""
# No need for a random split since the data is already
# a random subsample from the original input
n = Y.shape[0] // 2
self.tree = Node(np.arange(n), np.arange(n, Y.shape[0]))
        # node list stores the nodes that are yet to be split
node_list = [(self.tree, 0)]
while len(node_list) > 0:
node, depth = node_list.pop()
# If by splitting we have too small leaves or if we reached the maximum number of splits we stop
if node.split_sample_inds.shape[0] // 2 >= self.min_leaf_size and depth < self.max_depth:
# Create local sample set
node_X = X[node.split_sample_inds]
node_W = W[node.split_sample_inds] if W is not None else None
node_T = T[node.split_sample_inds]
node_Y = Y[node.split_sample_inds]
node_X_estimate = X[node.est_sample_inds]
node_size_split = node_X.shape[0]
node_size_est = node_X_estimate.shape[0]
# Compute nuisance estimates for the current node
nuisance_estimates = nuisance_estimator(node_Y, node_T, node_X, node_W)
if nuisance_estimates is None:
# Nuisance estimate cannot be calculated
continue
# Estimate parameter for current node
node_estimate = parameter_estimator(node_Y, node_T, node_X, nuisance_estimates)
if node_estimate is None:
# Node estimate cannot be calculated
continue
# Calculate moments and gradient of moments for current data
moments, mean_grad = moment_and_mean_gradient_estimator(
node_Y, node_T, node_X, node_W,
nuisance_estimates,
node_estimate)
# Calculate inverse gradient
try:
inverse_grad = np.linalg.inv(mean_grad)
except np.linalg.LinAlgError as exc:
if 'Singular matrix' in str(exc):
# The gradient matrix is not invertible.
# No good split can be found
continue
else:
raise exc
# Calculate point-wise pseudo-outcomes rho
rho = np.matmul(moments, inverse_grad)
# a split is determined by a feature and a sample pair
# the number of possible splits is at most (number of features) * (number of node samples)
n_proposals = min(self.n_proposals, node_X.size)
# we draw random such pairs by drawing a random number in {0, n_feats * n_node_samples}
random_pair = self.random_state.choice(node_X.size, size=n_proposals, replace=False)
# parse row and column of random pair
thr_inds, dim_proposals = np.unravel_index(random_pair, node_X.shape)
                # the proposed threshold is the value of the sampled feature at the sampled row
thr_proposals = node_X[thr_inds, dim_proposals]
# calculate the binary indicator of whether sample i is on the left or the right
# side of proposed split j. So this is an n_samples x n_proposals matrix
side = node_X[:, dim_proposals] < thr_proposals
# calculate the number of samples on the left child for each proposed split
size_left = np.sum(side, axis=0)
# calculate the analogous binary indicator for the samples in the estimation set
side_est = node_X_estimate[:, dim_proposals] < thr_proposals
# calculate the number of estimation samples on the left child of each proposed split
size_est_left = np.sum(side_est, axis=0)
# find the upper and lower bound on the size of the left split for the split
# to be valid so as for the split to be balanced and leave at least min_leaf_size
# on each side.
lower_bound = max((.5 - self.balancedness_tol) * node_size_split, self.min_leaf_size)
upper_bound = min((.5 + self.balancedness_tol) * node_size_split, node_size_split - self.min_leaf_size)
valid_split = (lower_bound <= size_left)
valid_split &= (size_left <= upper_bound)
# similarly for the estimation sample set
lower_bound_est = max((.5 - self.balancedness_tol) * node_size_est, self.min_leaf_size)
upper_bound_est = min((.5 + self.balancedness_tol) * node_size_est, node_size_est - self.min_leaf_size)
valid_split &= (lower_bound_est <= size_est_left)
valid_split &= (size_est_left <= upper_bound_est)
# if there is no valid split then don't create any children
if ~np.any(valid_split):
continue
# filter only the valid splits
valid_dim_proposals = dim_proposals[valid_split]
valid_thr_proposals = thr_proposals[valid_split]
valid_side = side[:, valid_split]
valid_size_left = size_left[valid_split]
valid_side_est = side_est[:, valid_split]
# calculate the average influence vector of the samples in the left child
left_diff = np.matmul(rho.T, valid_side)
# calculate the average influence vector of the samples in the right child
right_diff = np.matmul(rho.T, 1 - valid_side)
# take the square of each of the entries of the influence vectors and normalize
# by size of each child
left_score = left_diff**2 / valid_size_left.reshape(1, -1)
right_score = right_diff**2 / (node_size_split - valid_size_left).reshape(1, -1)
# calculate the vector score of each candidate split as the average of left and right
# influence vectors
spl_score = (right_score + left_score) / 2
# eta specifies how much weight to put on individual heterogeneity vs common heterogeneity
# across parameters. we give some benefit to individual heterogeneity factors for cases
# where there might be large discontinuities in some parameter as the conditioning set varies
eta = np.random.uniform(0.25, 1)
# calculate the scalar score of each split by aggregating across the vector of scores
split_scores = np.max(spl_score, axis=0) * eta + np.mean(spl_score, axis=0) * (1 - eta)
                # Find the split that maximizes the heterogeneity score
best_split_ind = np.argmax(split_scores)
node.feature = valid_dim_proposals[best_split_ind]
node.threshold = valid_thr_proposals[best_split_ind]
# Create child nodes with corresponding subsamples
left_split_sample_inds = node.split_sample_inds[valid_side[:, best_split_ind]]
left_est_sample_inds = node.est_sample_inds[valid_side_est[:, best_split_ind]]
node.left = Node(left_split_sample_inds, left_est_sample_inds)
right_split_sample_inds = node.split_sample_inds[~valid_side[:, best_split_ind]]
right_est_sample_inds = node.est_sample_inds[~valid_side_est[:, best_split_ind]]
node.right = Node(right_split_sample_inds, right_est_sample_inds)
# add the created children to the list of not yet split nodes
node_list.append((node.left, depth + 1))
node_list.append((node.right, depth + 1))
def print_tree_rec(self, node):
if not node:
return
print("Node: ({}, {})".format(node.feature, node.threshold))
print("Left Child")
self.print_tree_rec(node.left)
print("Right Child")
self.print_tree_rec(node.right)
def print_tree(self):
self.print_tree_rec(self.tree)
def find_split(self, value):
return self.tree.find_tree_node(value.astype(np.float64)) | PypiClean |
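# A usage sketch follows; the three callbacks are illustrative assumptions for
# a toy constant-treatment-effect moment condition E[(Y - theta*T) * T] = 0 on
# each node, not the estimators that OrthoForest actually supplies to this
# class.
if __name__ == "__main__":
    def _nuisance_estimator(Y, T, X, W):
        # The toy moment condition needs no nuisance estimates.
        return []
    def _parameter_estimator(Y, T, X, nuisances):
        # Solve the empirical moment condition for theta in closed form.
        return np.array([np.sum(Y * T) / np.sum(T * T)])
    def _moment_and_mean_gradient_estimator(Y, T, X, W, nuisances, estimate):
        # Point-wise moments and the mean gradient of the moment in theta.
        moments = ((Y - estimate[0] * T) * T).reshape(-1, 1)
        mean_gradient = np.array([[-np.mean(T * T)]])
        return moments, mean_gradient
    rng = np.random.RandomState(0)
    n = 400
    X = rng.normal(size=(n, 2))
    T = rng.binomial(1, 0.5, size=n).astype(float)
    # The simulated effect is larger when the first feature is positive.
    Y = (1.0 + (X[:, 0] > 0)) * T + rng.normal(scale=0.1, size=n)
    tree = CausalTree(min_leaf_size=20, max_depth=3, random_state=1)
    tree.create_splits(Y, T, X, None,
                       _nuisance_estimator,
                       _parameter_estimator,
                       _moment_and_mean_gradient_estimator)
    tree.print_tree()
    # Locate the leaf for a new point and report its local effect estimate.
    leaf = tree.tree.find_tree_node(np.array([1.0, 0.0]))
    print(_parameter_estimator(Y[leaf.est_sample_inds], T[leaf.est_sample_inds],
                               X[leaf.est_sample_inds], []))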
/HTSQL-2.3.3.tar.gz/HTSQL-2.3.3/src/htsql/core/tr/flow.py | from ..util import (maybe, listof, tupleof, Clonable, Hashable, Printable,
cachedproperty)
from ..entity import TableEntity, ColumnEntity, Join
from ..domain import Domain, BooleanDomain, ListDomain, IdentityDomain
from ..error import point
from .binding import Binding, QueryBinding, SegmentBinding
from .signature import Signature, Bag, Formula
class Expression(Hashable, Clonable, Printable):
"""
Represents an expression node.
This is an abstract class; most of its subclasses belong to one of the
two categories: *flow* and *code* nodes (see :class:`Flow` and
:class:`Code`).
A flow graph is an intermediate phase of the HTSQL translator. It is
translated from the binding graph by the *encoding* process. The flow
graph is used to *compile* the term tree and then *assemble* the frame
structure.
A flow graph reflects the flow structure of the HTSQL query: each
expression node represents either a data flow or an expression over
a data flow.
Expression nodes support equality by value: that is, two expression
nodes are equal if they are of the same type and all their (essential)
attributes are equal. Some attributes (e.g. `binding`) are not
considered essential and do not participate in comparison. By-value
semantics is respected when expression nodes are used as dictionary
keys.
The constructor arguments:
`binding` (:class:`htsql.core.tr.binding.Binding`)
The binding node that gave rise to the expression; should be used
only for presentation or error reporting.
Other attributes:
`syntax` (:class:`htsql.core.tr.syntax.Syntax`)
The syntax node that gave rise to the expression; for debugging
purposes only.
`mark` (:class:`htsql.core.mark.Mark`)
The location of the node in the original query; for error reporting.
`hash` (an integer)
The node hash; if two nodes are considered equal, their hashes
must be equal too.
"""
def __init__(self, binding):
assert isinstance(binding, Binding)
self.binding = binding
self.syntax = binding.syntax
point(self, binding)
def __str__(self):
        # Display the syntax node that gave rise to the expression.
return str(self.syntax)
class QueryExpr(Expression):
"""
Represents the whole HTSQL query.
`segment` (:class:`SegmentCode` or ``None``)
The query segment.
"""
def __init__(self, segment, binding):
assert isinstance(segment, maybe(SegmentCode))
assert isinstance(binding, QueryBinding)
super(QueryExpr, self).__init__(binding)
self.segment = segment
class Family(object):
"""
Represents the target class of a flow.
The flow family specifies the type of values produced by
a flow. There are three distinct flow families:
- *scalar*, which indicates that the flow produces
scalar values;
- *table*, which indicates that the flow produces
records from a database table;
- *quotient*, which indicates that the flow produces
records from a derived *quotient* class.
Class attributes:
`is_scalar` (Boolean)
Set for a scalar family.
`is_table` (Boolean)
Set for a table family.
`is_quotient` (Boolean)
Set for a quotient family.
"""
is_scalar = False
is_table = False
is_quotient = False
class ScalarFamily(Family):
"""
Represents a scalar flow family.
A scalar flow produces values of a primitive type.
"""
is_scalar = True
class TableFamily(Family):
"""
Represents a table flow family.
A table flow produces records from a database table.
`table` (:class:`htsql.core.entity.TableEntity`)
The table.
"""
is_table = True
def __init__(self, table):
assert isinstance(table, TableEntity)
self.table = table
class QuotientFamily(Family):
"""
Represents a quotient flow family.
A quotient flow produces records from a derived quotient class.
The quotient class contains records formed from the kernel expressions
as they run over the `seed` flow.
`seed` (:class:`Flow`)
The dividend flow.
`ground` (:class:`Flow`)
The ground flow of the dividend.
`kernels` (list of :class:`Code`)
The kernel expressions of the quotient.
"""
is_quotient = True
def __init__(self, seed, ground, kernels):
assert isinstance(seed, Flow)
assert isinstance(ground, Flow)
assert ground.is_axis and seed.concludes(ground)
assert isinstance(kernels, listof(Code))
self.seed = seed
self.ground = ground
self.kernels = kernels
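def describe_family(family):
    """
    A small illustrative helper, not used by HTSQL itself: dispatch on the
    three family kinds described above and return the data that characterizes
    each of them.
    """
    assert isinstance(family, Family)
    if family.is_table:
        return ('table', family.table)
    if family.is_quotient:
        return ('quotient', family.seed, family.kernels)
    return ('scalar',)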
class Flow(Expression):
"""
Represents a flow node.
A data flow is a sequence of homogeneous values. A flow is generated
by a series of flow operations applied sequentially to the root flow.
Each flow operation takes an input flow as an argument and produces
an output flow as a result. The operation transforms each element
from the input row into zero, one, or more elements of the output
flow; the generating element is called *the origin* of the generated
elements. Thus, with every element of a flow, we could associate
a sequence of origin elements, one per each elementary flow operation
that together produce the flow.
Each instance of :class:`Flow` represents a single flow operation
applied to some input flow. The `base` attribute of the instance
represents the input flow while the type of the instance and the
    other attributes reflect the properties of the operation.  The root
    flow is denoted by an instance of :class:`RootFlow`; different
subclasses of :class:`Flow` correspond to different types of
flow operations.
The type of values produced by a flow is indicated by the `family`
attribute. We distinguish three flow families: *scalar*, *table*
and *quotient*. A scalar flow produces values of an elementary data
type; a table flow produces records of some table; a quotient flow
produces elements of a derived quotient class.
Among others, we consider the following flow operations:
*The root flow* `I`
The initial flow that contains one empty record.
*A direct product* `A * T`
Given a scalar flow `A` and a table `T`, the direct product
`A * T` generates all records of `T` for each element of `A`.
*A fiber product* `A . T`
Given an input flow `A` that produces records of some table `S`
and a table `T` linked to `S`, for each element of `A`,
the fiber product `A . T` generates all associated records
from `T`.
*Filtering* `A ? p`
Given a flow `A` and a predicate `p` defined on `A`,
the filtered flow `A ? p` consists of all elements of `A`
satisfying condition `p`.
*Ordering* `A [e,...]`
Given a flow `A` and a list of expressions `e,...`, the
ordered flow `A [e,...]` consists of elements of `A` reordered
by the values of `e,...`.
*Quotient* `A ^ k`
Given a flow `A` and a kernel expression `k` defined on `A`,
a quotient `A ^ k` produces all unique values of the kernel
as it runs over `A`.
Flow operations for which the output flow does not consist of
elements of the input flow are called *axial*. If we take an
arbitrary flow `A`, disassemble it into individual operations,
and then reapply only axial operations, we get the new flow `A'`,
which we call *the inflation* of `A`. Note that elements of `A`
form a subset of elements of `A'`.
Now we can establish how different flows are related to each other.
Formally, for each pair of flows `A` and `B`, we define a relation
`<->` ("converges to") on elements from `A` and `B`, that is,
a subset of the Cartesian product `A x B`, by the following rules:
(1) For any flow `A`, `<->` is the identity relation on `A`,
that is, each element converges only to itself.
For a flow `A` and its inflation `A'`, each element from `A`
converges to an equal element from `A'`.
(2) Suppose `A` and `B` are flows such that `A` is produced
from `B` as a result of some axial flow operation. Then
each element from `A` converges to its origin element
from `B`.
By transitivity, we could extend `<->` on `A` and any of its
*ancestor flows*, that is, the parent flow of `A`, the
parent of the parent of `A` and so on.
In particular, this defines `<->` on an arbitrary flow `A`
and the root flow `I` since `I` is an ancestor of any flow.
By the above definition, any element of `A` converges to
the (only) record of `I`.
(3) Finally, we are ready to define `<->` on an arbitrary pair
of flows `A` and `B`. First, suppose that `A` and `B`
share the same inflated flow: `A' = B'`. Then we could
define `<->` on `A` and `B` transitively via `A'`: `a` from `A`
converges to `b` from `B` if there exists `a'` from `A'` such
that `a <-> a'` and `a' <-> b`.
In the general case, find the closest ancestors `C` of `A`
and `D` of `B` such that `C` and `D` have the same
inflated flow: `C' = D'`. Rules `(1)` and `(2)` establish
`<->` for the pairs `A` and `C`, `C` and `C' = D'`,
`C' = D'` and `D`, and `D` and `B`. We define `<->`
on `A` and `B` transitively: `a` from `A` converges to
`b` from `B` if there exist elements `c` from `C`,
`c'` from `C' = D'`, `d` from `D` such that
`a <-> c <-> c' <-> d <-> b`.
Note that it is important that we take among the common inflated
ancestors the closest one. Any two flows have a common inflated
ancestor: the root flow. If the root flow is, indeed, the closest
common inflated ancestor of `A` and `B`, then each element of `A`
converges to every element of `B`.
Now we are ready to introduce several important relations between
flows:
`A` *spans* `B`
A flow `A` spans a flow `B` if for every element `a` from `A`:
`card { b` from `B | a <-> b } <= 1`.
Informally, it means that the statement::
SELECT * FROM A
and the statement::
SELECT * FROM A LEFT OUTER JOIN B ON (A <-> B)
produce the same number of rows.
`A` *dominates* `B`
A flow `A` dominates a flow `B` if `A` spans `B` and
for every element `b` from `B`:
`card { a` from `A | a <-> b } >= 1`.
Informally, it implies that the statement::
SELECT * FROM B INNER JOIN A ON (A <-> B)
and the statement::
SELECT * FROM B LEFT OUTER JOIN A ON (A <-> B)
produce the same number of rows.
`A` *conforms* `B`
A flow `A` conforms a flow `B` if `A` dominates `B`
and `B` dominates `A`. Alternatively, we could say
`A` conforms `B` if the `<->` relation establishes
a bijection between `A` and `B`.
Informally, it means that the statement::
SELECT * FROM A
and the statement::
SELECT * FROM B
produce the same number of rows.
Note that `A` conforming `B` is not the same as `A` being equal
to `B`; even if `A` conforms `B`, elements of `A` and `B` may
be of different types, therefore as sets, they are different.
Now take an arbitrary flow `A` and its parent flow `B`. We say:
`A` *contracts* `B`
A flow `A` contracts its parent `B` if for any element from `B`
there is no more than one converging element from `A`.
Typically, it is non-axis flows that contract their bases,
although in some cases, an axis flow could do it too.
`A` *expands* `B`
A flow `A` expands its parent `B` if for any element from `B`
there is at least one converging element from `A`.
Note that it is possible that a flow `A` both contracts and
expands its base `B`, and also that `A` neither contracts
nor expands `B`. The former means that `A` conforms `B`.
The latter holds, in particular, for the direct table flow
`A * T`. `A * T` violates the contraction condition when
`T` contains more than one record and violates the expansion
condition when `T` has no records.
A few words about how elements of a flow are ordered. The default
(also called *weak*) ordering rules are:
- a table flow `T = I * T` is sorted by the lexicographic order
of the table primary key;
- a non-axial flow keeps the order of its base;
    - an axial table flow `A * T` or `A . T` respects the order of its
base `A`; records with the same origin are sorted by the table order.
An alternative sort order could be specified explicitly (also called
*strong* ordering). Whenever strong ordering is specified, it
overrides the weak ordering. Thus, elements of an ordered flow `A [e]`
are sorted first by expression `e`, and then elements which are not
differentiated by `e` are sorted using the weak ordering of `A`.
However, if `A` already has a strong ordering, it must be respected.
Therefore, the general rule for sorting `A [e]` is:
- first, sort the flow by the strong ordering of `A`;
- then, by `e`;
- finally, by the weak ordering of `A`.
Class attributes:
`is_axis` (Boolean)
Indicates whether the flow is axial, that is, the elements
of the flow do not necessarily coincide with their origins.
`is_root` (Boolean)
Indicates that the flow is the root flow.
The constructor arguments:
`base` (:class:`Flow` or ``None``)
The parent input flow; ``None`` for the root flow.
`family` (:class:`Family`)
Specifies the type of the elements produced by the flow.
`is_contracting` (Boolean)
Indicates if the flow contracts its base flow.
`is_expanding` (Boolean)
Indicates if the flow expands its base flow.
Other attributes:
`is_inflated` (Boolean)
Indicates if the flow is an inflation, that is, this flow
operation and all its ancestors are axial.
"""
is_axis = False
is_root = False
is_commutative = True
def __init__(self, base, family, is_contracting, is_expanding, binding):
assert isinstance(base, maybe(Flow))
assert isinstance(family, Family)
assert isinstance(is_contracting, bool)
assert isinstance(is_expanding, bool)
super(Flow, self).__init__(binding)
self.base = base
self.family = family
self.is_contracting = is_contracting
self.is_expanding = is_expanding
# Indicates that the flow itself and all its ancestors are axes.
self.is_inflated = (self.is_root or
(base.is_inflated and self.is_axis))
def unfold(self):
"""
Produces a list of ancestor flows.
The method returns a list composed of the flow itself,
its base, the base of its base and so on.
"""
ancestors = []
ancestor = self
while ancestor is not None:
ancestors.append(ancestor)
# Note: `ancestor.base` is None for the root flow.
ancestor = ancestor.base
return ancestors
def resembles(self, other):
"""
Verifies if the flows represent the same operation.
Typically, it means that `self` and `other` have the same type
and equal attributes, but may have different bases.
"""
# We rely upon an assumption that the equality vector of a flow node
# is a tuple of all its essential attributes and the first element
# of the tuple is the flow base. So we skip the base flow and
# compare the remaining attributes.
if not isinstance(other, self.__class__):
return False
try:
_basis = self._basis
except AttributeError:
self._rehash()
_basis = self._basis
try:
_other_basis = other._basis
except AttributeError:
other._rehash()
_other_basis = other._basis
return (_basis[1:] == _other_basis[1:])
def inflate(self):
"""
Produces the inflation of the flow.
If we represent a flow as a series of operations sequentially
applied to the scalar flow, the inflation of the flow is obtained
by ignoring any non-axial operations and applying axial operations
only.
"""
# Shortcut: check if the flow is already an inflation.
if self.is_inflated:
return self
# This is going to become a new inflated flow.
flow = None
# Iterate over all ancestors starting from the scalar flow.
for ancestor in reversed(self.unfold()):
# Skip non-axial operations, reapply axial operations to
# a new base.
if ancestor.is_axis:
flow = ancestor.clone(base=flow)
# This is the inflated flow now.
return flow
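# Illustrative note: for a flow written as (((I * s) ? f) . d), the
# inflation skips the non-axial filter and reapplies only the axial
# operations, giving ((I * s) . d).  `s`, `d` and `f` are hypothetical.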
def prune(self, other):
"""
Prunes shared non-axial operations.
Given flows `A` and `B`, this function produces a new flow
`A'` such that `A` is a subset of `A'` and the convergence
of `A` and `B` coincides with the convergence of `A'` and `B`.
This is done by pruning any non-axial operations of `A` that
also occur in `B`.
"""
# Sanity check on the argument.
assert isinstance(other, Flow)
# Shortcut: we cannot further prune an inflated flow.
if self.is_inflated:
return self
# Unfold the flows into individual operations.
my_ancestors = self.unfold()
their_ancestors = other.unfold()
# This is going to become the pruned flow.
flow = None
# Iterate until the ancestors are exhausted or diverged.
while my_ancestors and their_ancestors:
# Get the next operation.
my_ancestor = my_ancestors[-1]
their_ancestor = their_ancestors[-1]
# Compare the ancestors.
if my_ancestor.resembles(their_ancestor):
# So both ancestors represent the same operation.
# If it is an axis operation, apply it; otherwise,
# discard it.
# FIXME: may break if the flow contains a non-matching
# `limit/offset` operation?
if not (my_ancestor.is_commutative or
my_ancestor == their_ancestor):
return self
if my_ancestor.is_axis:
flow = my_ancestor.clone(base=flow)
my_ancestors.pop()
their_ancestors.pop()
elif not their_ancestor.is_axis:
# The ancestors represent different operations and `B`'s
# ancestor is not an axis. Discard it, we will try the
# next ancestor.
# FIXME: we may miss an opportunity to compare `B`'s ancestor
# with other `A`'s ancestors. It is not a big deal though,
# we do not need to generate an optimal result here.
their_ancestors.pop()
elif not my_ancestor.is_axis:
# The ancestors represent different operations, `B`'s ancestor
# is an axis, and `A`'s ancestor is not. Here we apply the
# `A`'s ancestor.
if not my_ancestor.is_commutative:
return self
flow = my_ancestor.clone(base=flow)
my_ancestors.pop()
else:
# The ancestors are both axial and differ from each other.
# At this point, the ancestors diverge and are not
# comparable anymore. Break from the loop.
break
# Reapply the unprocessed ancestors.
while my_ancestors:
my_ancestor = my_ancestors.pop()
if not my_ancestor.is_commutative:
return self
flow = my_ancestor.clone(base=flow)
# We have a pruned flow here.
return flow
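# Illustrative note: pruning ((I * s) ? f) against (((I * s) ? f) . d)
# discards the filter shared by both flows and returns (I * s); by the
# contract above, the result converges with the second flow exactly
# where the original did.  `s`, `d` and `f` are hypothetical.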
def spans(self, other):
"""
Verifies if the flow spans another flow.
"""
# Sanity check on the argument.
assert isinstance(other, Flow)
# Shortcut: any flow spans itself.
if self == other:
return True
# Extract axial ancestors from both flows.
my_axes = [ancestor for ancestor in self.unfold()
if ancestor.is_axis]
their_axes = [ancestor for ancestor in other.unfold()
if ancestor.is_axis]
# Iterate until the axes are exhausted or diverged.
while my_axes and their_axes:
# Check if the next pair of axes represent the same operation.
if my_axes[-1].resembles(their_axes[-1]):
# If so, go compare the next pair of axes.
my_axes.pop()
their_axes.pop()
else:
# Otherwise, the axes diverge.
break
# At this point, all processed pairs of axes converge by identity.
# If the other flow has no more axes left, it is spanned. Otherwise,
# it is spanned only if its remaining unprocessed axes represent
# contracting operations.
for their_axis in their_axes:
if not their_axis.is_contracting:
return False
return True
def conforms(self, other):
"""
Verifies if the flow conforms another flow.
"""
# Sanity check on the argument.
assert isinstance(other, Flow)
# Shortcut: any flow conforms itself.
if self == other:
return True
# Unfold the flows into individual operations.
my_ancestors = self.unfold()
their_ancestors = other.unfold()
# Iterate until the ancestors are exhausted or diverged.
while my_ancestors and their_ancestors:
# Get the next pair of ancestors.
my_ancestor = my_ancestors[-1]
their_ancestor = their_ancestors[-1]
# Compare the ancestors.
if my_ancestor.resembles(their_ancestor):
# If the ancestors represent the same operation, we could
# proceed to the next pair of ancestors.
my_ancestors.pop()
their_ancestors.pop()
elif (my_ancestor.is_contracting and
my_ancestor.is_expanding and
not my_ancestor.is_axis):
# Ok, the ancestors represent different operations, but
# one of them is not an axis and does not change the
# cardinality of its base. We could skip this ancestor
# and proceed further.
my_ancestors.pop()
elif (their_ancestor.is_contracting and
their_ancestor.is_expanding and
not their_ancestor.is_axis):
# Same with the other ancestor.
their_ancestors.pop()
else:
# The ancestors start to diverge; break from the loop.
break
# If all ancestors are processed, the flows conform each other.
# Otherwise, they conform each other only if the remaining unprocessed
# ancestors do not change the cardinality of their bases.
for ancestor in my_ancestors + their_ancestors:
if not (ancestor.is_contracting and ancestor.is_expanding):
return False
return True
def dominates(self, other):
"""
Verifies if the flow dominates another flow.
"""
# Sanity check on the argument.
assert isinstance(other, Flow)
# Shortcut: any flow dominates itself.
if self == other:
return True
# Unfold the flows into individual operations.
my_ancestors = self.unfold()
their_ancestors = other.unfold()
# Iterate until the ancestors are exhausted or diverged.
while my_ancestors and their_ancestors:
# Get the next pair of ancestors.
my_ancestor = my_ancestors[-1]
their_ancestor = their_ancestors[-1]
# Compare the ancestors.
if my_ancestor.resembles(their_ancestor):
# If the ancestors represent the same operation, we could
# proceed to the next pair of ancestors.
my_ancestors.pop()
their_ancestors.pop()
elif their_ancestor.is_contracting and not their_ancestor.is_axis:
# We got ancestors representing different operations; however
# the dominated ancestor represents a non-axis operation that
# does not increase the cardinality of its base. Therefore
# we could ignore this ancestor and proceed further.
their_ancestors.pop()
else:
# The ancestors start to diverge; break from the loop.
break
# If all ancestors are processed, the flow dominates the other.
# Otherwise, it is only possible if the remaining ancestors of
# the flow do not decrease the base cardinality while the
# remaining ancestors of the other flow do not increase the
# base cardinality.
for my_ancestor in my_ancestors:
if not my_ancestor.is_expanding:
return False
for their_ancestor in their_ancestors:
if not their_ancestor.is_contracting:
return False
return True
def concludes(self, other):
"""
Verifies if the other flow is an ancestor of the flow.
"""
# Sanity check on the argument.
assert isinstance(other, Flow)
# Iterate over all ancestors of the flow comparing them with
# the given other flow.
flow = self
while flow is not None:
if flow == other:
return True
flow = flow.base
# None of the ancestors matched, the flows must be unrelated.
return False
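# Standalone sketch (not used by this module) of the ancestor-stack
# comparison that `prune`, `spans` and `conforms` perform: unfold both
# operands and walk the stacks from the root, popping operations that
# match.  Plain values stand in for flow operations here.
def _example_common_ancestor_prefix(my_ancestors, their_ancestors):
    # Both lists are ordered like `Flow.unfold()`: the flow itself first,
    # the root flow last; matching operations are popped from the root end.
    mine = list(my_ancestors)
    theirs = list(their_ancestors)
    shared = []
    while mine and theirs and mine[-1] == theirs[-1]:
        shared.append(mine.pop())
        theirs.pop()
    return shared, mine, theirs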
class RootFlow(Flow):
"""
Represents a root scalar flow.
A root flow `I` contains one record ``()``. Any other flow is generated
by applying a sequence of elementary flow operations to `I`.
`base` (always ``None``)
The root flow (and only the root flow) has no parent flow.
"""
# Scalar flow is an axial flow.
is_axis = True
is_root = True
def __init__(self, base, binding):
# We keep `base` among constructor arguments despite it always being
# equal to `None` to make
# flow = flow.clone(base=new_base)
# work for all types of flows.
assert base is None
# Note that we must satisfy the assumption that the first element
# of the equality vector is the flow base (used by `Flow.resembles`).
super(RootFlow, self).__init__(
base=None,
family=ScalarFamily(),
is_contracting=False,
is_expanding=False,
binding=binding)
def __basis__(self):
return (self.base,)
def __str__(self):
# Display a table expression in an algebraic form.
return "I"
class ScalarFlow(Flow):
"""
Represents a link to the scalar class.
Traversing a link to the scalar class produces an empty record ``()``
for each element of the input flow.
"""
is_axis = True
def __init__(self, base, binding):
super(ScalarFlow, self).__init__(
base=base,
family=ScalarFamily(),
is_contracting=True,
is_expanding=True,
binding=binding)
def __basis__(self):
return (self.base,)
def __str__(self):
# Display:
# (<base> * I)
return "(%s * I)" % self.base
class TableFlow(Flow):
"""
Represents a product of an input flow to a table.
A product operation generates a subset of a Cartesian product
between the base flow and records of a table. This is an abstract
class, see concrete subclasses :class:`DirectTableFlow` and
:class:`FiberTableFlow`.
`table` (:class:`htsql.core.entity.TableEntity`)
The table.
"""
# All subclasses of `TableFlow` are axial flows.
is_axis = True
class DirectTableFlow(TableFlow):
"""
Represents a direct product between a scalar flow and a table.
A direct product `A * T` produces all records of the table `T`
for each element of the input flow `A`.
`base` (:class:`Flow`)
The base flow.
`table` (:class:`htsql.core.entity.TableEntity`)
The table.
"""
def __init__(self, base, table, binding):
assert isinstance(base, Flow) and base.family.is_scalar
super(DirectTableFlow, self).__init__(
base=base,
family=TableFamily(table),
is_contracting=False,
is_expanding=False,
binding=binding)
self.table = table
def __basis__(self):
return (self.base, self.table)
def __str__(self):
# Display:
# (<base> * <schema>.<table>)
return "(%s * %s)" % (self.base, self.family.table)
class FiberTableFlow(TableFlow):
"""
Represents a fiber product between a table flow and a linked table.
Let `A` be a flow producing records of table `S` and let `j` be a join
condition between tables `S` and `T`.  A fiber product `A .j T`
(or `A . T` when the join condition is implied) of the flow `A`
and the table `T` generates, for each record of `A`, all records
of `T` satisfying the join condition `j`.
`base` (:class:`Flow`)
The base flow.
`join` (:class:`htsql.core.entity.Join`)
The join condition.
"""
def __init__(self, base, join, binding):
assert isinstance(join, Join)
# Check that the join origin is the table of the base flow.
assert isinstance(base, Flow) and base.family.is_table
assert base.family.table is join.origin
super(FiberTableFlow, self).__init__(
base=base,
family=TableFamily(join.target),
is_contracting=join.is_contracting,
is_expanding=join.is_expanding,
binding=binding)
self.join = join
def __basis__(self):
return (self.base, self.join)
def __str__(self):
# Display:
# (<base> . <schema>.<table>)
return "(%s . %s)" % (self.base, self.family.table)
class QuotientFlow(Flow):
"""
Represents a quotient operation.
A quotient operation takes three arguments: an input flow `A`,
a seed flow `S`, which should be a descendant of the input flow,
and a kernel expression `k` on the seed flow. For each element
of the input flow, the output flow `A . (S ^ k)` generates unique
values of `k` as it runs over convergent elements of `S`.
`base` (:class:`Flow`)
The base flow.
`seed` (:class:`Flow`)
The seed flow of the quotient; must be a descendant
of the base flow.
`kernels` (a list of :class:`Code`)
Kernel expressions of the quotient.
`companions` (a list of :class:`Code`)
An auxiliary hint to the compiler indicating that the term
representing the flow needs to export extra aggregate units.
The value of this attribute has no effect on the semantics of
the flow graph and node comparison.
Other attributes:
`ground` (:class:`Flow`)
The closest axial ancestor of `seed` that is spanned
by the `base` flow.
"""
is_axis = True
def __init__(self, base, seed, kernels, binding,
companions=[]):
assert isinstance(base, Flow)
assert isinstance(seed, Flow)
# Check that `seed` is a plural descendant of `base`.
assert seed.spans(base)
assert not base.spans(seed)
assert isinstance(kernels, listof(Code))
assert isinstance(companions, listof(Code))
# Find an ancestor of `seed` that is spanned by `base`.
ground = seed
while not base.spans(ground.base):
ground = ground.base
# The quotient flow conforms its base flow only when
# the kernel expression is constant.
is_contracting = (not kernels)
# FIXME: this is wrong, but the assembler relies on it
# to collapse `GROUP BY` to the segment frame.
is_expanding = (base.is_root and not kernels)
super(QuotientFlow, self).__init__(
base=base,
family=QuotientFamily(seed, ground, kernels),
is_contracting=is_contracting,
is_expanding=is_expanding,
binding=binding)
self.seed = seed
self.ground = ground
self.kernels = kernels
self.companions = companions
def __basis__(self):
return (self.base, self.seed, tuple(self.kernels))
def __str__(self):
# Display:
# (<base> . (<seed> ^ {<kernels>}))
return "(%s . (%s ^ {%s}))" % (self.base, self.seed,
", ".join(str(kernel) for kernel in self.kernels))
class ComplementFlow(Flow):
"""
Represents a complement to a quotient.
A complement takes a quotient as an input flow and generates
elements of the quotient seed.
`base` (:class:`Flow`)
The base flow.
`companions` (list of :class:`Code`)
An auxiliary hint to the compiler indicating that the term
representing the flow needs to export extra covering units.
The value of this attribute has no effect on the semantics of
the flow graph and node comparison.
Other attributes:
`seed` (:class:`Flow`)
The seed flow of the quotient.
`ground` (:class:`Flow`)
The ground flow of the quotient.
`kernels` (list of :class:`Code`)
Kernel expressions of the quotient.
"""
is_axis = True
def __init__(self, base, binding, companions=[]):
assert isinstance(base, Flow)
assert base.family.is_quotient
assert isinstance(companions, listof(Code))
super(ComplementFlow, self).__init__(
base=base,
family=base.family.seed.family,
is_contracting=False,
is_expanding=True,
binding=binding)
self.seed = base.family.seed
self.ground = base.family.ground
self.kernels = base.family.kernels
self.companions = companions
def __basis__(self):
return (self.base,)
def __str__(self):
# Display:
# (<base> . ^)
return "(%s . ^)" % self.base
class MonikerFlow(Flow):
"""
Represents a moniker operation.
A moniker masks an arbitrary sequence of operations
as a single axial flow operation.
`base` (:class:`Flow`)
The base flow.
`seed` (:class:`Flow`)
The seed flow.
`companions` (list of :class:`Code`)
An auxiliary hint to the compiler indicating that the term
representing the flow must export extra covering units.
The value of this attribute has no effect on the semantics of
the flow graph and node comparison.
Other attributes:
`ground` (:class:`Flow`)
The closest axial ancestor of `seed` spanned by `base`.
"""
is_axis = True
def __init__(self, base, seed, binding, companions=[]):
assert isinstance(base, Flow)
assert isinstance(seed, Flow)
assert seed.spans(base)
# We don't need `seed` to be plural or even axial against `base`.
#assert not base.spans(seed)
assert isinstance(companions, listof(Code))
# Determine an axial ancestor of `seed` spanned by `base`
# (could be `seed` itself).
ground = seed
while not ground.is_axis:
ground = ground.base
if not base.spans(ground):
while not base.spans(ground.base):
ground = ground.base
super(MonikerFlow, self).__init__(
base=base,
family=seed.family,
is_contracting=base.spans(seed),
is_expanding=seed.dominates(base),
binding=binding)
self.seed = seed
self.ground = ground
self.companions = companions
def __basis__(self):
return (self.base, self.seed)
def __str__(self):
# Display:
# (<base> . (<seed>))
return "(%s . (%s))" % (self.base, self.seed)
class ForkedFlow(Flow):
"""
Represents a fork expression.
A fork expression associates each element of the input flow
with every element of the input flow that shares the same origin
and the same values of the kernel expressions.
`base` (:class:`Flow`)
The base flow.
`seed` (:class:`Flow`)
The flow to fork (typically coincides with the base flow).
`kernels` (list of :class:`Code`)
The kernel expressions.
`companions` (list of :class:`Code`)
An auxiliary hint to the compiler indicating that the term
representing the flow must export extra covering units.
The value of this attribute has no effect on the semantics of
the flow graph and node comparison.
Other attributes:
`ground` (:class:`Flow`)
The closest axial ancestor of the seed flow.
"""
is_axis = True
def __init__(self, base, seed, kernels, binding, companions=[]):
assert isinstance(base, Flow)
assert isinstance(seed, Flow)
assert isinstance(kernels, listof(Code))
assert base.spans(seed) and seed.spans(base)
# FIXME: this condition could be violated after the rewrite step
# (also, equal-by-value is not implemented for `Family`):
#assert base.family == seed.family
# FIXME: we don't check for this constraint in the encoder anymore.
#assert all(base.spans(unit.flow) for code in kernels
# for unit in code.units)
assert isinstance(companions, listof(Code))
ground = seed
while not ground.is_axis:
ground = ground.base
is_contracting = ground.is_contracting
is_expanding = (not kernels and seed.dominates(base))
super(ForkedFlow, self).__init__(
base=base,
family=base.family,
is_contracting=is_contracting,
is_expanding=is_expanding,
binding=binding)
self.seed = seed
self.ground = ground
self.kernels = kernels
self.companions = companions
def __basis__(self):
return (self.base, self.seed, tuple(self.kernels))
def __str__(self):
# Display:
# (<base> . fork({<kernels>}))
return "(%s . fork({%s}))" \
% (self.base, ", ".join(str(code) for code in self.kernels))
class AttachFlow(Flow):
"""
Represents a linking operation.
A linking operation generates, for every element of the input flow,
convergent elements from the seed flow with the same image value.
`base` (:class:`Flow`)
The base flow.
`seed` (:class:`Flow`)
The seed flow.
`images` (list of pairs of :class:`Code`)
Pairs of expressions forming a fiber join condition.
`companions` (list of :class:`Code`)
An auxiliary hint to the compiler indicating that the term
representing the flow must export extra covering units.
The value of this attribute has no effect on the semantics of
the flow graph and node comparison.
Other attributes:
`ground` (:class:`Flow`)
The closest axial ancestor of `seed` spanned by `base`.
"""
is_axis = True
def __init__(self, base, seed, images, filter, binding, companions=[]):
assert isinstance(base, Flow)
assert isinstance(seed, Flow)
assert seed.spans(base)
assert not base.spans(seed)
assert isinstance(images, listof(tupleof(Code, Code)))
assert isinstance(filter, maybe(Code))
if filter is not None:
assert isinstance(filter.domain, BooleanDomain)
assert isinstance(companions, listof(Code))
ground = seed
while not base.spans(ground.base):
ground = ground.base
super(AttachFlow, self).__init__(
base=base,
family=seed.family,
is_contracting=False,
is_expanding=False,
binding=binding)
self.seed = seed
self.ground = ground
self.images = images
self.filter = filter
self.companions = companions
def __basis__(self):
return (self.base, self.seed, tuple(self.images), self.filter)
def __str__(self):
# Display:
# (<base> . ({<limages>} -> <seed>{<rimages>}))
return "(%s . ({%s} -> %s{%s}))" \
% (self.base, ", ".join(str(lop) for lop, rop in self.images),
self.seed, ", ".join(str(rop) for lop, rop in self.images))
class ClippedFlow(Flow):
is_axis = True
def __init__(self, base, seed, limit, offset, binding, companions=[]):
assert isinstance(base, Flow)
assert isinstance(seed, Flow)
assert seed.spans(base)
assert not base.spans(seed)
assert isinstance(limit, maybe(int))
assert isinstance(offset, maybe(int))
assert isinstance(companions, listof(Code))
# Determine an axial ancestor of `seed` spanned by `base`.
ground = seed
while not ground.is_axis:
ground = ground.base
assert not base.spans(ground)
while not base.spans(ground.base):
ground = ground.base
is_contracting = (limit is None)
is_expanding = (seed.dominates(base) and offset is None
and (limit is None or limit > 0))
super(ClippedFlow, self).__init__(
base=base,
family=seed.family,
is_contracting=is_contracting,
is_expanding=is_expanding,
binding=binding)
self.seed = seed
self.ground = ground
self.limit = limit
self.offset = offset
self.companions = companions
def __basis__(self):
return (self.base, self.seed, self.limit, self.offset)
def __str__(self):
# Display:
# (<base> . (<seed>) [<offset>:<offset>+<limit>])
return "(%s . (%s) [%s:%s+%s])" \
% (self.base, self.seed,
self.offset if self.offset is not None else 0,
self.offset if self.offset is not None else 0,
self.limit if self.limit is not None else 1)
class LocatorFlow(AttachFlow):
is_axis = True
def __init__(self, base, seed, images, filter, binding, companions=[]):
assert isinstance(base, Flow)
assert isinstance(seed, Flow)
assert isinstance(images, listof(tupleof(Code, Code)))
assert isinstance(filter, maybe(Code))
if filter is not None:
assert isinstance(filter.domain, BooleanDomain)
assert seed.spans(base)
# We don't need `seed` to be plural or even axial against `base`.
#assert not base.spans(seed)
assert isinstance(companions, listof(Code))
# Determine an axial ancestor of `seed` spanned by `base`
# (could be `seed` itself).
ground = seed
while not ground.is_axis:
ground = ground.base
if not base.spans(ground):
while not base.spans(ground.base):
ground = ground.base
axis = seed
while not axis.is_axis:
axis = axis.base
is_contracting = (axis.base is None or base.spans(axis.base))
# Note: skip Attach constructor.
super(AttachFlow, self).__init__(
base=base,
family=seed.family,
is_contracting=is_contracting,
is_expanding=False,
binding=binding)
self.seed = seed
self.images = images
self.filter = filter
self.ground = ground
self.companions = companions
def __basis__(self):
return (self.base, self.seed, tuple(self.images), self.filter)
class FilteredFlow(Flow):
"""
Represents a filtering operation.
A filtered flow `A ? f`, where `A` is the input flow and `f` is
a predicate expression on `A`, consists of rows of `A` satisfying
the condition `f`.
`base` (:class:`Flow`)
The base flow.
`filter` (:class:`Code`)
The predicate expression.
"""
def __init__(self, base, filter, binding):
assert isinstance(filter, Code)
assert isinstance(filter.domain, BooleanDomain)
super(FilteredFlow, self).__init__(
base=base,
family=base.family,
is_contracting=True,
is_expanding=False,
binding=binding)
self.filter = filter
def __basis__(self):
return (self.base, self.filter)
def __str__(self):
# Display:
# (<base> ? <filter>)
return "(%s ? %s)" % (self.base, self.filter)
class OrderedFlow(Flow):
"""
Represents an ordered flow.
An ordered flow `A [e,...;p:q]` is a flow with explicitly specified
strong ordering. It also may extract a slice of the input flow.
`base` (:class:`Flow`)
The base flow.
`order` (a list of pairs `(code, direction)`)
Expressions to sort the flow by.
Here `code` is a :class:`Code` instance, `direction` is either
``+1`` (indicates ascending order) or ``-1`` (indicates descending
order).
`limit` (a non-negative integer or ``None``)
If set, the flow extracts the first `limit` rows from the base
flow (with respect to the flow ordering). The remaining rows
are discarded.
`offset` (a non-negative integer or ``None``)
If set, indicates that when extracting rows from the base flow,
the first `offset` rows should be skipped.
"""
# FIXME: Non-commutativity of the ordered flow may affect `prune`
# and other functions. Add class attribute `is_commutative`?
# Or override `resembles` to return `True` only for equal nodes?
def __init__(self, base, order, limit, offset, binding):
assert isinstance(order, listof(tupleof(Code, int)))
assert isinstance(limit, maybe(int))
assert isinstance(offset, maybe(int))
assert limit is None or limit >= 0
assert offset is None or offset >= 0
super(OrderedFlow, self).__init__(
base=base,
family=base.family,
is_contracting=True,
is_expanding=(limit is None and offset is None),
binding=binding)
self.order = order
self.limit = limit
self.offset = offset
self.is_commutative = (limit is None and offset is None)
def __basis__(self):
return (self.base, tuple(self.order))
def __str__(self):
# Display:
# <base> [<code>,...;<offset>:<limit>+<offset>]
indicators = []
if self.order:
indicator = ",".join(str(code) for code, dir in self.order)
indicators.append(indicator)
if self.limit is not None and self.offset is not None:
indicator = "%s:%s+%s" % (self.offset, self.offset, self.limit)
indicators.append(indicator)
elif self.limit is not None:
indicator = ":%s" % self.limit
indicators.append(indicator)
elif self.offset is not None:
indicator = "%s:" % self.offset
indicators.append(indicator)
indicators = ";".join(indicators)
return "%s [%s]" % (self.base, indicators)
class Code(Expression):
"""
Represents a code expression.
A code expression is a function on flows. Specifically, it is a
functional (possibly of several variables) that maps a flow
(or a Cartesian product of several flows) to some scalar domain.
:class:`Code` is an abstract base class for all code expressions;
see its subclasses for concrete types of expressions.
Among all code expressions, we distinguish *unit expressions*:
elementary functions on flows. There are several kinds of units:
among them are table columns and aggregate functions (see :class:`Unit`
for more detail). A non-unit code could be expressed as
a composition of a scalar function and one or several units:
`f = f(a,b,...) = F(u(a),v(b),...)`,
where
- `f` is a code expression;
- `F` is a scalar function;
- `a`, `b`, ... are elements of flows `A`, `B`, ...;
- `u`, `v`, ... are unit expressions on `A`, `B`, ....
Note: special forms like `COUNT` or `EXISTS` are also expressed
as code nodes. Since they are not regular functions, special care
must be taken to properly wrap them with appropriate
:class:`ScalarUnit` and/or :class:`AggregateUnit` instances.
`domain` (:class:`htsql.core.domain.Domain`)
The co-domain of the code expression.
`units` (a list of :class:`Unit`)
The unit expressions of which the code is composed.
"""
def __init__(self, domain, binding):
assert isinstance(domain, Domain)
super(Code, self).__init__(binding)
self.domain = domain
@cachedproperty
def units(self):
return self.get_units()
@cachedproperty
def segments(self):
return self.get_segments()
def get_units(self):
return []
def get_segments(self):
return []
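# Standalone sketch (not used by this module) of the decomposition
# described in `Code`: a non-unit expression is a scalar function applied
# to unit values, f(a, b) = F(u(a), v(b)).  `F`, `u` and `v` are
# hypothetical callables; `a` and `b` stand for elements of the flows
# the units are defined on.
def _example_code_decomposition(F, u, v, a, b):
    # The scalar function never touches the flow elements directly,
    # only the values produced by the units.
    return F(u(a), v(b))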
class SegmentCode(Code):
"""
Represents a segment of an HTSQL query.
`flow` (:class:`Flow`)
The output flow of the segment.
"""
def __init__(self, root, flow, code, binding):
assert isinstance(root, Flow)
assert isinstance(flow, Flow)
assert isinstance(code, Code)
assert isinstance(binding, SegmentBinding)
super(SegmentCode, self).__init__(
domain=ListDomain(code.domain),
binding=binding)
self.root = root
self.flow = flow
self.code = code
def __basis__(self):
return (self.root, self.flow, self.code)
@property
def segments(self):
# Do not cache to avoid reference cycles.
return [self]
class LiteralCode(Code):
"""
Represents a literal value.
`value` (valid type depends on the domain)
The value.
`domain` (:class:`htsql.core.domain.Domain`)
The value type.
"""
def __init__(self, value, domain, binding):
super(LiteralCode, self).__init__(
domain=domain,
binding=binding)
self.value = value
def __basis__(self):
return (self.value, self.domain)
def __str__(self):
# The actual value is often more helpful than the expression
# that generated it.
return repr(self.value)
class CastCode(Code):
"""
Represents a type conversion operator.
`base` (:class:`Code`)
The expression to convert.
`domain` (:class:`htsql.core.domain.Domain`)
The target domain.
"""
def __init__(self, base, domain, binding):
super(CastCode, self).__init__(
domain=domain,
binding=binding)
self.base = base
def __basis__(self):
return (self.base, self.domain)
def get_units(self):
return self.base.units
def get_segments(self):
return self.base.segments
class RecordCode(Code):
def __init__(self, fields, domain, binding):
assert isinstance(fields, listof(Code))
super(RecordCode, self).__init__(
domain=domain,
binding=binding)
self.fields = fields
def __basis__(self):
return (tuple(self.fields), self.domain)
def get_units(self):
units = []
for field in self.fields:
units.extend(field.units)
return units
def get_segments(self):
segments = []
for field in self.fields:
segments.extend(field.segments)
return segments
class IdentityCode(Code):
def __init__(self, fields, binding):
assert isinstance(fields, listof(Code))
domain = IdentityDomain([field.domain for field in fields])
super(IdentityCode, self).__init__(
domain=domain,
binding=binding)
self.fields = fields
def __basis__(self):
return (tuple(self.fields),)
def get_units(self):
units = []
for field in self.fields:
units.extend(field.units)
return units
class AnnihilatorCode(Code):
def __init__(self, code, indicator, binding):
assert isinstance(code, Code)
assert isinstance(indicator, Unit)
super(AnnihilatorCode, self).__init__(
domain=code.domain,
binding=binding)
self.code = code
self.indicator = indicator
def __basis__(self):
return (self.code, self.indicator)
def get_units(self):
return [self.indicator]+self.code.units
def get_segments(self):
return self.indicator.segments+self.code.segments
class FormulaCode(Formula, Code):
"""
Represents a formula code.
A formula code represents a function or an operator call as a code node.
`signature` (:class:`htsql.core.tr.signature.Signature`)
The signature of the formula.
`domain` (:class:`Domain`)
The co-domain of the formula.
`arguments` (a dictionary)
The arguments of the formula.
Note that all the arguments become attributes of the node object.
"""
def __init__(self, signature, domain, binding, **arguments):
assert isinstance(signature, Signature)
# Check that the arguments match the formula signature.
arguments = Bag(**arguments)
assert arguments.admits(Code, signature)
# The first two arguments are processed by the `Formula`
# constructor, the rest of them go to the `Binding` constructor.
super(FormulaCode, self).__init__(
signature, arguments,
domain=domain,
binding=binding)
def __basis__(self):
return (self.signature, self.domain, self.arguments.freeze())
def get_units(self):
units = []
for cell in self.arguments.cells():
units.extend(cell.units)
return units
def get_segments(self):
segments = []
for cell in self.arguments.cells():
segments.extend(cell.segments)
return segments
class Unit(Code):
"""
Represents a unit expression.
A unit is an elementary function on a flow. There are several kinds
of units; see subclasses :class:`ColumnUnit`, :class:`ScalarUnit`,
:class:`AggregateUnit`, and :class:`CorrelatedUnit` for more detail.
Units are divided into two categories: *primitive* and *compound*.
A primitive unit is an intrinsic function of its flow; no additional
calculations are required to generate a primitive unit. Currently,
the only example of a primitive unit is :class:`ColumnUnit`.
A compound unit requires calculating some non-intrinsic function
on the target flow. Among compound units there are :class:`ScalarUnit`
and :class:`AggregateUnit`, which correspond respectively to
scalar and aggregate functions on a flow.
Note that it is easy to *lift* a unit code from one flow to another.
Specifically, suppose a unit `u` is defined on a flow `A` and `B`
is another flow such that `B` spans `A`. Then for each row `b`
from `B` there is no more than one row `a` from `A` such that `a <-> b`.
Therefore we could define `u` on `B` as follows:
- `u(b) = u(a)` if there exists `a` from `A` such that `a <-> b`;
`u(b) =` ``NULL`` if there are no rows in `A` convergent to `b`.
When a flow `B` spans the flow `A` of a unit `u`, we say that
`u` is *singular* on `B`. By the previous argument, `u` could be
lifted to `B`. Thus any unit is well-defined not only on the
flow where it is originally defined, but also on any flow where
it is singular.
Attributes:
`flow` (:class:`Flow`)
The flow on which the unit is defined.
`domain` (:class:`htsql.core.domain.Domain`)
The unit co-domain.
Class attributes:
`is_primitive` (Boolean)
If set, indicates that the unit is primitive.
`is_compound` (Boolean)
If set, indicates that the unit is compound.
"""
is_primitive = False
is_compound = False
def __init__(self, flow, domain, binding):
assert isinstance(flow, Flow)
super(Unit, self).__init__(
domain=domain,
binding=binding)
self.flow = flow
@property
def units(self):
# Use `property` instead of `cachedproperty` to avoid
# creating a reference cycle.
return [self]
def singular(self, flow):
"""
Verifies if the unit is singular (well-defined) on the given flow.
"""
return flow.spans(self.flow)
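# Standalone sketch (not used by this module) of the lifting rule from
# `Unit`: a unit defined on a flow `A` is lifted to a wider flow `B` by
# evaluating it on the unique convergent element, or NULL when no such
# element exists.  `u` and `convergent` are hypothetical callables.
def _example_lift_unit(u, convergent, b):
    a = convergent(b)  # the element of A with a <-> b, or None
    return u(a) if a is not None else None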
class PrimitiveUnit(Unit):
"""
Represents a primitive unit.
A primitive unit is an intrinsic function on a flow.
This is an abstract class; for the (only) concrete subclass, see
:class:`ColumnUnit`.
"""
is_primitive = True
class CompoundUnit(Unit):
"""
Represents a compound unit.
A compound unit is some non-intrinsic function on a flow.
This is an abstract class; for concrete subclasses, see
:class:`ScalarUnit`, :class:`AggregateUnit`, etc.
`code` (:class:`Code`)
The expression to evaluate on the unit flow.
"""
is_compound = True
def __init__(self, code, flow, domain, binding):
assert isinstance(code, Code)
super(CompoundUnit, self).__init__(
flow=flow,
domain=domain,
binding=binding)
self.code = code
def get_segments(self):
return self.code.segments
class ColumnUnit(PrimitiveUnit):
"""
Represents a column unit.
A column unit is a function on a flow that returns a column of the
prominent table of the flow.
`column` (:class:`htsql.core.entity.ColumnEntity`)
The column produced by the unit.
`flow` (:class:`Flow`)
The unit flow. The flow must be of a table family and the flow
table must coincide with the column table.
"""
def __init__(self, column, flow, binding):
assert isinstance(column, ColumnEntity)
assert (flow.family.is_table and
flow.family.table == column.table)
super(ColumnUnit, self).__init__(
flow=flow,
domain=column.domain,
binding=binding)
self.column = column
def __basis__(self):
return (self.column, self.flow)
class ScalarUnit(CompoundUnit):
"""
Represents a scalar unit.
A scalar unit is an expression evaluated in the specified flow.
Recall that any expression has the following form:
`F(u(a),v(b),...)`,
where
- `F` is a scalar function;
- `a`, `b`, ... are elements of flows `A`, `B`, ...;
- `u`, `v`, ... are unit expressions on `A`, `B`, ....
We require that the units of the expression are singular on the given
flow. If so, the expression units `u`, `v`, ... could be lifted to
the given flow (see :class:`Unit`). The scalar unit is defined as
`F(u(x),v(x),...)`,
where `x` is an element of the flow where the scalar unit is defined.
`code` (:class:`Code`)
The expression to evaluate.
`flow` (:class:`Flow`)
The flow on which the unit is defined.
`companions` (list of :class:`Code`)
An auxiliary hint to the compiler indicating that the term
exporting the unit must also export extra scalar units.
The value of this attribute has no effect on the semantics of
the flow graph and node comparison.
"""
def __init__(self, code, flow, binding, companions=[]):
assert isinstance(companions, listof(Code))
super(ScalarUnit, self).__init__(
code=code,
flow=flow,
domain=code.domain,
binding=binding)
self.companions = companions
def __basis__(self):
return (self.code, self.flow)
class AggregateUnitBase(CompoundUnit):
"""
Represents an aggregate unit.
Aggregate units express functions on sets. Specifically, let `A` and `B`
be flows such that `B` spans `A`, but `A` does not span `B`, and
let `g` be a function that takes subsets of `B` as an argument. Then
we could define an aggregate unit `u` on `A` as follows:
`u(a) = g({b | a <-> b})`
Here, for each row `a` from `A`, we take the subset of convergent
rows from `B` and apply `g` to it; the result is the value of `u(a)`.
The flow `A` is the unit flow, the flow `B` is called *the plural
flow* of an aggregate unit, and `g` is called *the composite expression*
of an aggregate unit.
`code` (:class:`Code`)
The composite expression of the aggregate unit.
`plural_flow` (:class:`Flow`)
The plural flow of the aggregate unit, that is, the flow
whose subsets form the argument of the composite expression.
`flow` (:class:`Flow`)
The flow on which the unit is defined.
"""
def __init__(self, code, plural_flow, flow, binding):
assert isinstance(code, Code)
assert isinstance(plural_flow, Flow)
# FIXME: consider lifting the requirement that the plural
# flow spans the unit flow. Is it really necessary?
assert plural_flow.spans(flow)
assert not flow.spans(plural_flow)
super(AggregateUnitBase, self).__init__(
code=code,
flow=flow,
domain=code.domain,
binding=binding)
self.plural_flow = plural_flow
def __basis__(self):
return (self.code, self.plural_flow, self.flow)
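# Standalone sketch (not used by this module) of the aggregate rule
# u(a) = g({b | a <-> b}) from `AggregateUnitBase`: apply the composite
# expression to the subset of the plural flow convergent to `a`.
# `g` and `plural_elements` are hypothetical callables.
def _example_aggregate_unit(g, plural_elements, a):
    return g(list(plural_elements(a)))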
class AggregateUnit(AggregateUnitBase):
"""
Represents a regular aggregate unit.
A regular aggregate unit is expressed in SQL using an aggregate
expression with ``GROUP BY`` clause.
`companions` (list of :class:`Code`)
An auxiliary hint to the compiler indicating that the term
exporting the unit must also export extra aggregate units.
The value of this attribute has no effect on the semantics of
the flow graph and node comparison.
"""
def __init__(self, code, plural_flow, flow, binding, companions=[]):
assert isinstance(companions, listof(Code))
super(AggregateUnit, self).__init__(code, plural_flow, flow, binding)
self.companions = companions
class CorrelatedUnit(AggregateUnitBase):
"""
Represents a correlated aggregate unit.
A correlated aggregate unit is expressed in SQL using a correlated
subquery.
"""
class KernelUnit(CompoundUnit):
"""
Represents a value generated by a quotient flow.
A value generated by a quotient is either a part of a kernel
expression or a unit from a ground flow.
`code` (:class:`Code`)
An expression (calculated against the seed flow of the quotient).
`flow` (:class:`Flow`)
The flow of the quotient family on which the unit is defined.
"""
def __init__(self, code, flow, binding):
assert flow.family.is_quotient
super(KernelUnit, self).__init__(
code=code,
flow=flow,
domain=code.domain,
binding=binding)
def __basis__(self):
return (self.code, self.flow)
class CoveringUnit(CompoundUnit):
"""
Represents a value generated by a covering flow.
A covering flow represents another flow expression as
a single axial flow operation.
`code` (:class:`Code`)
An expression (calculated against the seed flow of
the covering flow).
`flow` (:class:`Flow`)
The flow on which the unit is defined.
"""
def __init__(self, code, flow, binding):
assert isinstance(flow, (ComplementFlow,
MonikerFlow,
ForkedFlow,
AttachFlow,
ClippedFlow,
LocatorFlow))
super(CoveringUnit, self).__init__(
code=code,
flow=flow,
domain=code.domain,
binding=binding)
def __basis__(self):
return (self.code, self.flow)
class CorrelationCode(Code):
def __init__(self, code):
super(CorrelationCode, self).__init__(code.domain, code.binding)
self.code = code
def __basis__(self):
return (self.code,) | PypiClean |
/Lin-CMS-0.4.11.tar.gz/Lin-CMS-0.4.11/src/lin/encoder.py | from datetime import date, datetime
from decimal import Decimal
from enum import Enum
from typing import Iterable
from flask import json, jsonify
from flask.json import JSONEncoder as _JSONEncoder
from flask.wrappers import Response
from .apidoc import BaseModel
from .db import Record, RecordCollection
class JSONEncoder(_JSONEncoder):
def default(self, o):
if isinstance(o, BaseModel):
if hasattr(o, "__root__") and o.__root__.__class__.__name__ in (
"list",
"int",
"set",
"tuple",
):
return o.__root__
return o.dict()
if isinstance(o, (int, float, list, set, tuple)):
return json.dumps(o, cls=JSONEncoder)
if isinstance(o, bytes):
return o.decode("utf8")
if isinstance(o, datetime):
return o.strftime("%Y-%m-%dT%H:%M:%SZ")
if isinstance(o, date):
return o.strftime("%Y-%m-%d")
if isinstance(o, Enum):
return o.value
if isinstance(o, (RecordCollection, Record)):
return o.as_dict()
if isinstance(o, Decimal):
return json.dumps(o, use_decimal=True)
if isinstance(o, Iterable):
return list(o)
if isinstance(o, complex):
return f"{o.real}+{o.imag}j"
if hasattr(o, "keys") and hasattr(o, "__getitem__"):
return dict(o)
# Fall back to the base encoder, which raises TypeError for objects it
# cannot serialize (calling JSONEncoder.default here would recurse).
return super().default(o)
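# Usage sketch (hypothetical helper, not part of Lin-CMS; assumes a
# Flask 1.x/2.x application object -- the `json_encoder` attribute was
# removed in Flask 2.3): registering the encoder makes `jsonify` apply
# the conversions above.
def _example_register_encoder(app):
    app.json_encoder = JSONEncoder
    return app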
def auto_response(func):
def make_lin_response(o):
if not isinstance(o, str) and (
isinstance(o, (RecordCollection, Record, BaseModel, Iterable))
or (hasattr(o, "keys") and hasattr(o, "__getitem__"))
or isinstance(o, (int, float, list, set, complex, Decimal, Enum))
):
o = jsonify(o)
elif isinstance(o, tuple) and not isinstance(o[0], (Response, str)):
oc = list(o)
oc[0] = json.dumps(o[0])
o = tuple(oc)
return func(o)
return make_lin_response | PypiClean |
/Hikka_TL-1.24.14-py3-none-any.whl/telethon/tl/types/updates.py | from ...tl.tlobject import TLObject
from typing import Optional, List, Union, TYPE_CHECKING
import os
import struct
from datetime import datetime
if TYPE_CHECKING:
from ...tl.types import TypeChat, TypeDialog, TypeEncryptedMessage, TypeMessage, TypeUpdate, TypeUser
from ...tl.types.updates import TypeState
class ChannelDifference(TLObject):
CONSTRUCTOR_ID = 0x2064674e
SUBCLASS_OF_ID = 0x29896f5d
def __init__(self, pts: int, new_messages: List['TypeMessage'], other_updates: List['TypeUpdate'], chats: List['TypeChat'], users: List['TypeUser'], final: Optional[bool]=None, timeout: Optional[int]=None):
"""
Constructor for updates.ChannelDifference: Instance of either ChannelDifferenceEmpty, ChannelDifferenceTooLong, ChannelDifference.
"""
self.pts = pts
self.new_messages = new_messages
self.other_updates = other_updates
self.chats = chats
self.users = users
self.final = final
self.timeout = timeout
def to_dict(self):
return {
'_': 'ChannelDifference',
'pts': self.pts,
'new_messages': [] if self.new_messages is None else [x.to_dict() if isinstance(x, TLObject) else x for x in self.new_messages],
'other_updates': [] if self.other_updates is None else [x.to_dict() if isinstance(x, TLObject) else x for x in self.other_updates],
'chats': [] if self.chats is None else [x.to_dict() if isinstance(x, TLObject) else x for x in self.chats],
'users': [] if self.users is None else [x.to_dict() if isinstance(x, TLObject) else x for x in self.users],
'final': self.final,
'timeout': self.timeout
}
def _bytes(self):
return b''.join((
b'Ngd ',
struct.pack('<I', (0 if self.final is None or self.final is False else 1) | (0 if self.timeout is None or self.timeout is False else 2)),
struct.pack('<i', self.pts),
b'' if self.timeout is None or self.timeout is False else (struct.pack('<i', self.timeout)),
b'\x15\xc4\xb5\x1c',struct.pack('<i', len(self.new_messages)),b''.join(x._bytes() for x in self.new_messages),
b'\x15\xc4\xb5\x1c',struct.pack('<i', len(self.other_updates)),b''.join(x._bytes() for x in self.other_updates),
b'\x15\xc4\xb5\x1c',struct.pack('<i', len(self.chats)),b''.join(x._bytes() for x in self.chats),
b'\x15\xc4\xb5\x1c',struct.pack('<i', len(self.users)),b''.join(x._bytes() for x in self.users),
))
@classmethod
def from_reader(cls, reader):
flags = reader.read_int()
_final = bool(flags & 1)
_pts = reader.read_int()
if flags & 2:
_timeout = reader.read_int()
else:
_timeout = None
reader.read_int()
_new_messages = []
for _ in range(reader.read_int()):
_x = reader.tgread_object()
_new_messages.append(_x)
reader.read_int()
_other_updates = []
for _ in range(reader.read_int()):
_x = reader.tgread_object()
_other_updates.append(_x)
reader.read_int()
_chats = []
for _ in range(reader.read_int()):
_x = reader.tgread_object()
_chats.append(_x)
reader.read_int()
_users = []
for _ in range(reader.read_int()):
_x = reader.tgread_object()
_users.append(_x)
return cls(pts=_pts, new_messages=_new_messages, other_updates=_other_updates, chats=_chats, users=_users, final=_final, timeout=_timeout)
class ChannelDifferenceEmpty(TLObject):
CONSTRUCTOR_ID = 0x3e11affb
SUBCLASS_OF_ID = 0x29896f5d
def __init__(self, pts: int, final: Optional[bool]=None, timeout: Optional[int]=None):
"""
Constructor for updates.ChannelDifference: Instance of either ChannelDifferenceEmpty, ChannelDifferenceTooLong, ChannelDifference.
"""
self.pts = pts
self.final = final
self.timeout = timeout
def to_dict(self):
return {
'_': 'ChannelDifferenceEmpty',
'pts': self.pts,
'final': self.final,
'timeout': self.timeout
}
def _bytes(self):
return b''.join((
b'\xfb\xaf\x11>',
struct.pack('<I', (0 if self.final is None or self.final is False else 1) | (0 if self.timeout is None or self.timeout is False else 2)),
struct.pack('<i', self.pts),
b'' if self.timeout is None or self.timeout is False else (struct.pack('<i', self.timeout)),
))
@classmethod
def from_reader(cls, reader):
flags = reader.read_int()
_final = bool(flags & 1)
_pts = reader.read_int()
if flags & 2:
_timeout = reader.read_int()
else:
_timeout = None
return cls(pts=_pts, final=_final, timeout=_timeout)
class ChannelDifferenceTooLong(TLObject):
CONSTRUCTOR_ID = 0xa4bcc6fe
SUBCLASS_OF_ID = 0x29896f5d
def __init__(self, dialog: 'TypeDialog', messages: List['TypeMessage'], chats: List['TypeChat'], users: List['TypeUser'], final: Optional[bool]=None, timeout: Optional[int]=None):
"""
Constructor for updates.ChannelDifference: Instance of either ChannelDifferenceEmpty, ChannelDifferenceTooLong, ChannelDifference.
"""
self.dialog = dialog
self.messages = messages
self.chats = chats
self.users = users
self.final = final
self.timeout = timeout
def to_dict(self):
return {
'_': 'ChannelDifferenceTooLong',
'dialog': self.dialog.to_dict() if isinstance(self.dialog, TLObject) else self.dialog,
'messages': [] if self.messages is None else [x.to_dict() if isinstance(x, TLObject) else x for x in self.messages],
'chats': [] if self.chats is None else [x.to_dict() if isinstance(x, TLObject) else x for x in self.chats],
'users': [] if self.users is None else [x.to_dict() if isinstance(x, TLObject) else x for x in self.users],
'final': self.final,
'timeout': self.timeout
}
def _bytes(self):
return b''.join((
b'\xfe\xc6\xbc\xa4',
struct.pack('<I', (0 if self.final is None or self.final is False else 1) | (0 if self.timeout is None or self.timeout is False else 2)),
b'' if self.timeout is None or self.timeout is False else (struct.pack('<i', self.timeout)),
self.dialog._bytes(),
b'\x15\xc4\xb5\x1c',struct.pack('<i', len(self.messages)),b''.join(x._bytes() for x in self.messages),
b'\x15\xc4\xb5\x1c',struct.pack('<i', len(self.chats)),b''.join(x._bytes() for x in self.chats),
b'\x15\xc4\xb5\x1c',struct.pack('<i', len(self.users)),b''.join(x._bytes() for x in self.users),
))
@classmethod
def from_reader(cls, reader):
flags = reader.read_int()
_final = bool(flags & 1)
if flags & 2:
_timeout = reader.read_int()
else:
_timeout = None
_dialog = reader.tgread_object()
reader.read_int()
_messages = []
for _ in range(reader.read_int()):
_x = reader.tgread_object()
_messages.append(_x)
reader.read_int()
_chats = []
for _ in range(reader.read_int()):
_x = reader.tgread_object()
_chats.append(_x)
reader.read_int()
_users = []
for _ in range(reader.read_int()):
_x = reader.tgread_object()
_users.append(_x)
return cls(dialog=_dialog, messages=_messages, chats=_chats, users=_users, final=_final, timeout=_timeout)
class Difference(TLObject):
CONSTRUCTOR_ID = 0xf49ca0
SUBCLASS_OF_ID = 0x20482874
def __init__(self, new_messages: List['TypeMessage'], new_encrypted_messages: List['TypeEncryptedMessage'], other_updates: List['TypeUpdate'], chats: List['TypeChat'], users: List['TypeUser'], state: 'TypeState'):
"""
Constructor for updates.Difference: Instance of either DifferenceEmpty, Difference, DifferenceSlice, DifferenceTooLong.
"""
self.new_messages = new_messages
self.new_encrypted_messages = new_encrypted_messages
self.other_updates = other_updates
self.chats = chats
self.users = users
self.state = state
def to_dict(self):
return {
'_': 'Difference',
'new_messages': [] if self.new_messages is None else [x.to_dict() if isinstance(x, TLObject) else x for x in self.new_messages],
'new_encrypted_messages': [] if self.new_encrypted_messages is None else [x.to_dict() if isinstance(x, TLObject) else x for x in self.new_encrypted_messages],
'other_updates': [] if self.other_updates is None else [x.to_dict() if isinstance(x, TLObject) else x for x in self.other_updates],
'chats': [] if self.chats is None else [x.to_dict() if isinstance(x, TLObject) else x for x in self.chats],
'users': [] if self.users is None else [x.to_dict() if isinstance(x, TLObject) else x for x in self.users],
'state': self.state.to_dict() if isinstance(self.state, TLObject) else self.state
}
def _bytes(self):
return b''.join((
b'\xa0\x9c\xf4\x00',
b'\x15\xc4\xb5\x1c',struct.pack('<i', len(self.new_messages)),b''.join(x._bytes() for x in self.new_messages),
b'\x15\xc4\xb5\x1c',struct.pack('<i', len(self.new_encrypted_messages)),b''.join(x._bytes() for x in self.new_encrypted_messages),
b'\x15\xc4\xb5\x1c',struct.pack('<i', len(self.other_updates)),b''.join(x._bytes() for x in self.other_updates),
b'\x15\xc4\xb5\x1c',struct.pack('<i', len(self.chats)),b''.join(x._bytes() for x in self.chats),
b'\x15\xc4\xb5\x1c',struct.pack('<i', len(self.users)),b''.join(x._bytes() for x in self.users),
self.state._bytes(),
))
@classmethod
def from_reader(cls, reader):
reader.read_int()
_new_messages = []
for _ in range(reader.read_int()):
_x = reader.tgread_object()
_new_messages.append(_x)
reader.read_int()
_new_encrypted_messages = []
for _ in range(reader.read_int()):
_x = reader.tgread_object()
_new_encrypted_messages.append(_x)
reader.read_int()
_other_updates = []
for _ in range(reader.read_int()):
_x = reader.tgread_object()
_other_updates.append(_x)
reader.read_int()
_chats = []
for _ in range(reader.read_int()):
_x = reader.tgread_object()
_chats.append(_x)
reader.read_int()
_users = []
for _ in range(reader.read_int()):
_x = reader.tgread_object()
_users.append(_x)
_state = reader.tgread_object()
return cls(new_messages=_new_messages, new_encrypted_messages=_new_encrypted_messages, other_updates=_other_updates, chats=_chats, users=_users, state=_state)
class DifferenceEmpty(TLObject):
CONSTRUCTOR_ID = 0x5d75a138
SUBCLASS_OF_ID = 0x20482874
def __init__(self, date: Optional[datetime], seq: int):
"""
Constructor for updates.Difference: Instance of either DifferenceEmpty, Difference, DifferenceSlice, DifferenceTooLong.
"""
self.date = date
self.seq = seq
def to_dict(self):
return {
'_': 'DifferenceEmpty',
'date': self.date,
'seq': self.seq
}
def _bytes(self):
return b''.join((
b'8\xa1u]',
self.serialize_datetime(self.date),
struct.pack('<i', self.seq),
))
@classmethod
def from_reader(cls, reader):
_date = reader.tgread_date()
_seq = reader.read_int()
return cls(date=_date, seq=_seq)
class DifferenceSlice(TLObject):
CONSTRUCTOR_ID = 0xa8fb1981
SUBCLASS_OF_ID = 0x20482874
def __init__(self, new_messages: List['TypeMessage'], new_encrypted_messages: List['TypeEncryptedMessage'], other_updates: List['TypeUpdate'], chats: List['TypeChat'], users: List['TypeUser'], intermediate_state: 'TypeState'):
"""
Constructor for updates.Difference: Instance of either DifferenceEmpty, Difference, DifferenceSlice, DifferenceTooLong.
"""
self.new_messages = new_messages
self.new_encrypted_messages = new_encrypted_messages
self.other_updates = other_updates
self.chats = chats
self.users = users
self.intermediate_state = intermediate_state
def to_dict(self):
return {
'_': 'DifferenceSlice',
'new_messages': [] if self.new_messages is None else [x.to_dict() if isinstance(x, TLObject) else x for x in self.new_messages],
'new_encrypted_messages': [] if self.new_encrypted_messages is None else [x.to_dict() if isinstance(x, TLObject) else x for x in self.new_encrypted_messages],
'other_updates': [] if self.other_updates is None else [x.to_dict() if isinstance(x, TLObject) else x for x in self.other_updates],
'chats': [] if self.chats is None else [x.to_dict() if isinstance(x, TLObject) else x for x in self.chats],
'users': [] if self.users is None else [x.to_dict() if isinstance(x, TLObject) else x for x in self.users],
'intermediate_state': self.intermediate_state.to_dict() if isinstance(self.intermediate_state, TLObject) else self.intermediate_state
}
def _bytes(self):
return b''.join((
b'\x81\x19\xfb\xa8',
b'\x15\xc4\xb5\x1c',struct.pack('<i', len(self.new_messages)),b''.join(x._bytes() for x in self.new_messages),
b'\x15\xc4\xb5\x1c',struct.pack('<i', len(self.new_encrypted_messages)),b''.join(x._bytes() for x in self.new_encrypted_messages),
b'\x15\xc4\xb5\x1c',struct.pack('<i', len(self.other_updates)),b''.join(x._bytes() for x in self.other_updates),
b'\x15\xc4\xb5\x1c',struct.pack('<i', len(self.chats)),b''.join(x._bytes() for x in self.chats),
b'\x15\xc4\xb5\x1c',struct.pack('<i', len(self.users)),b''.join(x._bytes() for x in self.users),
self.intermediate_state._bytes(),
))
@classmethod
def from_reader(cls, reader):
reader.read_int()
_new_messages = []
for _ in range(reader.read_int()):
_x = reader.tgread_object()
_new_messages.append(_x)
reader.read_int()
_new_encrypted_messages = []
for _ in range(reader.read_int()):
_x = reader.tgread_object()
_new_encrypted_messages.append(_x)
reader.read_int()
_other_updates = []
for _ in range(reader.read_int()):
_x = reader.tgread_object()
_other_updates.append(_x)
reader.read_int()
_chats = []
for _ in range(reader.read_int()):
_x = reader.tgread_object()
_chats.append(_x)
reader.read_int()
_users = []
for _ in range(reader.read_int()):
_x = reader.tgread_object()
_users.append(_x)
_intermediate_state = reader.tgread_object()
return cls(new_messages=_new_messages, new_encrypted_messages=_new_encrypted_messages, other_updates=_other_updates, chats=_chats, users=_users, intermediate_state=_intermediate_state)
class DifferenceTooLong(TLObject):
CONSTRUCTOR_ID = 0x4afe8f6d
SUBCLASS_OF_ID = 0x20482874
def __init__(self, pts: int):
"""
Constructor for updates.Difference: Instance of either DifferenceEmpty, Difference, DifferenceSlice, DifferenceTooLong.
"""
self.pts = pts
def to_dict(self):
return {
'_': 'DifferenceTooLong',
'pts': self.pts
}
def _bytes(self):
return b''.join((
b'm\x8f\xfeJ',
struct.pack('<i', self.pts),
))
@classmethod
def from_reader(cls, reader):
_pts = reader.read_int()
return cls(pts=_pts)
class State(TLObject):
CONSTRUCTOR_ID = 0xa56c2a3e
SUBCLASS_OF_ID = 0x23df1a01
def __init__(self, pts: int, qts: int, date: Optional[datetime], seq: int, unread_count: int):
"""
Constructor for updates.State: Instance of State.
"""
self.pts = pts
self.qts = qts
self.date = date
self.seq = seq
self.unread_count = unread_count
def to_dict(self):
return {
'_': 'State',
'pts': self.pts,
'qts': self.qts,
'date': self.date,
'seq': self.seq,
'unread_count': self.unread_count
}
def _bytes(self):
return b''.join((
b'>*l\xa5',
struct.pack('<i', self.pts),
struct.pack('<i', self.qts),
self.serialize_datetime(self.date),
struct.pack('<i', self.seq),
struct.pack('<i', self.unread_count),
))
@classmethod
def from_reader(cls, reader):
_pts = reader.read_int()
_qts = reader.read_int()
_date = reader.tgread_date()
_seq = reader.read_int()
_unread_count = reader.read_int()
return cls(pts=_pts, qts=_qts, date=_date, seq=_seq, unread_count=_unread_count) | PypiClean |
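# Usage sketch (hypothetical values; not part of the generated code):
# the TL types above are plain containers, so they can be constructed
# directly and inspected with `to_dict`.
def _example_state_to_dict():
    state = State(pts=1, qts=0, date=None, seq=0, unread_count=0)
    return state.to_dict()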
/MLStudio-0.1.15.tar.gz/MLStudio-0.1.15/mlstudio/supervised/machine_learning/linear_regression.py | from abc import ABC, abstractmethod
import numpy as np
from sklearn.base import BaseEstimator, RegressorMixin
from sklearn.utils.validation import check_array
from mlstudio.supervised.machine_learning.base import BaseRegressor
# --------------------------------------------------------------------------- #
# LINEAR REGRESSION #
# --------------------------------------------------------------------------- #
class LinearRegression(BaseRegressor, RegressorMixin):
"""Linear Regression algorithm."""
def __init__(self):
pass
@property
def name(self):
return "Linear Regression"
@property
def task(self):
return "Regression"
def compute_output(self, X, theta):
"""Computes output based upon inputs and model parameters.
Parameters
----------
X : array of shape [n_samples, n_features]
The model inputs. Note that the number of features includes the
column of ones added for the bias term.
theta : array of shape [n_features,] or [n_features, n_classes]
Model parameters
Returns
-------
output : Model output
"""
return X.dot(theta)
def predict(self, X, theta):
"""Computes the prediction as linear combination of inputs and parameters.
        Parameters
        ----------
X : array of shape [n_samples, n_features]
The model inputs.
theta : array of shape [n_features,]
Model parameters
Note: n_features may or may not include the bias term added prior to
training, so we will need to accommodate X of either dimension.
Returns
-------
prediction : Linear combination of inputs.
"""
if X.shape[1] == len(theta):
y_pred = X.dot(theta)
else:
y_pred = theta[0] + X.dot(theta[1:])
return y_pred
def compute_cost(self, y, y_out, theta):
"""Computes the mean squared error cost.
Parameters
----------
        y : array of shape (n_samples,)
            Ground truth target values
        y_out : array of shape (n_samples,)
Output from the model
theta : array of shape (n_features,)
The model parameters
Returns
-------
cost : The quadratic cost
"""
J = np.mean(0.5 * (y-y_out)**2)
return J
def compute_gradient(self, X, y, y_out, theta):
"""Computes quadratic costs gradient with respect to weights.
Parameters
----------
X : array of shape (m_observations, n_features)
Input data
y : array of shape (n_features,)
Ground truth target values
y_out : array of shape (n_features,)
Output from the model
theta : array of shape (n_features,)
The model parameters
Returns
-------
gradient of the cost function w.r.t. the parameters.
"""
n_samples = X.shape[0]
dZ = y_out-y
dW = float(1./n_samples) * X.T.dot(dZ)
return(dW)
# --------------------------------------------------------------------------- #
# LASSO REGRESSION #
# --------------------------------------------------------------------------- #
class LassoRegression(LinearRegression):
"""Lasso Regression algorithm."""
def __init__(self, lambda_reg=0.0001):
self.lambda_reg = lambda_reg
@property
def name(self):
return "Lasso Regression"
def compute_cost(self, y, y_out, theta):
"""Computes the mean squared error cost.
Parameters
----------
        y : array of shape (n_samples,)
            Ground truth target values
        y_out : array of shape (n_samples,)
Output from the model
theta : array of shape (n_features,)
The model parameters
Returns
-------
cost : The quadratic cost
"""
self._validate_hyperparam(self.lambda_reg)
n_samples = y.shape[0]
J_reg = (self.lambda_reg / n_samples) * np.linalg.norm(theta, ord=1)
J = np.mean(0.5 * (y-y_out)**2) + J_reg
return J
def compute_gradient(self, X, y, y_out, theta):
"""Computes quadratic costs gradient with respect to weights.
Parameters
----------
X : array of shape (m_observations, n_features)
Input data
y : array of shape (n_features,)
Ground truth target values
y_out : array of shape (n_features,)
Output from the model
theta : array of shape (n_features,)
The model parameters
Returns
-------
gradient of the cost function w.r.t. the parameters.
"""
n_samples = X.shape[0]
dZ = y_out-y
dW = 1/n_samples * (X.T.dot(dZ) + self.lambda_reg * np.sign(theta))
return(dW)
# --------------------------------------------------------------------------- #
# RIDGE REGRESSION #
# --------------------------------------------------------------------------- #
class RidgeRegression(LinearRegression):
"""Ridge Regression algorithm."""
def __init__(self, lambda_reg=0.0001):
self.lambda_reg=lambda_reg
@property
def name(self):
return "Ridge Regression"
def compute_cost(self, y, y_out, theta):
"""Computes the mean squared error cost.
Parameters
----------
        y : array of shape (n_samples,)
            Ground truth target values
        y_out : array of shape (n_samples,)
Output from the model
theta : array of shape (n_features,)
The model parameters
Returns
-------
cost : The quadratic cost
"""
self._validate_hyperparam(self.lambda_reg)
n_samples = y.shape[0]
J_reg = (self.lambda_reg / (2*n_samples)) * np.linalg.norm(theta)**2
J = np.mean(0.5 * (y-y_out)**2) + J_reg
return J
def compute_gradient(self, X, y, y_out, theta):
"""Computes quadratic costs gradient with respect to weights.
Parameters
----------
X : array of shape (m_observations, n_features)
Input data
y : array of shape (n_features,)
Ground truth target values
y_out : array of shape (n_features,)
Output from the model
theta : array of shape (n_features,)
The model parameters
Returns
-------
gradient of the cost function w.r.t. the parameters.
"""
n_samples = X.shape[0]
dZ = y_out-y
dW = 1/n_samples * (X.T.dot(dZ) + self.lambda_reg * theta)
return(dW)
# --------------------------------------------------------------------------- #
# ELASTIC NET REGRESSION #
# --------------------------------------------------------------------------- #
class ElasticNetRegression(LinearRegression):
"""Elastic Net Regression algorithm."""
def __init__(self, lambda_reg=0.0001, ratio=0.15):
self.lambda_reg=lambda_reg
self.ratio=ratio
@property
def name(self):
return "ElasticNet Regression"
def compute_cost(self, y, y_out, theta):
"""Computes the mean squared error cost.
Parameters
----------
        y : array of shape (n_samples,)
            Ground truth target values
        y_out : array of shape (n_samples,)
Output from the model
theta : array of shape (n_features,)
The model parameters
Returns
-------
cost : The quadratic cost
"""
n_samples = y.shape[0]
self._validate_hyperparam(self.lambda_reg)
self._validate_hyperparam(self.ratio)
l1_contr = self.ratio * np.linalg.norm(theta, ord=1)
l2_contr = (1 - self.ratio) * 0.5 * np.linalg.norm(theta)**2
J_reg = float(1./n_samples) * self.lambda_reg * (l1_contr + l2_contr)
J = np.mean(0.5 * (y-y_out)**2) + J_reg
return J
def compute_gradient(self, X, y, y_out, theta):
"""Computes quadratic costs gradient with respect to weights.
Parameters
----------
X : array of shape (m_observations, n_features)
Input data
y : array of shape (n_features,)
Ground truth target values
y_out : array of shape (n_features,)
Output from the model
theta : array of shape (n_features,)
The model parameters
Returns
-------
gradient of the cost function w.r.t. the parameters.
"""
n_samples = X.shape[0]
l1_contr = self.ratio * np.sign(theta)
l2_contr = (1 - self.ratio) * theta
lambda_reg = np.asarray(self.lambda_reg, dtype='float64')
dZ = y_out-y
dW = 1/n_samples * (X.T.dot(dZ) + np.multiply(lambda_reg, np.add(l1_contr, l2_contr)))
return(dW) | PypiClean |
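# The usage sketch below is illustrative only: it exercises compute_output,
# compute_cost and compute_gradient with a hand-rolled batch gradient-descent
# loop; the synthetic data and learning rate are not part of the library.
if __name__ == "__main__":
    rng = np.random.RandomState(0)
    X = np.hstack([np.ones((100, 1)), rng.rand(100, 2)])        # bias column first
    true_theta = np.array([1.0, 2.0, -3.0])
    y = X.dot(true_theta) + 0.1 * rng.randn(100)
    est = LinearRegression()
    theta = np.zeros(X.shape[1])
    for _ in range(500):                                        # plain batch gradient descent
        y_out = est.compute_output(X, theta)
        theta = theta - 0.1 * est.compute_gradient(X, y, y_out, theta)
    print("final cost:", est.compute_cost(y, y_out, theta))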
/Lavapy-1.6.1-py3-none-any.whl/lavapy/events.py | from __future__ import annotations
from typing import TYPE_CHECKING, Any, Dict, Optional
if TYPE_CHECKING:
from .player import Player
from .pool import Node
from .tracks import Track
__all__ = ("LavapyEvent",
"TrackStartEvent",
"TrackEndEvent",
"TrackExceptionEvent",
"TrackStuckEvent",
"WebsocketOpenEvent",
"WebsocketClosedEvent")
class LavapyEvent:
"""
Base Lavapy event. Every event inherits from this.
    If you want to listen to these events, use the :meth:`discord.ext.commands.Bot.listen` decorator.
Parameters
----------
event: str
The event name.
player: Optional[Player]
A Lavapy player object.
"""
def __init__(self, event: str, player: Optional[Player]) -> None:
self._event: str = event
self._payload: Dict[str, Any] = {}
if player is not None:
self._payload["player"] = player
def __repr__(self) -> str:
return f"<Lavapy LavapyEvent (Payload={self.payload})>"
@property
def event(self) -> str:
"""Returns the event name."""
return self._event
@property
def payload(self) -> Dict[str, Any]:
"""Returns a dict containing the payload sent to discord.py. This must be parsed to `**kwargs`."""
return self._payload
class TrackStartEvent(LavapyEvent):
"""
Fired when a :class:`Track` starts playing. This can be listened to with:
.. code-block:: python
@bot.listen()
async def on_lavapy_track_start(player, track):
...
Parameters
----------
player: Player
A Lavapy player object.
track: Track
A Lavapy track object.
"""
def __init__(self, player: Player, track: Track) -> None:
super().__init__("track_start", player)
self._payload["track"] = track
def __repr__(self) -> str:
return f"<Lavapy TrackStartEvent (Payload={self.payload})>"
class TrackEndEvent(LavapyEvent):
"""
Fired when a :class:`Track` stops playing. This can be listened to with:
.. code-block:: python
@bot.listen()
async def on_lavapy_track_end(player, track, reason):
...
Parameters
----------
player: Player
A Lavapy player object.
track: Track
A Lavapy track object.
data: Dict[str, Any]
The raw event data.
"""
def __init__(self, player: Player, track: Track, data: Dict[str, Any]) -> None:
super().__init__("track_end", player)
self._payload["track"] = track
self._payload["reason"] = data["reason"]
def __repr__(self) -> str:
return f"<Lavapy TrackStopEvent (Payload={self.payload})>"
class TrackExceptionEvent(LavapyEvent):
"""
Fired when a :class:`Track` error has occurred in Lavalink. This can be listened to with:
.. code-block:: python
@bot.listen()
async def on_lavapy_track_exception(player, track, exception):
...
Parameters
----------
player: Player
A Lavapy player object.
track: Track
A Lavapy track object.
data: Dict[str, Any]
The raw event data.
"""
def __init__(self, player: Player, track: Track, data: Dict[str, Any]) -> None:
super().__init__("track_exception", player)
self._payload["track"] = track
if data.get("error"):
# User is running Lavalink <= 3.3
self._payload["exception"] = data["error"]
else:
# User is running Lavalink >= 3.4
self._payload["exception"] = data["exception"]
def __repr__(self) -> str:
return f"<Lavapy TrackExceptionEvent (Payload={self.payload})>"
class TrackStuckEvent(LavapyEvent):
"""
Fired when a :class:`Track` is stuck and cannot be played. This can be listened to with:
.. code-block:: python
@bot.listen()
async def on_lavapy_track_stuck(player, track, threshold):
pass
Parameters
----------
player: Player
A Lavapy player object.
track: Track
A Lavapy track object.
data: Dict[str, Any]
The raw event data.
"""
def __init__(self, player: Player, track: Track, data: Dict[str, Any]) -> None:
super().__init__("track_stuck", player)
self._payload["track"] = track
self._payload["threshold"] = data["thresholdMs"]
def __repr__(self) -> str:
return f"<Lavapy TrackStuckEvent (Payload={self.payload})>"
class WebsocketOpenEvent(LavapyEvent):
"""
Fired when a websocket connection to a :class:`Node` is open. This can be listened to with:
.. code-block:: python
@bot.listen()
async def on_lavapy_websocket_open(node):
pass
Parameters
----------
node: lavapy.pool.Node
A Lavapy node object.
"""
def __init__(self, node: Node) -> None:
super().__init__("websocket_open", None)
self._payload["node"] = node
def __repr__(self) -> str:
return f"<Lavapy WebsocketOpenEvent (Payload={self.payload})>"
class WebsocketClosedEvent(LavapyEvent):
"""
Fired when a websocket connection to a :class:`Node` is closed. This can be listened to with:
.. code-block:: python
@bot.listen()
async def on_lavapy_websocket_closed(node, reason, code, byRemote):
pass
Parameters
----------
node: lavapy.pool.Node
A Lavapy node object.
data: Dict[str, Any]
The raw event data.
"""
def __init__(self, node: Node, data: Dict[str, Any]) -> None:
super().__init__("websocket_closed", None)
self._payload["node"] = node
self._payload["reason"] = data["reason"]
self._payload["code"] = data["code"]
self._payload["byRemote"] = data["byRemote"]
def __repr__(self) -> str:
return f"<Lavapy WebsocketClosedEvent (Payload={self.payload})>" | PypiClean |
/Booktype-1.5.tar.gz/Booktype-1.5/lib/booki/site_static/js/tiny_mce/themes/advanced/js/about.js | tinyMCEPopup.requireLangPack();
function init() {
var ed, tcont;
tinyMCEPopup.resizeToInnerSize();
ed = tinyMCEPopup.editor;
// Give FF some time
window.setTimeout(insertHelpIFrame, 10);
tcont = document.getElementById('plugintablecontainer');
document.getElementById('plugins_tab').style.display = 'none';
var html = "";
html += '<table id="plugintable">';
html += '<thead>';
html += '<tr>';
html += '<td>' + ed.getLang('advanced_dlg.about_plugin') + '</td>';
html += '<td>' + ed.getLang('advanced_dlg.about_author') + '</td>';
html += '<td>' + ed.getLang('advanced_dlg.about_version') + '</td>';
html += '</tr>';
html += '</thead>';
html += '<tbody>';
tinymce.each(ed.plugins, function(p, n) {
var info;
if (!p.getInfo)
return;
html += '<tr>';
info = p.getInfo();
if (info.infourl != null && info.infourl != '')
html += '<td width="50%" title="' + n + '"><a href="' + info.infourl + '" target="_blank">' + info.longname + '</a></td>';
else
html += '<td width="50%" title="' + n + '">' + info.longname + '</td>';
if (info.authorurl != null && info.authorurl != '')
html += '<td width="35%"><a href="' + info.authorurl + '" target="_blank">' + info.author + '</a></td>';
else
html += '<td width="35%">' + info.author + '</td>';
html += '<td width="15%">' + info.version + '</td>';
html += '</tr>';
document.getElementById('plugins_tab').style.display = '';
});
html += '</tbody>';
html += '</table>';
tcont.innerHTML = html;
tinyMCEPopup.dom.get('version').innerHTML = tinymce.majorVersion + "." + tinymce.minorVersion;
tinyMCEPopup.dom.get('date').innerHTML = tinymce.releaseDate;
}
function insertHelpIFrame() {
var html;
if (tinyMCEPopup.getParam('docs_url')) {
html = '<iframe width="100%" height="300" src="' + tinyMCEPopup.editor.baseURI.toAbsolute(tinyMCEPopup.getParam('docs_url')) + '"></iframe>';
document.getElementById('iframecontainer').innerHTML = html;
document.getElementById('help_tab').style.display = 'block';
document.getElementById('help_tab').setAttribute("aria-hidden", "false");
}
}
tinyMCEPopup.onInit.add(init); | PypiClean |
/OASYS1-PaNOSC-0.3.2.tar.gz/OASYS1-PaNOSC-0.3.2/orangecontrib/panosc/util/gui/ow_esrf_widget.py | import sys
import numpy, matplotlib
from silx.gui.plot import Plot2D
from silx.gui.plot.StackView import StackViewMainWindow
from PyQt5 import QtGui, QtWidgets
from PyQt5.QtCore import QRect
from PyQt5.QtWidgets import QApplication
from matplotlib.backends.backend_qt5agg import FigureCanvasQTAgg as FigureCanvas
from srxraylib.plot import gol
from orangewidget import gui
from orangewidget.settings import Setting
from orangewidget.widget import OWAction
from oasys.widgets import widget
from oasys.widgets import gui as oasysgui
from oasys.widgets.exchange import DataExchangeObject
from oasys.util.oasys_util import EmittingStream
class ESRFWidget(widget.OWWidget):
author = "Luca Rebuffi"
maintainer_email = "[email protected]"
outputs = [{"name": "esrf_data",
"type": DataExchangeObject,
"doc": ""}]
IMAGE_WIDTH = 760
IMAGE_HEIGHT = 545
MAX_WIDTH = 1320
MAX_HEIGHT = 700
CONTROL_AREA_WIDTH = 405
TABS_AREA_HEIGHT = 560
view_type=Setting(1)
calculated_data = None
want_main_area = 1
def __init__(self):
super().__init__()
self.runaction = OWAction("Compute", self)
self.runaction.triggered.connect(self.compute)
self.addAction(self.runaction)
geom = QApplication.desktop().availableGeometry()
self.setGeometry(QRect(round(geom.width()*0.05),
round(geom.height()*0.05),
round(min(geom.width()*0.98, self.MAX_WIDTH)),
round(min(geom.height()*0.95, self.MAX_HEIGHT))))
self.setMaximumHeight(self.geometry().height())
self.setMaximumWidth(self.geometry().width())
self.controlArea.setFixedWidth(self.CONTROL_AREA_WIDTH)
box0 = gui.widgetBox(self.controlArea, "", orientation="horizontal")
#widget buttons: compute, set defaults, help
gui.button(box0, self, "Compute", callback=self.compute)
gui.button(box0, self, "Defaults", callback=self.defaults)
gui.button(box0, self, "Help", callback=self.help1)
gui.separator(self.controlArea, height=10)
self.build_gui()
gui.rubber(self.controlArea)
self.main_tabs = oasysgui.tabWidget(self.mainArea)
plot_tab = oasysgui.createTabPage(self.main_tabs, "Results")
out_tab = oasysgui.createTabPage(self.main_tabs, "Output")
view_box = oasysgui.widgetBox(plot_tab, "Results Options", addSpace=False, orientation="horizontal")
view_box_1 = oasysgui.widgetBox(view_box, "", addSpace=False, orientation="vertical", width=350)
self.view_type_combo = gui.comboBox(view_box_1, self, "view_type", label="View Results",
labelWidth=220,
items=["No", "Yes"],
callback=self.set_ViewType, sendSelectedValue=False, orientation="horizontal")
self.tab = []
self.tabs = oasysgui.tabWidget(plot_tab)
self.initializeTabs()
self.esrf_output = QtWidgets.QTextEdit()
self.esrf_output.setReadOnly(True)
out_box = gui.widgetBox(out_tab, "System Output", addSpace=True, orientation="horizontal")
out_box.layout().addWidget(self.esrf_output)
self.esrf_output.setFixedHeight(600)
self.esrf_output.setFixedWidth(600)
self.current_tab = -1
gui.rubber(self.mainArea)
def build_gui(self):
pass
def initializeTabs(self):
size = len(self.tab)
indexes = range(0, size)
for index in indexes:
self.tabs.removeTab(size-1-index)
titles = self.getTitles()
self.tab = []
self.plot_canvas = []
for index in range(0, len(titles)):
self.tab.append(oasysgui.createTabPage(self.tabs, titles[index]))
self.plot_canvas.append(None)
for tab in self.tab:
tab.setFixedHeight(self.IMAGE_HEIGHT)
tab.setFixedWidth(self.IMAGE_WIDTH)
def getDefaultPlotTabIndex(self):
return -1
def getTitles(self):
return ["Calculation Result"]
def getXTitles(self):
return ["Energy [eV]"]
def getYTitles(self):
return ["X [$\mu$m]"]
def getVariablesToPlot(self):
return [(0, 1)]
def getLogPlot(self):
return [(False, False)]
def set_ViewType(self):
self.progressBarInit()
        if self.calculated_data is not None:
try:
self.initializeTabs()
self.plot_results(self.calculated_data)
except Exception as exception:
QtWidgets.QMessageBox.critical(self, "Error",
str(exception),
QtWidgets.QMessageBox.Ok)
self.progressBarFinished()
def plot_results(self, calculated_data, progressBarValue=80):
        if self.view_type != 0:
            if calculated_data is not None:
current_index = self.tabs.currentIndex()
self.view_type_combo.setEnabled(False)
esrf_data = calculated_data.get_content("esrf_data")
titles = self.getTitles()
xtitles = self.getXTitles()
ytitles = self.getYTitles()
progress_bar_step = (100-progressBarValue)/len(titles)
for index in range(0, len(titles)):
x_index, y_index = self.getVariablesToPlot()[index]
log_x, log_y = self.getLogPlot()[index]
try:
self.plot_histo(esrf_data[:, x_index],
esrf_data[:, y_index],
progressBarValue + ((index+1)*progress_bar_step),
tabs_canvas_index=index,
plot_canvas_index=index,
title=titles[index],
xtitle=xtitles[index],
ytitle=ytitles[index],
log_x=log_x,
log_y=log_y)
# self.tabs.setCurrentIndex(index)
except Exception as e:
self.view_type_combo.setEnabled(True)
raise Exception("Data not plottable: bad content\n" + str(e))
self.view_type_combo.setEnabled(True)
try:
self.tabs.setCurrentIndex(current_index)
except:
if self.getDefaultPlotTabIndex() == -1:
self.tabs.setCurrentIndex(len(titles) - 1)
else:
self.tabs.setCurrentIndex(self.getDefaultPlotTabIndex())
else:
raise Exception("Empty Data")
def writeStdOut(self, text):
cursor = self.esrf_output.textCursor()
cursor.movePosition(QtGui.QTextCursor.End)
cursor.insertText(text)
self.esrf_output.setTextCursor(cursor)
self.esrf_output.ensureCursorVisible()
def plot_histo(self, x, y, progressBarValue, tabs_canvas_index, plot_canvas_index, title="", xtitle="", ytitle="",
log_x=False, log_y=False, color='blue', replace=True, control=False):
if self.plot_canvas[plot_canvas_index] is None:
self.plot_canvas[plot_canvas_index] = oasysgui.plotWindow(parent=None,
backend=None,
resetzoom=True,
autoScale=False,
logScale=True,
grid=True,
curveStyle=True,
colormap=False,
aspectRatio=False,
yInverted=False,
copy=True,
save=True,
print_=True,
control=control,
position=True,
roi=False,
mask=False,
fit=False)
self.plot_canvas[plot_canvas_index].setDefaultPlotLines(True)
self.plot_canvas[plot_canvas_index].setActiveCurveColor(color="#00008B")
self.plot_canvas[plot_canvas_index].setGraphXLabel(xtitle)
self.plot_canvas[plot_canvas_index].setGraphYLabel(ytitle)
self.tab[tabs_canvas_index].layout().addWidget(self.plot_canvas[plot_canvas_index])
ESRFWidget.plot_histo_2(self.plot_canvas[plot_canvas_index], x, y, title, xtitle, ytitle, color, replace)
self.plot_canvas[plot_canvas_index].setXAxisLogarithmic(log_x)
self.plot_canvas[plot_canvas_index].setYAxisLogarithmic(log_y)
if min(y) < 0:
if log_y:
self.plot_canvas[plot_canvas_index].setGraphYLimits(min(y)*1.2, max(y)*1.2)
else:
self.plot_canvas[plot_canvas_index].setGraphYLimits(min(y)*1.01, max(y)*1.01)
else:
if log_y:
self.plot_canvas[plot_canvas_index].setGraphYLimits(min(y), max(y)*1.2)
else:
self.plot_canvas[plot_canvas_index].setGraphYLimits(min(y), max(y)*1.01)
self.progressBarSet(progressBarValue)
def plot_data1D(self, dataX, dataY, tabs_canvas_index, plot_canvas_index, title="", xtitle="", ytitle=""):
self.tab[tabs_canvas_index].layout().removeItem(self.tab[tabs_canvas_index].layout().itemAt(0))
self.plot_canvas[plot_canvas_index] = oasysgui.plotWindow()
self.plot_canvas[plot_canvas_index].addCurve(dataX, dataY,)
self.plot_canvas[plot_canvas_index].resetZoom()
self.plot_canvas[plot_canvas_index].setXAxisAutoScale(True)
self.plot_canvas[plot_canvas_index].setYAxisAutoScale(True)
self.plot_canvas[plot_canvas_index].setGraphGrid(False)
self.plot_canvas[plot_canvas_index].setXAxisLogarithmic(False)
self.plot_canvas[plot_canvas_index].setYAxisLogarithmic(False)
self.plot_canvas[plot_canvas_index].setGraphXLabel(xtitle)
self.plot_canvas[plot_canvas_index].setGraphYLabel(ytitle)
self.plot_canvas[plot_canvas_index].setGraphTitle(title)
self.tab[tabs_canvas_index].layout().addWidget(self.plot_canvas[plot_canvas_index])
def plot_data2D(self, data2D, dataX, dataY, tabs_canvas_index, plot_canvas_index, title="", xtitle="", ytitle="", mode=2):
for i in range(1+self.tab[tabs_canvas_index].layout().count()):
self.tab[tabs_canvas_index].layout().removeItem(self.tab[tabs_canvas_index].layout().itemAt(i))
if mode == 0:
figure = FigureCanvas(gol.plot_image(data2D,
dataX,
dataY,
xtitle=xtitle,
ytitle=ytitle,
title=title,
show=False,
aspect='auto'))
self.plot_canvas[plot_canvas_index] = figure
else:
origin = (dataX[0],dataY[0])
scale = (dataX[1]-dataX[0],dataY[1]-dataY[0])
data_to_plot = data2D.T
colormap = {"name":"temperature", "normalization":"linear", "autoscale":True, "vmin":0, "vmax":0, "colors":256}
if mode == 1:
#TODO: delete: srio commented this part as it is never used
raise Exception("Cannot use ImageView")
elif mode == 2:
self.plot_canvas[plot_canvas_index] = Plot2D()
self.plot_canvas[plot_canvas_index].resetZoom()
self.plot_canvas[plot_canvas_index].setXAxisAutoScale(True)
self.plot_canvas[plot_canvas_index].setYAxisAutoScale(True)
self.plot_canvas[plot_canvas_index].setGraphGrid(False)
self.plot_canvas[plot_canvas_index].setKeepDataAspectRatio(True)
self.plot_canvas[plot_canvas_index].yAxisInvertedAction.setVisible(False)
self.plot_canvas[plot_canvas_index].setXAxisLogarithmic(False)
self.plot_canvas[plot_canvas_index].setYAxisLogarithmic(False)
#silx 0.4.0
self.plot_canvas[plot_canvas_index].getMaskAction().setVisible(False)
self.plot_canvas[plot_canvas_index].getRoiAction().setVisible(False)
self.plot_canvas[plot_canvas_index].getColormapAction().setVisible(False)
self.plot_canvas[plot_canvas_index].setKeepDataAspectRatio(False)
self.plot_canvas[plot_canvas_index].addImage(numpy.array(data_to_plot),
legend="zio billy",
scale=scale,
origin=origin,
colormap=colormap,
replace=True)
self.plot_canvas[plot_canvas_index].setActiveImage("zio billy")
self.plot_canvas[plot_canvas_index].setGraphXLabel(xtitle)
self.plot_canvas[plot_canvas_index].setGraphYLabel(ytitle)
self.plot_canvas[plot_canvas_index].setGraphTitle(title)
self.tab[tabs_canvas_index].layout().addWidget(self.plot_canvas[plot_canvas_index])
def plot_data3D(self, data3D, dataE, dataX, dataY, tabs_canvas_index, plot_canvas_index, title="", xtitle="", ytitle=""):
for i in range(1+self.tab[tabs_canvas_index].layout().count()):
self.tab[tabs_canvas_index].layout().removeItem(self.tab[tabs_canvas_index].layout().itemAt(i))
#self.tab[tabs_canvas_index].layout().removeItem(self.tab[tabs_canvas_index].layout().itemAt(0))
xmin = numpy.min(dataX)
xmax = numpy.max(dataX)
ymin = numpy.min(dataY)
ymax = numpy.max(dataY)
stepX = dataX[1]-dataX[0]
stepY = dataY[1]-dataY[0]
if len(dataE) > 1: stepE = dataE[1]-dataE[0]
else: stepE = 1.0
if stepE == 0.0: stepE = 1.0
if stepX == 0.0: stepX = 1.0
if stepY == 0.0: stepY = 1.0
dim0_calib = (dataE[0],stepE)
dim1_calib = (ymin, stepY)
dim2_calib = (xmin, stepX)
data_to_plot = numpy.swapaxes(data3D,1,2)
colormap = {"name":"temperature", "normalization":"linear", "autoscale":True, "vmin":0, "vmax":0, "colors":256}
self.plot_canvas[plot_canvas_index] = StackViewMainWindow()
self.plot_canvas[plot_canvas_index].setGraphTitle(title)
self.plot_canvas[plot_canvas_index].setLabels(["Photon Energy [eV]",ytitle,xtitle])
self.plot_canvas[plot_canvas_index].setColormap(colormap=colormap)
self.plot_canvas[plot_canvas_index].setStack(numpy.array(data_to_plot),
calibrations=[dim0_calib, dim1_calib, dim2_calib] )
self.tab[tabs_canvas_index].layout().addWidget(self.plot_canvas[plot_canvas_index])
@classmethod
def plot_histo_2(cls, plot_window, x, y, title, xtitle, ytitle, color='blue', replace=True):
matplotlib.rcParams['axes.formatter.useoffset']='False'
plot_window.addCurve(x, y, title, symbol='', color=color, xlabel=xtitle, ylabel=ytitle, replace=replace) #'+', '^', ','
        if xtitle is not None: plot_window.setGraphXLabel(xtitle)
        if ytitle is not None: plot_window.setGraphYLabel(ytitle)
        if title is not None: plot_window.setGraphTitle(title)
# plot_window.setDrawModeEnabled(True, 'rectangle')
plot_window.setInteractiveMode('zoom',color='orange')
plot_window.resetZoom()
plot_window.replot()
plot_window.setActiveCurve(title)
def compute(self):
self.setStatusMessage("Running Command")
self.progressBarInit()
try:
self.esrf_output.setText("")
sys.stdout = EmittingStream(textWritten=self.writeStdOut)
self.progressBarSet(20)
self.check_fields()
calculation_output = self.do_calculation()
self.progressBarSet(50)
if calculation_output is None:
raise Exception("No result")
else:
self.calculated_data = self.extract_data_from_calculation_output(calculation_output)
self.add_specific_content_to_calculated_data(self.calculated_data)
self.setStatusMessage("Plotting Results")
self.plot_results(self.calculated_data, progressBarValue=60)
self.setStatusMessage("")
self.send("esrf_data", self.calculated_data)
except Exception as exception:
QtWidgets.QMessageBox.critical(self, "Error",
str(exception), QtWidgets.QMessageBox.Ok)
self.setStatusMessage("Error!")
if self.IS_DEVELOP: raise exception
self.progressBarFinished()
def defaults(self):
self.resetSettings()
def help1(self):
raise Exception("This method should be reimplementd in subclasses!")
def get_help_name(self):
raise Exception("This method should be reimplementd in subclasses!")
def check_fields(self):
raise Exception("This method should be reimplementd in subclasses!")
def do_calculation(self):
raise Exception("This method should be reimplementd in subclasses!")
def extract_data_from_calculation_output(self, calculation_output):
calculated_data = DataExchangeObject("ESRF", self.get_data_exchange_widget_name())
calculated_data.add_content("esrf_data", calculation_output)
return calculated_data
def get_data_exchange_widget_name(self):
raise Exception("This method should be reimplementd in subclasses!")
def add_specific_content_to_calculated_data(self, calculated_data):
pass
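# Subclassing sketch (illustrative only): a concrete widget overrides the hooks
# that raise above. The calculation, titles and exchange-widget name below are
# made up and not part of this package.
#
#   class DummyWidget(ESRFWidget):
#       def build_gui(self): pass
#       def check_fields(self): pass
#       def do_calculation(self):
#           energies = numpy.linspace(1e3, 1e4, 100)
#           return numpy.vstack((energies, energies ** -2)).T
#       def get_data_exchange_widget_name(self): return "DUMMY"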
if __name__ == "__main__":
a = QApplication(sys.argv)
ow = ESRFWidget()
ow.show()
a.exec_()
ow.saveSettings() | PypiClean |
/Django-Pizza-16.10.1.tar.gz/Django-Pizza-16.10.1/pizza/kitchen_sink/static/ks/ckeditor/plugins/specialchar/dialogs/lang/zh-cn.js | /*
Copyright (c) 2003-2013, CKSource - Frederico Knabben. All rights reserved.
For licensing, see LICENSE.md or http://ckeditor.com/license
*/
CKEDITOR.plugins.setLang("specialchar","zh-cn",{euro:"欧元符号",lsquo:"左单引号",rsquo:"右单引号",ldquo:"左双引号",rdquo:"右双引号",ndash:"短划线",mdash:"长划线",iexcl:"竖翻叹号",cent:"分币符号",pound:"英镑符号",curren:"货币符号",yen:"日元符号",brvbar:"间断条",sect:"节标记",uml:"分音符",copy:"版权所有标记",ordf:"阴性顺序指示符",laquo:"左指双尖引号",not:"非标记",reg:"注册标记",macr:"长音符",deg:"度标记",sup2:"上标二",sup3:"上标三",acute:"锐音符",micro:"微符",para:"段落标记",middot:"中间点",cedil:"下加符",sup1:"上标一",ordm:"阳性顺序指示符",raquo:"右指双尖引号",frac14:"普通分数四分之一",frac12:"普通分数二分之一",frac34:"普通分数四分之三",iquest:"竖翻问号",
Agrave:"带抑音符的拉丁文大写字母 A",Aacute:"带锐音符的拉丁文大写字母 A",Acirc:"带扬抑符的拉丁文大写字母 A",Atilde:"带颚化符的拉丁文大写字母 A",Auml:"带分音符的拉丁文大写字母 A",Aring:"带上圆圈的拉丁文大写字母 A",AElig:"拉丁文大写字母 Ae",Ccedil:"带下加符的拉丁文大写字母 C",Egrave:"带抑音符的拉丁文大写字母 E",Eacute:"带锐音符的拉丁文大写字母 E",Ecirc:"带扬抑符的拉丁文大写字母 E",Euml:"带分音符的拉丁文大写字母 E",Igrave:"带抑音符的拉丁文大写字母 I",Iacute:"带锐音符的拉丁文大写字母 I",Icirc:"带扬抑符的拉丁文大写字母 I",Iuml:"带分音符的拉丁文大写字母 I",ETH:"拉丁文大写字母 Eth",Ntilde:"带颚化符的拉丁文大写字母 N",Ograve:"带抑音符的拉丁文大写字母 O",Oacute:"带锐音符的拉丁文大写字母 O",Ocirc:"带扬抑符的拉丁文大写字母 O",Otilde:"带颚化符的拉丁文大写字母 O",
Ouml:"带分音符的拉丁文大写字母 O",times:"乘号",Oslash:"带粗线的拉丁文大写字母 O",Ugrave:"带抑音符的拉丁文大写字母 U",Uacute:"带锐音符的拉丁文大写字母 U",Ucirc:"带扬抑符的拉丁文大写字母 U",Uuml:"带分音符的拉丁文大写字母 U",Yacute:"带抑音符的拉丁文大写字母 Y",THORN:"拉丁文大写字母 Thorn",szlig:"拉丁文小写字母清音 S",agrave:"带抑音符的拉丁文小写字母 A",aacute:"带锐音符的拉丁文小写字母 A",acirc:"带扬抑符的拉丁文小写字母 A",atilde:"带颚化符的拉丁文小写字母 A",auml:"带分音符的拉丁文小写字母 A",aring:"带上圆圈的拉丁文小写字母 A",aelig:"拉丁文小写字母 Ae",ccedil:"带下加符的拉丁文小写字母 C",egrave:"带抑音符的拉丁文小写字母 E",eacute:"带锐音符的拉丁文小写字母 E",ecirc:"带扬抑符的拉丁文小写字母 E",euml:"带分音符的拉丁文小写字母 E",igrave:"带抑音符的拉丁文小写字母 I",
iacute:"带锐音符的拉丁文小写字母 I",icirc:"带扬抑符的拉丁文小写字母 I",iuml:"带分音符的拉丁文小写字母 I",eth:"拉丁文小写字母 Eth",ntilde:"带颚化符的拉丁文小写字母 N",ograve:"带抑音符的拉丁文小写字母 O",oacute:"带锐音符的拉丁文小写字母 O",ocirc:"带扬抑符的拉丁文小写字母 O",otilde:"带颚化符的拉丁文小写字母 O",ouml:"带分音符的拉丁文小写字母 O",divide:"除号",oslash:"带粗线的拉丁文小写字母 O",ugrave:"带抑音符的拉丁文小写字母 U",uacute:"带锐音符的拉丁文小写字母 U",ucirc:"带扬抑符的拉丁文小写字母 U",uuml:"带分音符的拉丁文小写字母 U",yacute:"带抑音符的拉丁文小写字母 Y",thorn:"拉丁文小写字母 Thorn",yuml:"带分音符的拉丁文小写字母 Y",OElig:"拉丁文大写连字 Oe",oelig:"拉丁文小写连字 Oe",372:"带扬抑符的拉丁文大写字母 W",374:"带扬抑符的拉丁文大写字母 Y",
373:"带扬抑符的拉丁文小写字母 W",375:"带扬抑符的拉丁文小写字母 Y",sbquo:"单下 9 形引号",8219:"单高横翻 9 形引号",bdquo:"双下 9 形引号",hellip:"水平省略号",trade:"商标标志",9658:"实心右指指针",bull:"加重号",rarr:"向右箭头",rArr:"向右双线箭头",hArr:"左右双线箭头",diams:"实心方块纸牌",asymp:"约等于"}); | PypiClean |
/Khayyam-3.0.17-cp27-cp27m-macosx_10_11_x86_64.whl/Khayyam-3.0.17.dist-info/DESCRIPTION.rst | khayyam
=======
.. image:: http://img.shields.io/pypi/v/khayyam.svg
:target: https://pypi.python.org/pypi/khayyam
.. image:: https://requires.io/github/pylover/khayyam/requirements.svg?branch=master
:target: https://requires.io/github/pylover/khayyam/requirements/?branch=master
:alt: Requirements Status
.. image:: https://travis-ci.org/pylover/khayyam.svg?branch=master
:target: https://travis-ci.org/pylover/khayyam
.. image:: https://coveralls.io/repos/github/pylover/khayyam/badge.svg?branch=master
:target: https://coveralls.io/github/pylover/khayyam?branch=master
.. image:: https://img.shields.io/badge/license-GPLv3-brightgreen.svg
:target: https://github.com/pylover/khayyam/blob/master/LICENSE
Jump To:
^^^^^^^^
* `Documentation <http://khayyam.dobisel.com>`_
* `Python package index <https://pypi.python.org/pypi/khayyam>`_
* `Source on github <https://github.com/pylover/khayyam>`_
* `Downloads <https://pypi.python.org/pypi/Khayyam#downloads>`_
Basic Usage
^^^^^^^^^^^
>>> from khayyam import *
>>> JalaliDate(1346, 12, 30)
khayyam.JalaliDate(1346, 12, 30, Chaharshanbeh)
>>> JalaliDatetime(989, 3, 25, 10, 43, 23, 345453)
khayyam.JalaliDatetime(989, 3, 25, 10, 43, 23, 345453, Seshanbeh)
Current date and time
^^^^^^^^^^^^^^^^^^^^^
>>> JalaliDatetime.now()
khayyam.JalaliDatetime(1394, 5, 18, 16, 4, 48, 628383, Yekshanbeh)
>>> print(JalaliDatetime.now(TehranTimezone()) - timedelta(days=6*30))
1393-11-02 20:01:11.663719+03:30
>>> print(JalaliDate.today())
1394-4-30
Parsing & Formatting
^^^^^^^^^^^^^^^^^^^^
>>> print(JalaliDatetime.now().strftime('%C'))
شنبه ۳ مرداد ۱۳۹۴ ۰۲:۳۷:۵۲ ب.ظ
>>> JalaliDatetime.strptime(u'چهارشنبه ۳۱ تیر ۱۳۹۴ ۰۵:۴۵:۴۰ ب.ظ', '%C')
khayyam.JalaliDatetime(1394, 4, 31, 17, 45, 40, 0, Chaharshanbeh)
Converting
^^^^^^^^^^
>>> from datetime import date, datetime
>>> JalaliDate(1394, 4, 31).todate()
datetime.date(2015, 7, 22)
>>> now = JalaliDatetime(1394, 4, 31, 15, 38, 6, 37269)
>>> now.todate()
datetime.date(2015, 7, 22)
>>> now.todatetime()
datetime.datetime(2015, 7, 22, 15, 38, 6, 37269)
>>> JalaliDatetime(datetime(2015, 7, 22, 14, 47, 9, 821830))
khayyam.JalaliDatetime(1394, 4, 31, 14, 47, 9, 821830, Chaharshanbeh)
>>> JalaliDatetime(datetime(2015, 7, 22, 14, 47, 9, 821830, TehranTimezone()))
khayyam.JalaliDatetime(1394, 4, 31, 14, 47, 9, 821830, tzinfo=+03:30 dst:60, Chaharshanbeh)
>>> JalaliDate(date(2015, 7, 22))
khayyam.JalaliDate(1394, 4, 31, Chaharshanbeh)
Arithmetics & Operators
^^^^^^^^^^^^^^^^^^^^^^^
>>> from datetime import timedelta
>>> from khayyam import JalaliDate, JalaliDatetime
>>> now = JalaliDatetime(1394, 4, 31, 16, 17, 31, 374398)
>>> now + timedelta(days=1)
khayyam.JalaliDatetime(1394, 5, 1, 16, 17, 31, 374398, Panjshanbeh)
>>> now + timedelta(seconds=3600)
khayyam.JalaliDatetime(1394, 4, 31, 17, 17, 31, 374398, Chaharshanbeh)
>>> now - timedelta(seconds=3600)
khayyam.JalaliDatetime(1394, 4, 31, 15, 17, 31, 374398, Chaharshanbeh)
>>> yesterday = now - timedelta(1)
>>> yesterday
khayyam.JalaliDatetime(1394, 4, 30, 16, 17, 31, 374398, Seshanbeh)
>>> now - yesterday
datetime.timedelta(1)
>>> JalaliDatetime.now() - now
datetime.timedelta(0, 478, 328833) # 478 seconds taken to writing this section
Comparison
^^^^^^^^^^
>>> now > yesterday
True
>>> now != yesterday
True
>>> now.todate() == yesterday.todate()
False
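Comparisons against the standard library types work through the conversion
methods as well (values here are illustrative):
>>> from datetime import date
>>> now.todate() == date(2015, 7, 22)
True
>>> yesterday.todate() < date(2015, 7, 23)
True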
Change Log
^^^^^^^^^^
* 3.0.15:
* BUGFIX: Removing `from sphinx.util import inspect` from jalali_datetime.py.
* 3.0.11
    * Almost all of the APIs are documented.
* 3.0.10
    * Importing JalaliDateFormatter & JalaliDatetimeFormatter in __init__.py
    * Also accept a callable for the tzinfo argument in JalaliDatetime.__init__(tzinfo=...).
    * JalaliDatetime APIs are well documented.
* 3.0.9
* Removing pypy from travis, due the travis bug: buffer overflow !
* 3.0.0
* Raising TypeError instead of ValueError on overloaded operators, if the given type is mismatch.
    * Fixing tests to pass on systems that are configured as UTC.
* 100% Code coverage
* Moving all tests to khayyam/tests
* Removing some uncovered, obsolete code lines
* 2.11.0
* Using pure C instead of cython. Now the C extension is as fast as possible.
* Including OSX in travis.yaml for python 2 & 3
* Including pypy 2 & 3 tests in travis.yaml
* 2.10.0
* Add english weekday format directive
* 2.9.8
* Registering on travis-ci.org
* Registering on coveralls.io
* Registering on requires.io
* 2.9.7
* Fixing problem in setup.py in python3 #15
* 2.9.3
* setup.py for windows
* 2.9.1
* Release !
* 2.9.1b2
* encoding all __repr__ functions
* 2.9.1b1 (2015-07-30)
* Fixing setup.py bug
* 2.9.1b0 (2015-07-30)
* Using any available C compiler if cython is not available.
* Using pure python if any error raised on installation with extension.
* 2.9.0b0 (2015-07-30)
    * All algorithms reimplemented using cython and static typing, so the calculation
      with the new C extension is much faster than the pure-Python implementation.
    * Falls back to the pure-Python implementation if compiled binaries are not available.
* Test case(s) for algorithms(C & Python).
* 2.8.0b1 (2015-07-28)
* `Jalalidate.timetuple` method implemented from scratch including unit test.
* Some methods with underscore renamed: `JalaliDate.*_ascii` to `JalaliDate.*ascii`
* 2.7.0b2 (2015-07-26)
* README.rst
* 2.7.0-beta (2015-07-25)
* some bug fixes.
* method `Jalalidate.fromdate` removed. use constructor instead: `JalaliDate(date)`
* method `Jalalidate.fromjulianday` removed. use constructor instead: `JalaliDate(julian_days=..)`
* method `Jalalidate.fromdatetime` removed. use constructor instead: `JalaliDatetime(datetime)`
* 2.6.0-beta (2015-07-25)
* All possible formatting directives(a-z, A-Z) are supported, except: T
* All format directives are tested.
* Formatting and parsing test cases moved to `khayyam.formatting.tests` package.
* In project: docs/html
* `JalaliDate.from_julian_days` renamed to `JalaliDate.fromjuliandays`
* `JalaliDate.days_in_month` renamed to `JalaliDate.daysinmonth`
* `JalaliDate.is_leap` renamed to `JalaliDate.isleap`
* `JalaliDatetime` Modern properties.
* README.md updated
* 2.5.0-beta (2015-07-23)
* Doc: doctest
* Doc: formatting and parsing directives table.
* Doc: adding examples of new formatting directives in introduction: %D, %J, %R, %N, %n, %F, %h, %i, %r, %s, %o.
* local date & time formats are changed: digits -> persian
* Formatting and parsing test cases has been separated
* 2.4.0-beta (2015-07-22)
* Persian Numbers
* %D, %J, %R, %N, %n, %F, %h, %i, %r, %s, %o directives has been added.
* 2.3.0-alpha (2015-07-22)
    * Constants are moved to the formatting package, except MINYEAR, MAXYEAR and weekdays.
* Doc: Introduction -> Formatting & parsing
* Doc: Introduction -> Converting
* New methods `jalaliDate.todate`, `jalaliDate.fromdate`, `jalaliDatetime.todatetime` and `jalaliDatetime.fromdatetime`
    * Removed methods `jalaliDate.to_date`, `jalaliDate.from_date`, `jalaliDatetime.to_datetime` and `jalaliDatetime.from_datetime`
* 2.2.1-alpha (2015-07-21)
* Doc: Reading package's version automatically from khayyam/__init__.py in `sphinx/conf.py`
* Doc: Installation: (PYPI, Development version)
* Doc: Testing
* Doc: Contribution
* 2.2.0-alpha (2015-07-21)
* Generating API Documentation
* 2.1.0-alpha (2015-07-20)
* Adding ascii weekdayname in `JalaliDatetime` and `JalaliDate` representation(__repr__).
* 2.0.0-alpha (2015-07-19) Incompatible with < 2.0.0
* JalaliDate: method `localformat` renamed to `localdateformat`.
* JalaliDatetime: method `localformat` renamed to `localdatetimeformat`.
* JalaliDatetime: method `localshortformat_ascii` renamed to `localshortformatascii`.
* JalaliDatetime: method `localdatetimeformat_ascii` renamed to `localdatetimeformatascii`.
* JalaliDatetime: method `ampm_ascii` renamed to `ampmascii`.
* JalaliDatetime: Migrating to New Formatter/Parser Engine
* TehTz: renamed to TehranTimezone
* Comparison and Timezones
* Comparison with `datetime.date` & `datetime.datetime`
* Fixing timezone bug
* 1.1.0 (2015-07-17)
* JalaliDate: New Formatter/Parser & full unittests.
| PypiClean |
/AutoYOLObile-0.0.10.tar.gz/AutoYOLObile-0.0.10/ModelOpt/rt3d_pruning/options_backup.py | import argparse
def parse_args():
# Training settings
parser = argparse.ArgumentParser(description='PyTorch ADMM pruning for 3D CNNs')
parser.add_argument('--logger', action='store_true', default=True,
help='whether to use logger')
parser.add_argument('--log-interval', type=int, default=50, metavar='N',
help='how many batches to wait before logging training status')
parser.add_argument('--seed', type=int, default=2019, metavar='S',
help='random seed (default: 2019)')
parser.add_argument('-j', '--workers', default=12, type=int, metavar='N',
help='number of data loading workers (default: 12)')
parser.add_argument('--multi-gpu', action='store_true', default=False,
help='for multi-gpu training')
parser.add_argument('--no-cuda', action='store_true', default=False,
help='disables CUDA training')
parser.add_argument('--load-path', default='',
help='pretrained model path')
parser.add_argument('--arch', type=str, default='r2+1d',
help='[c3d, r2+1d-pretrained, s3d]')
    parser.add_argument('--batch-size', type=int, default=32, metavar='N',
                        help='input batch size for training (default: 32)')
parser.add_argument('--epochs', type=int, default=100, metavar='N',
help='number of epochs to train (default: 100)')
parser.add_argument('--admm-epochs', type=int, default=10, metavar='N',
help='number of interval epochs to update admm (default: 10)')
    parser.add_argument('--optmzr', type=str, default='sgd', metavar='OPTMZR',
                        help='optimizer used (default: sgd)')
    parser.add_argument('--lr', type=float, default=5e-4, metavar='LR',
                        help='learning rate (default: 5e-4)')
parser.add_argument('--lr-decay', type=int, default=30, metavar='LR_decay',
help='how many every epoch before lr drop (default: 30)')
parser.add_argument('--momentum', type=float, default=0.9, metavar='M',
help='SGD momentum (default: 0.9)')
    parser.add_argument('--weight-decay', '--wd', default=5e-4, type=float,
                        metavar='W', help='weight decay (default: 5e-4)')
parser.add_argument('--sparsity-type', type=str, default='blk-kgs',
help ='define sparsity_type: [blk-vanilla, blk-kgs]')
parser.add_argument('--s', type=float, default=0.0001,
help='scale sparse rate (default: 0.0001)')
parser.add_argument('--config-file', type=str, default='c3d',
help ='config file name')
parser.add_argument('--admm', action='store_true', default=False,
help='for admm training')
parser.add_argument('--masked-retrain', action='store_true', default=False,
help='for masked retrain')
parser.add_argument('--combine-progressive', action='store_true', default=False,
help='for progressive pruning')
parser.add_argument('--rho', type=float, default = 0.0001,
help ='define rho for ADMM')
parser.add_argument('--rho-num', type=int, default = 4,
help ='define how many rhos for ADMM training')
parser.add_argument('--data_location', default='/raid10/ms/dataset',
help='training and testing data location')
# Tricks
parser.add_argument('--lr-scheduler', type=str, default='cosine',
help='define lr scheduler')
parser.add_argument('--warmup', action='store_true', default=False,
help='warm-up scheduler')
parser.add_argument('--warmup-lr', type=float, default=0.00001, metavar='M',
help='warmup-lr, smaller than original lr')
parser.add_argument('--warmup-epochs', type=int, default=5, metavar='M',
help='number of epochs for lr warmup')
parser.add_argument('--mixup', action='store_true', default=False,
help='ce mixup')
parser.add_argument('--alpha', type=float, default=0.0, metavar='M',
help='for mixup training, lambda = Beta(alpha, alpha) distribution. Set to 0.0 to disable')
    parser.add_argument('--smooth', action='store_true', default=False,
                        help='label smoothing')
parser.add_argument('--smooth-eps', type=float, default=0.0, metavar='M',
help='smoothing rate [0.0, 1.0], set to 0.0 to disable')
parser.add_argument('--no-tricks', action='store_true', default=False,
help='disable all training tricks and restore original classic training process')
### customize
parser.add_argument('--dataset',
default='ucf101')
parser.add_argument('--resume', action='store_true', default=False,
help='resume from last epoch if model exists')
parser.add_argument('--block-size',
type=int, default=8,
help='block size in block circulant weight matrix')
parser.add_argument('--connectivity-block-size', '-conn-blk',
nargs='*', type=int, default=[8, 4],
help='block size for connectivity pruning')
# distillation
parser.add_argument('--distill', action='store_true', default=False ,
help='if distillation')
parser.add_argument('--teacharch', default=None,
help='teacher model architecture name')
parser.add_argument('--teacher-path', default='',
help='the path of teacher model')
parser.add_argument('--temperature', default=3, type=float,
help='temperature of distillation')
parser.add_argument('--kd-coefficient', default=0.5, type=float,
help='loss coefficient of knowledge distillation')
return parser | PypiClean |
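# Usage sketch (illustrative flags): parse_args() returns the configured
# ArgumentParser itself, so callers still need to call parse_args() on it.
if __name__ == "__main__":
    parser = parse_args()
    args = parser.parse_args(["--arch", "c3d", "--batch-size", "16", "--admm"])
    print(args.arch, args.batch_size, args.admm)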
/EfficientWord_Net-1.0.2-py3-none-any.whl/eff_word_net/streams.py | import pyaudio
from typing import Tuple , Callable
import numpy as np
from eff_word_net.engine import HotwordDetector
from eff_word_net import RATE
NoParameterFunction = Callable[[],None]
AudioFrameFunction = Callable[[],np.array]
class CustomAudioStream :
"""
    CustomAudioStream implementation allows developers to use
    any 16000 Hz sampled audio stream with the inference engine.
    It adds a sliding window on top of the incoming stream.
"""
def __init__(
self,
open_stream:Callable[[],None],
close_stream:Callable[[],None],
get_next_frame:Callable[[],np.array],
window_length_secs = 1,
sliding_window_secs:float = 1/8
):
self._open_stream = open_stream
self._close_stream = close_stream
self._get_next_frame = get_next_frame
self._window_size = int(window_length_secs * RATE)
self._sliding_window_size = int(sliding_window_secs * RATE)
self._out_audio = np.zeros(self._window_size) #blank 1 sec audio
print("Initial S",self._out_audio.shape)
def start_stream(self):
self._out_audio = np.zeros(self._window_size)
self._open_stream()
for i in range(RATE//self._sliding_window_size -1):
self.getFrame()
def close_stream(self):
self._close_stream()
self._out_audio = np.zeros(self._window_size)
def getFrame(self):
"""
        Returns an audio frame of ``window_length_secs`` seconds that advances
        by ``sliding_window_secs`` on every call, sampled at 16000 Hz
"""
new_frame = self._get_next_frame()
#print("Prior:", self._out_audio.shape, new_frame.shape )
assert new_frame.shape == (self._sliding_window_size,), \
"audio frame size from src doesnt match sliding_window_secs"
self._out_audio = np.append(
self._out_audio[self._sliding_window_size:],
new_frame
)
#print(self._out_audio.shape)
return self._out_audio
class SimpleMicStream(CustomAudioStream) :
"""
    Microphone stream with a sliding window,
    built on top of CustomAudioStream
"""
def __init__(self,window_length_secs=1, sliding_window_secs:float=1/8):
p=pyaudio.PyAudio()
CHUNK = int(sliding_window_secs*RATE)
print("Chunk size", CHUNK)
mic_stream=p.open(
format=pyaudio.paInt16,
channels=1,
rate=16000,
input=True,
frames_per_buffer=CHUNK
)
mic_stream.stop_stream()
CustomAudioStream.__init__(
self,
open_stream = mic_stream.start_stream,
close_stream = mic_stream.stop_stream,
get_next_frame = lambda : (
np.frombuffer(mic_stream.read(CHUNK),dtype=np.int16)
),
window_length_secs=window_length_secs,
sliding_window_secs=sliding_window_secs
) | PypiClean |
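# Usage sketch: pull sliding-window frames from the default microphone. The
# loop count is arbitrary; hotword scoring is omitted because HotwordDetector's
# scoring API is not shown in this module.
if __name__ == "__main__":
    mic = SimpleMicStream(window_length_secs=1, sliding_window_secs=1 / 8)
    mic.start_stream()
    for _ in range(16):          # roughly two seconds of audio
        frame = mic.getFrame()   # 16000-sample numpy array
        print(frame.shape)
    mic.close_stream()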
/ModelSEEDpy-freiburgermsu-0.3.1.4.tar.gz/ModelSEEDpy-freiburgermsu-0.3.1.4/modelseedpy_freiburgermsu/fbapkg/metabofbapkg.py |
from __future__ import absolute_import
import logging
from modelseedpy_freiburgermsu.fbapkg.basefbapkg import BaseFBAPkg
from optlang.symbolics import Zero
# Base class for FBA packages
class MetaboFBAPkg(BaseFBAPkg):
def __init__(self, model):
BaseFBAPkg.__init__(
self,
model,
"metabo fba",
{"met": "metabolite", "pk": "string"},
{"metc": "metabolite", "pkc": "string"},
)
self.pkgmgr.addpkgs(["SimpleThermoPkg"])
def build_package(self, parameters):
self.validate_parameters(
parameters,
["peaks"],
{
"set_objective": 1,
},
)
self.pkgmgr.getpkg("SimpleThermoPkg").build_package()
peak_hash = {}
for peak_data in self.parameters['peaks']:
peak_hash[peak_data["id"]] = peak_data
self.find_metabolites_matching_peak(peak_data)
self.build_variable(peak_data, "pk")
for met in peak_data["metabolites"]:
self.build_variable(met, "met")
self.build_constraint(met, "metc")
self.build_constraint(peak_data, "pkc")
if parameters["set_objective"] == 1:
metabolite_objective = self.model.problem.Objective(Zero, direction="max")
obj_coef = dict()
for peak_id in self.variables["pk"]:
if "wieght" in peak_hash[peak_id]:
obj_coef[self.variables["pk"][peak_id]] = peak_hash[peak_id][
"wieght"
]
else:
obj_coef[self.variables["pk"][peak_id]] = 1
self.model.objective = metabolite_objective
metabolite_objective.set_linear_coefficients(obj_coef)
def build_variable(self, cobra_obj, obj_type):
if obj_type == "met":
return BaseFBAPkg.build_variable(self, obj_type, 0, 1, "continuous", cobra_obj)
elif obj_type == "pk":
return BaseFBAPkg.build_variable(
self, obj_type, 0, 1, "continuous", cobra_obj["id"]
)
def build_constraint(self, cobra_obj, obj_type):
# TODO: need to determine coefficients
coef = {self.variables["met"][cobra_obj.id]: 1}
if obj_type == "metc":
return BaseFBAPkg.build_constraint(self, "metc", 0, 0, coef, cobra_obj)
elif obj_type == "pkc":
return BaseFBAPkg.build_constraint(self, "pkc", 0, 0, coef, cobra_obj["id"])
def find_metabolites_matching_peak(self, data):
# TODO: need to write this function
pass | PypiClean |
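# Usage sketch (illustrative): the peak dictionaries mirror the keys consumed by
# build_package above ("id", "metabolites", optional "wieght"); `model` is
# assumed to be a cobra.Model loaded elsewhere, and the metabolite id is made up.
#
#   pkg = MetaboFBAPkg(model)
#   pkg.build_package({
#       "peaks": [{
#           "id": "peak1",
#           "wieght": 2,
#           "metabolites": [model.metabolites.get_by_id("cpd00027_c0")],
#       }],
#       "set_objective": 1,
#   })
#   solution = model.optimize()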
/2D_Panel-CFD-0.0.1.tar.gz/2D_Panel-CFD-0.0.1/src/RUN_package/UV_coeff.py | import numpy as np
def U_coeffUpwind(Uxo, Uyo, Po, Uue, Uuw, Vun, Vus, D, rho, visc, urf_UV, nx, ny, dx, dy):
Aup = np.zeros((ny, nx+1))
Aue = np.zeros((ny, nx+1))
Auw = np.zeros((ny, nx+1))
Aun = np.zeros((ny, nx+1))
Aus = np.zeros((ny, nx+1))
Bup = np.zeros((ny, nx+1))
vyx = visc*dy/dx
vxy = visc*dx/dy
for i in range (0, ny):
for j in range (0, nx+1):
if(j!=0 and j!=nx):
Aup[i, j] = rho*dy*(max(Uue[i, j], 0) + max(-Uuw[i, j], 0)) +\
rho*dx*(max(Vun[i, j], 0) + max(-Vus[i, j], 0)) +\
2*(vyx+vxy)
Aue[i, j] = rho*dy*max(-Uue[i, j], 0) + vyx
Auw[i, j] = rho*dy*max(Uuw[i, j], 0) + vyx
Aun[i, j] = rho*dx*max(-Vun[i, j], 0) + vxy
Aus[i, j] = rho*dx*max(Vus[i, j], 0) + vxy
Bup[i, j] = dy*(Po[i, j-1]-Po[i, j])
# South Boundary Outlet
if(D[0][i, j] == 2 and D[0][i+1, j] == -1):
Aup[i, j] += -vxy - rho*dx*max(Vus[i, j], 0)
Aus[i, j] = 0
# North Boundary Outlet
if(D[0][i, j] == -1 and D[0][i+1, j] == 2):
Aup[i, j] += -vxy - rho*dx*max(-Vun[i, j], 0)
                Aun[i, j] = 0
# South Boundary Wall
if(D[0][i, j] == 2 and D[0][i+1, j] == 0):
Aup[i, j] += 2*vxy
Aun[i, j] += vxy/3
Aus[i, j] = 0
Bup[i, j] += vxy*8*D[1][i+1, j]/3
# North Boundary Wall
if(D[0][i, j] == 0 and D[0][i+1, j] == 2):
Aup[i, j] += 2*vxy
Aus[i, j] += vxy/3
Aun[i, j] = 0
Bup[i, j] += vxy*8*D[1][i, j]/3
# South Boundary Inlet
if(D[0][i, j] == 2 and D[0][i+1, j] == 1):
Aup[i, j] += 2*vxy + 2*rho*dx*max(Vus[i, j], 0)
Aun[i, j] += vxy/3 + rho*dx*max(Vus[i, j], 0)/3
Aus[i, j] = 0
Bup[i, j] += vxy*8*D[1][i+1, j]/3 + 8*rho*dx*D[1][i+1, j]*max(Vus[i, j], 0)/3
# North Boundary Inlet
if(D[0][i, j] == 1 and D[0][i+1, j] == 2):
Aup[i, j] += 2*vxy + 2*rho*dx*max(-Vun[i, j], 0)
Aus[i, j] += vxy/3 + rho*dx*max(-Vun[i, j], 0)/3
Aun[i, j] = 0
Bup[i, j] += vxy*8*D[1][i, j]/3 + 8*rho*dx*D[1][i, j]*max(-Vun[i, j], 0)/3
# Data Point on Boundary
if(D[0][i, j]!=2 and D[0][i+1, j]!=2):
Aup[i, j] = 1
Aue[i, j] = 0
Auw[i, j] = 0
Aun[i, j] = 0
Aus[i, j] = 0
Bup[i, j] = Uxo[i, j]
# Outlet Boundary
if(D[0][i, j]==-1 and D[0][i+1, j]==-1 and j==nx):
Aup[i, j] = 1
Aue[i, j] = 0
Auw[i, j] = 1
Aun[i, j] = 0
Aus[i, j] = 0
Bup[i, j] = 0
if(D[0][i, j]==-1 and D[0][i+1, j]==-1 and j==0):
Aup[i, j] = 1
Aue[i, j] = 1
Auw[i, j] = 0
Aun[i, j] = 0
Aus[i, j] = 0
Bup[i, j] = 0
# Under-Relaxation Factor
if(D[0][i, j]==2 or D[0][i+1, j]==2):
Aup[i, j] = Aup[i, j]/urf_UV
Bup[i, j] += (1-urf_UV)*Aup[i, j]*Uxo[i, j]
# Matrix Creation
M_Bup = Bup.flatten()
M_Au = np.zeros(((ny)*(nx+1), (ny)*(nx+1)))
ite=0
for i in range (0, ny):
for j in range (0, nx+1):
M_Au[ite, ite] = Aup[i, j]
if ((ite+1)%(nx+1)!=0):
M_Au[ite, ite+1] = -Aue[i, j]
if((ite%(nx+1)!=0) and (ite!=0)):
M_Au[ite, ite-1] = -Auw[i, j]
if (ite<(ny-1)*(nx+1)):
M_Au[ite, ite+nx+1] = -Aus[i, j]
if(ite>nx):
M_Au[ite, ite-nx-1] = -Aun[i, j]
ite+=1
return M_Au, M_Bup, Aup, Aue, Auw, Aun, Aus, Bup
def V_coeffUpwind(Uxo, Uyo, Po, Uve, Uvw, Vvn, Vvs, D, rho, visc, urf_UV, nx, ny, dx, dy):
    """Assemble the upwind-discretized v-momentum coefficients and the linear system (M_Av, M_Bvp)."""
Avp = np.zeros((ny+1, nx))
Ave = np.zeros((ny+1, nx))
Avw = np.zeros((ny+1, nx))
Avn = np.zeros((ny+1, nx))
Avs = np.zeros((ny+1, nx))
Bvp = np.zeros((ny+1, nx))
vyx = visc*dy/dx
vxy = visc*dx/dy
for i in range (0, ny+1):
for j in range (0, nx):
if(i!=0 and i!=ny):
Avp[i, j] = rho*dy*(max(Uve[i, j], 0) + max(-Uvw[i, j], 0)) +\
rho*dx*(max(Vvn[i, j], 0) + max(-Vvs[i, j], 0)) +\
2*(vxy+vyx)
Ave[i, j] = rho*dy*max(-Uve[i, j], 0) + vyx
Avw[i, j] = rho*dy*max(Uvw[i, j], 0) + vyx
Avn[i, j] = rho*dx*max(-Vvn[i, j], 0) + vxy
Avs[i, j] = rho*dx*max(Vvs[i, j], 0) + vxy
Bvp[i, j] = dx*(Po[i, j]-Po[i-1, j])
# West Boundary Outlet
if(D[0][i, j] == -1 and D[0][i, j+1] == 2):
Avp[i, j] += -vyx - rho*dy*max(Uvw[i, j], 0)
Avw[i, j] = 0
# East Boundary Outlet
if(D[0][i, j] == 2 and D[0][i, j+1] == -1):
Avp[i, j] += -vyx - rho*dy*max(-Uve[i, j], 0)
Ave[i, j] = 0
# West Boundary Wall
if(D[0][i, j] == 0 and D[0][i, j+1] == 2):
Avp[i, j] += 2*vyx
Ave[i, j] += vyx/3
Avw[i, j] = 0
Bvp[i, j] += vyx*8*D[2][i, j]/3
# East Boundary Wall
if(D[0][i, j] == 2 and D[0][i, j+1] == 0):
Avp[i, j] += 2*vyx
Avw[i, j] += vyx/3
Ave[i, j] = 0
Bvp[i, j] += vyx*8*D[2][i, j+1]/3
# West Boundary Inlet
if(D[0][i, j] == 1 and D[0][i, j+1] == 2):
Avp[i, j] += 2*vyx + 2*rho*dy*max(Uvw[i, j], 0)
Ave[i, j] += vyx/3 + rho*dy*max(Uvw[i, j], 0)/3
Avw[i, j] = 0
Bvp[i, j] += vyx*8*D[2][i, j]/3 + 8*rho*dy*D[2][i, j]*max(Uvw[i, j], 0)/3
# East Boundary Inlet
if(D[0][i, j] == 2 and D[0][i, j+1] == 1):
Avp[i, j] += 2*vyx + 2*rho*dy*max(-Uve[i, j], 0)
Avw[i, j] += vyx/3 + rho*dy*max(-Uve[i, j], 0)/3
Ave[i, j] = 0
Bvp[i, j] += vyx*8*D[2][i, j+1]/3 + 8*rho*dy*D[2][i, j+1]*max(-Uve[i, j], 0)/3
# Data Point on Boundary
if(D[0][i, j]!=2 and D[0][i, j+1]!=2):
Avp[i, j] = 1
Ave[i, j] = 0
Avw[i, j] = 0
Avn[i, j] = 0
Avs[i, j] = 0
Bvp[i, j] = Uyo[i, j]
# Outlet Boundary
if(D[0][i, j]==-1 and D[0][i, j+1]==-1 and i==0):
Avp[i, j] = 1
Ave[i, j] = 0
Avw[i, j] = 0
Avn[i, j] = 0
Avs[i, j] = 1
Bvp[i, j] = 0
if(D[0][i, j]==-1 and D[0][i, j+1]==-1 and i==ny):
Avp[i, j] = 1
Ave[i, j] = 0
Avw[i, j] = 0
Avn[i, j] = 1
Avs[i, j] = 0
Bvp[i, j] = 0
# Under-Relaxation Factor
if(D[0][i, j]==2 or D[0][i, j+1]==2):
Avp[i, j] = Avp[i, j]/urf_UV
Bvp[i, j] += (1-urf_UV)*Avp[i, j]*Uyo[i, j]
# Matrix Creation
M_Bvp = Bvp.flatten()
M_Av = np.zeros(((ny+1)*(nx), (ny+1)*(nx)))
ite=0
for i in range (0, ny+1):
for j in range (0, nx):
M_Av[ite, ite] = Avp[i, j]
if ((ite+1)%(nx)!=0):
M_Av[ite, ite+1] = -Ave[i, j]
if((ite%(nx)!=0) and (ite!=0)):
M_Av[ite, ite-1] = -Avw[i, j]
if (ite<(ny)*(nx)):
M_Av[ite, ite+nx] = -Avs[i, j]
if(ite>nx-1):
M_Av[ite, ite-nx] = -Avn[i, j]
ite+=1
return M_Av, M_Bvp, Avp, Ave, Avw, Avn, Avs, Bvp | PypiClean |
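# Usage sketch (illustrative): once the coefficients are assembled, each
# momentum update is a dense linear solve followed by a reshape back onto the
# staggered grid. All input fields (Uxo, Uue, D, ...) are assumed to have been
# built by the caller.
#
#   M_Au, M_Bup, *_ = U_coeffUpwind(Uxo, Uyo, Po, Uue, Uuw, Vun, Vus, D,
#                                   rho, visc, urf_UV, nx, ny, dx, dy)
#   Ux_new = np.linalg.solve(M_Au, M_Bup).reshape(ny, nx + 1)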
/HMF_test_folder-0.1.tar.gz/HMF_test_folder-0.1/HMF_test_folder/Gaussiandistribution.py | import math
import matplotlib.pyplot as plt
from .Generaldistribution import Distribution
class Gaussian(Distribution):
""" Gaussian distribution class for calculating and
visualizing a Gaussian distribution.
Attributes:
mean (float) representing the mean value of the distribution
stdev (float) representing the standard deviation of the distribution
data_list (list of floats) a list of floats extracted from the data file
"""
def __init__(self, mu=0, sigma=1):
Distribution.__init__(self, mu, sigma)
def calculate_mean(self):
"""Function to calculate the mean of the data set.
Args:
None
Returns:
float: mean of the data set
"""
avg = 1.0 * sum(self.data) / len(self.data)
self.mean = avg
return self.mean
def calculate_stdev(self, sample=True):
"""Function to calculate the standard deviation of the data set.
Args:
sample (bool): whether the data represents a sample or population
Returns:
float: standard deviation of the data set
"""
if sample:
n = len(self.data) - 1
else:
n = len(self.data)
mean = self.calculate_mean()
sigma = 0
for d in self.data:
sigma += (d - mean) ** 2
sigma = math.sqrt(sigma / n)
self.stdev = sigma
return self.stdev
def plot_histogram(self):
"""Function to output a histogram of the instance variable data using
matplotlib pyplot library.
Args:
None
Returns:
None
"""
plt.hist(self.data)
plt.title('Histogram of Data')
plt.xlabel('data')
plt.ylabel('count')
def pdf(self, x):
"""Probability density function calculator for the gaussian distribution.
Args:
x (float): point for calculating the probability density function
Returns:
float: probability density function output
"""
return (1.0 / (self.stdev * math.sqrt(2*math.pi))) * math.exp(-0.5*((x - self.mean) / self.stdev) ** 2)
def plot_histogram_pdf(self, n_spaces = 50):
"""Function to plot the normalized histogram of the data and a plot of the
probability density function along the same range
Args:
n_spaces (int): number of data points
Returns:
list: x values for the pdf plot
list: y values for the pdf plot
"""
mu = self.mean
sigma = self.stdev
min_range = min(self.data)
max_range = max(self.data)
# calculates the interval between x values
interval = 1.0 * (max_range - min_range) / n_spaces
x = []
y = []
# calculate the x values to visualize
for i in range(n_spaces):
tmp = min_range + interval*i
x.append(tmp)
y.append(self.pdf(tmp))
# make the plots
fig, axes = plt.subplots(2,sharex=True)
fig.subplots_adjust(hspace=.5)
axes[0].hist(self.data, density=True)
axes[0].set_title('Normed Histogram of Data')
axes[0].set_ylabel('Density')
axes[1].plot(x, y)
axes[1].set_title('Normal Distribution for \n Sample Mean and Sample Standard Deviation')
        axes[1].set_ylabel('Density')
plt.show()
return x, y
def __add__(self, other):
"""Function to add together two Gaussian distributions
Args:
other (Gaussian): Gaussian instance
Returns:
Gaussian: Gaussian distribution
"""
result = Gaussian()
result.mean = self.mean + other.mean
result.stdev = math.sqrt(self.stdev ** 2 + other.stdev ** 2)
return result
def __repr__(self):
"""Function to output the characteristics of the Gaussian instance
Args:
None
Returns:
string: characteristics of the Gaussian
"""
return "mean {}, standard deviation {}".format(self.mean, self.stdev) | PypiClean |
/Electrum-CHI-3.3.8.tar.gz/Electrum-CHI-3.3.8/electrum_chi/electrum/simple_config.py | import json
import threading
import time
import os
import stat
from decimal import Decimal
from typing import Union, Optional
from numbers import Real
from copy import deepcopy
from . import util
from .util import (user_dir, make_dir,
NoDynamicFeeEstimates, format_fee_satoshis, quantize_feerate)
from .i18n import _
from .logging import get_logger, Logger
FEE_ETA_TARGETS = [25, 10, 5, 2]
FEE_DEPTH_TARGETS = [10000000, 5000000, 2000000, 1000000, 500000, 200000, 100000]
# satoshi per kbyte
FEERATE_MAX_DYNAMIC = 1500000
FEERATE_WARNING_HIGH_FEE = 600000
FEERATE_FALLBACK_STATIC_FEE = 150000
FEERATE_DEFAULT_RELAY = 1000
FEERATE_STATIC_VALUES = [1000, 2000, 5000, 10000, 20000, 30000,
50000, 70000, 100000, 150000, 200000, 300000]
config = None
_logger = get_logger(__name__)
def get_config():
global config
return config
def set_config(c):
global config
config = c
FINAL_CONFIG_VERSION = 3
class SimpleConfig(Logger):
"""
The SimpleConfig class is responsible for handling operations involving
configuration files.
There are two different sources of possible configuration values:
1. Command line options.
2. User configuration (in the user's config directory)
They are taken in order (1. overrides config options set in 2.)
"""
def __init__(self, options=None, read_user_config_function=None,
read_user_dir_function=None):
if options is None:
options = {}
Logger.__init__(self)
# This lock needs to be acquired for updating and reading the config in
# a thread-safe way.
self.lock = threading.RLock()
self.mempool_fees = {}
self.fee_estimates = {}
self.fee_estimates_last_updated = {}
self.last_time_fee_estimates_requested = 0 # zero ensures immediate fees
# The following two functions are there for dependency injection when
# testing.
if read_user_config_function is None:
read_user_config_function = read_user_config
if read_user_dir_function is None:
self.user_dir = user_dir
else:
self.user_dir = read_user_dir_function
# The command line options
self.cmdline_options = deepcopy(options)
# don't allow to be set on CLI:
self.cmdline_options.pop('config_version', None)
# Set self.path and read the user config
self.user_config = {} # for self.get in electrum_path()
self.path = self.electrum_path()
self.user_config = read_user_config_function(self.path)
if not self.user_config:
# avoid new config getting upgraded
self.user_config = {'config_version': FINAL_CONFIG_VERSION}
# config "upgrade" - CLI options
self.rename_config_keys(
self.cmdline_options, {'auto_cycle': 'auto_connect'}, True)
# config upgrade - user config
if self.requires_upgrade():
self.upgrade()
# Make a singleton instance of 'self'
set_config(self)
def electrum_path(self):
# Read electrum_path from command line
# Otherwise use the user's default data directory.
path = self.get('electrum_path')
if path is None:
path = self.user_dir()
make_dir(path, allow_symlink=False)
if self.get('testnet'):
path = os.path.join(path, 'testnet')
make_dir(path, allow_symlink=False)
elif self.get('regtest'):
path = os.path.join(path, 'regtest')
make_dir(path, allow_symlink=False)
elif self.get('simnet'):
path = os.path.join(path, 'simnet')
make_dir(path, allow_symlink=False)
self.logger.info(f"electrum-chi directory {path}")
return path
def rename_config_keys(self, config, keypairs, deprecation_warning=False):
"""Migrate old key names to new ones"""
updated = False
for old_key, new_key in keypairs.items():
if old_key in config:
if new_key not in config:
config[new_key] = config[old_key]
if deprecation_warning:
self.logger.warning('Note that the {} variable has been deprecated. '
'You should use {} instead.'.format(old_key, new_key))
del config[old_key]
updated = True
return updated
def set_key(self, key, value, save=True):
if not self.is_modifiable(key):
self.logger.warning(f"not changing config key '{key}' set on the command line")
return
try:
json.dumps(key)
json.dumps(value)
except:
self.logger.info(f"json error: cannot save {repr(key)} ({repr(value)})")
return
self._set_key_in_user_config(key, value, save)
def _set_key_in_user_config(self, key, value, save=True):
with self.lock:
if value is not None:
self.user_config[key] = value
else:
self.user_config.pop(key, None)
if save:
self.save_user_config()
def get(self, key, default=None):
with self.lock:
out = self.cmdline_options.get(key)
if out is None:
out = self.user_config.get(key, default)
return out
def requires_upgrade(self):
return self.get_config_version() < FINAL_CONFIG_VERSION
def upgrade(self):
with self.lock:
self.logger.info('upgrading config')
self.convert_version_2()
self.convert_version_3()
self.set_key('config_version', FINAL_CONFIG_VERSION, save=True)
def convert_version_2(self):
if not self._is_upgrade_method_needed(1, 1):
return
self.rename_config_keys(self.user_config, {'auto_cycle': 'auto_connect'})
try:
# change server string FROM host:port:proto TO host:port:s
server_str = self.user_config.get('server')
host, port, protocol = str(server_str).rsplit(':', 2)
assert protocol in ('s', 't')
int(port) # Throw if cannot be converted to int
server_str = '{}:{}:s'.format(host, port)
self._set_key_in_user_config('server', server_str)
except BaseException:
self._set_key_in_user_config('server', None)
self.set_key('config_version', 2)
def convert_version_3(self):
if not self._is_upgrade_method_needed(2, 2):
return
base_unit = self.user_config.get('base_unit')
if isinstance(base_unit, str):
self._set_key_in_user_config('base_unit', None)
map_ = {'chi':8, 'mchi':5, 'uchi':2, 'bits':2, 'satoshi':0}
decimal_point = map_.get(base_unit.lower())
self._set_key_in_user_config('decimal_point', decimal_point)
self.set_key('config_version', 3)
def _is_upgrade_method_needed(self, min_version, max_version):
cur_version = self.get_config_version()
if cur_version > max_version:
return False
elif cur_version < min_version:
raise Exception(
('config upgrade: unexpected version %d (should be %d-%d)'
% (cur_version, min_version, max_version)))
else:
return True
def get_config_version(self):
config_version = self.get('config_version', 1)
if config_version > FINAL_CONFIG_VERSION:
self.logger.warning('config version ({}) is higher than latest ({})'
.format(config_version, FINAL_CONFIG_VERSION))
return config_version
def is_modifiable(self, key):
return key not in self.cmdline_options
def save_user_config(self):
if not self.path:
return
path = os.path.join(self.path, "config")
s = json.dumps(self.user_config, indent=4, sort_keys=True)
try:
with open(path, "w", encoding='utf-8') as f:
f.write(s)
os.chmod(path, stat.S_IREAD | stat.S_IWRITE)
except FileNotFoundError:
# datadir probably deleted while running...
if os.path.exists(self.path): # or maybe not?
raise
def get_wallet_path(self):
"""Set the path of the wallet."""
# command line -w option
if self.get('wallet_path'):
return os.path.join(self.get('cwd', ''), self.get('wallet_path'))
# path in config file
path = self.get('default_wallet_path')
if path and os.path.exists(path):
return path
# default path
util.assert_datadir_available(self.path)
dirpath = os.path.join(self.path, "wallets")
make_dir(dirpath, allow_symlink=False)
new_path = os.path.join(self.path, "wallets", "default_wallet")
# default path in pre 1.9 versions
old_path = os.path.join(self.path, "electrum-chi.dat")
if os.path.exists(old_path) and not os.path.exists(new_path):
os.rename(old_path, new_path)
return new_path
def remove_from_recently_open(self, filename):
recent = self.get('recently_open', [])
if filename in recent:
recent.remove(filename)
self.set_key('recently_open', recent)
def set_session_timeout(self, seconds):
self.logger.info(f"session timeout -> {seconds} seconds")
self.set_key('session_timeout', seconds)
def get_session_timeout(self):
return self.get('session_timeout', 300)
def open_last_wallet(self):
if self.get('wallet_path') is None:
last_wallet = self.get('gui_last_wallet')
if last_wallet is not None and os.path.exists(last_wallet):
self.cmdline_options['default_wallet_path'] = last_wallet
def save_last_wallet(self, wallet):
if self.get('wallet_path') is None:
path = wallet.storage.path
self.set_key('gui_last_wallet', path)
def impose_hard_limits_on_fee(func):
def get_fee_within_limits(self, *args, **kwargs):
fee = func(self, *args, **kwargs)
if fee is None:
return fee
fee = min(FEERATE_MAX_DYNAMIC, fee)
fee = max(FEERATE_DEFAULT_RELAY, fee)
return fee
return get_fee_within_limits
def eta_to_fee(self, slider_pos) -> Optional[int]:
"""Returns fee in sat/kbyte."""
slider_pos = max(slider_pos, 0)
slider_pos = min(slider_pos, len(FEE_ETA_TARGETS))
if slider_pos < len(FEE_ETA_TARGETS):
num_blocks = FEE_ETA_TARGETS[slider_pos]
fee = self.eta_target_to_fee(num_blocks)
else:
fee = self.eta_target_to_fee(1)
return fee
@impose_hard_limits_on_fee
def eta_target_to_fee(self, num_blocks: int) -> Optional[int]:
"""Returns fee in sat/kbyte."""
if num_blocks == 1:
fee = self.fee_estimates.get(2)
if fee is not None:
fee += fee / 2
fee = int(fee)
else:
fee = self.fee_estimates.get(num_blocks)
return fee
def fee_to_depth(self, target_fee: Real) -> int:
"""For a given sat/vbyte fee, returns an estimate of how deep
it would be in the current mempool in vbytes.
Pessimistic == overestimates the depth.
"""
depth = 0
for fee, s in self.mempool_fees:
depth += s
if fee <= target_fee:
break
return depth
def depth_to_fee(self, slider_pos) -> int:
"""Returns fee in sat/kbyte."""
target = self.depth_target(slider_pos)
return self.depth_target_to_fee(target)
@impose_hard_limits_on_fee
def depth_target_to_fee(self, target: int) -> int:
"""Returns fee in sat/kbyte.
target: desired mempool depth in vbytes
"""
depth = 0
for fee, s in self.mempool_fees:
depth += s
if depth > target:
break
else:
return 0
# add one sat/byte as currently that is
# the max precision of the histogram
fee += 1
# convert to sat/kbyte
return fee * 1000
def depth_target(self, slider_pos):
slider_pos = max(slider_pos, 0)
slider_pos = min(slider_pos, len(FEE_DEPTH_TARGETS)-1)
return FEE_DEPTH_TARGETS[slider_pos]
def eta_target(self, i):
if i == len(FEE_ETA_TARGETS):
return 1
return FEE_ETA_TARGETS[i]
def fee_to_eta(self, fee_per_kb):
import operator
l = list(self.fee_estimates.items()) + [(1, self.eta_to_fee(4))]
dist = map(lambda x: (x[0], abs(x[1] - fee_per_kb)), l)
min_target, min_value = min(dist, key=operator.itemgetter(1))
if fee_per_kb < self.fee_estimates.get(25)/2:
min_target = -1
return min_target
def depth_tooltip(self, depth):
return "%.1f MB from tip"%(depth/1000000)
def eta_tooltip(self, x):
if x < 0:
return _('Low fee')
elif x == 1:
return _('In the next block')
else:
return _('Within {} blocks').format(x)
def get_fee_status(self):
dyn = self.is_dynfee()
mempool = self.use_mempool_fees()
pos = self.get_depth_level() if mempool else self.get_fee_level()
fee_rate = self.fee_per_kb()
target, tooltip = self.get_fee_text(pos, dyn, mempool, fee_rate)
return tooltip + ' [%s]'%target if dyn else target + ' [Static]'
def get_fee_text(self, pos, dyn, mempool, fee_rate):
"""Returns (text, tooltip) where
text is what we target: static fee / num blocks to confirm in / mempool depth
tooltip is the corresponding estimate (e.g. num blocks for a static fee)
fee_rate is in sat/kbyte
"""
if fee_rate is None:
rate_str = 'unknown'
else:
fee_rate = fee_rate/1000
rate_str = format_fee_satoshis(fee_rate) + ' swartz/byte'
if dyn:
if mempool:
depth = self.depth_target(pos)
text = self.depth_tooltip(depth)
else:
eta = self.eta_target(pos)
text = self.eta_tooltip(eta)
tooltip = rate_str
else:
text = rate_str
if mempool and self.has_fee_mempool():
depth = self.fee_to_depth(fee_rate)
tooltip = self.depth_tooltip(depth)
elif not mempool and self.has_fee_etas():
eta = self.fee_to_eta(fee_rate)
tooltip = self.eta_tooltip(eta)
else:
tooltip = ''
return text, tooltip
def get_depth_level(self):
maxp = len(FEE_DEPTH_TARGETS) - 1
return min(maxp, self.get('depth_level', 2))
def get_fee_level(self):
maxp = len(FEE_ETA_TARGETS) # not (-1) to have "next block"
return min(maxp, self.get('fee_level', 2))
def get_fee_slider(self, dyn, mempool):
if dyn:
if mempool:
pos = self.get_depth_level()
maxp = len(FEE_DEPTH_TARGETS) - 1
fee_rate = self.depth_to_fee(pos)
else:
pos = self.get_fee_level()
maxp = len(FEE_ETA_TARGETS) # not (-1) to have "next block"
fee_rate = self.eta_to_fee(pos)
else:
fee_rate = self.fee_per_kb(dyn=False)
pos = self.static_fee_index(fee_rate)
maxp = len(FEERATE_STATIC_VALUES) - 1
return maxp, pos, fee_rate
def static_fee(self, i):
return FEERATE_STATIC_VALUES[i]
def static_fee_index(self, value):
if value is None:
raise TypeError('static fee cannot be None')
dist = list(map(lambda x: abs(x - value), FEERATE_STATIC_VALUES))
return min(range(len(dist)), key=dist.__getitem__)
def has_fee_etas(self):
return len(self.fee_estimates) == 4
def has_fee_mempool(self):
return bool(self.mempool_fees)
def has_dynamic_fees_ready(self):
if self.use_mempool_fees():
return self.has_fee_mempool()
else:
return self.has_fee_etas()
def is_dynfee(self):
return bool(self.get('dynamic_fees', True))
def use_mempool_fees(self):
return bool(self.get('mempool_fees', False))
def _feerate_from_fractional_slider_position(self, fee_level: float, dyn: bool,
mempool: bool) -> Union[int, None]:
fee_level = max(fee_level, 0)
fee_level = min(fee_level, 1)
if dyn:
max_pos = (len(FEE_DEPTH_TARGETS) - 1) if mempool else len(FEE_ETA_TARGETS)
slider_pos = round(fee_level * max_pos)
fee_rate = self.depth_to_fee(slider_pos) if mempool else self.eta_to_fee(slider_pos)
else:
max_pos = len(FEERATE_STATIC_VALUES) - 1
slider_pos = round(fee_level * max_pos)
fee_rate = FEERATE_STATIC_VALUES[slider_pos]
return fee_rate
def fee_per_kb(self, dyn: bool=None, mempool: bool=None, fee_level: float=None) -> Union[int, None]:
"""Returns sat/kvB fee to pay for a txn.
Note: might return None.
fee_level: float between 0.0 and 1.0, representing fee slider position
"""
if dyn is None:
dyn = self.is_dynfee()
if mempool is None:
mempool = self.use_mempool_fees()
if fee_level is not None:
return self._feerate_from_fractional_slider_position(fee_level, dyn, mempool)
# there is no fee_level specified; will use config.
# note: 'depth_level' and 'fee_level' in config are integer slider positions,
# unlike fee_level here, which (when given) is a float in [0.0, 1.0]
if dyn:
if mempool:
fee_rate = self.depth_to_fee(self.get_depth_level())
else:
fee_rate = self.eta_to_fee(self.get_fee_level())
else:
fee_rate = self.get('fee_per_kb', FEERATE_FALLBACK_STATIC_FEE)
return fee_rate
def fee_per_byte(self):
"""Returns sat/vB fee to pay for a txn.
Note: might return None.
"""
fee_per_kb = self.fee_per_kb()
return fee_per_kb / 1000 if fee_per_kb is not None else None
def estimate_fee(self, size: Union[int, float, Decimal]) -> int:
fee_per_kb = self.fee_per_kb()
if fee_per_kb is None:
raise NoDynamicFeeEstimates()
return self.estimate_fee_for_feerate(fee_per_kb, size)
@classmethod
def estimate_fee_for_feerate(cls, fee_per_kb: Union[int, float, Decimal],
size: Union[int, float, Decimal]) -> int:
size = Decimal(size)
fee_per_kb = Decimal(fee_per_kb)
fee_per_byte = fee_per_kb / 1000
# to be consistent with what is displayed in the GUI,
# the calculation needs to use the same precision:
fee_per_byte = quantize_feerate(fee_per_byte)
return round(fee_per_byte * size)
def update_fee_estimates(self, key, value):
self.fee_estimates[key] = value
self.fee_estimates_last_updated[key] = time.time()
def is_fee_estimates_update_required(self):
"""Checks time since last requested and updated fee estimates.
Returns True if an update should be requested.
"""
now = time.time()
return now - self.last_time_fee_estimates_requested > 60
def requested_fee_estimates(self):
self.last_time_fee_estimates_requested = time.time()
def get_video_device(self):
device = self.get("video_device", "default")
if device == 'default':
device = ''
return device
def read_user_config(path):
"""Parse and store the user config settings in electrum-chi.conf into user_config[]."""
if not path:
return {}
config_path = os.path.join(path, "config")
if not os.path.exists(config_path):
return {}
try:
with open(config_path, "r", encoding='utf-8') as f:
data = f.read()
result = json.loads(data)
except:
_logger.warning(f"Cannot read config file. {config_path}")
return {}
    if type(result) is not dict:
return {}
return result | PypiClean |
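# Illustrative usage sketch (not part of the original module): a SimpleConfig
# is typically constructed once from command-line options and then queried or
# updated. Constructing one creates the data directory on disk, so the sketch
# is left commented out; the option names below are just examples.
#
#   config = SimpleConfig({'verbosity': '*'})
#   config.set_key('dynamic_fees', False)
#   fee_per_kb = config.fee_per_kb()   # sat/kvB, may be None for dynamic fees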
/Automated%20Lazy%20Unit%20Testing%20in%20Python-1.0.0.tar.gz/Automated Lazy Unit Testing in Python-1.0.0/fyp/mypkg/constants.py | import imp
import inspect
import byteplay
from blessings import Terminal
###
module_types = {
imp.PY_SOURCE: 'source',
imp.PY_COMPILED: 'compiled',
imp.C_EXTENSION: 'extension',
imp.PY_RESOURCE: 'resource',
imp.PKG_DIRECTORY: 'pkg_path',
}
inspect_types = {
'class': inspect.isclass,
'function': inspect.isfunction,
# 'generator_fn': inspect.isgeneratorfunction,
# 'method': inspect.ismethod,
# 'builtin': inspect.isbuiltin,
# 'routine': inspect.isroutine,
# 'module': inspect.ismodule,
# 'abstract': inspect.isabstract,
# 'frame': inspect.isframe,
# 'code': inspect.iscode,
# 'generator': inspect.isgenerator,
# 'tb': inspect.istraceback,
# 'data_descriptor': inspect.isdatadescriptor,
# 'method_descriptor': inspect.ismethoddescriptor,
# 'getset_descriptor': inspect.isgetsetdescriptor,
# 'member_descriptor': inspect.ismemberdescriptor,
}
###
reserved_stores = [
byteplay.STORE_ATTR,
byteplay.STORE_FAST,
byteplay.STORE_MAP,
byteplay.STORE_SLICE_0,
byteplay.STORE_SLICE_1,
byteplay.STORE_SLICE_2,
byteplay.STORE_SLICE_3,
byteplay.STORE_SUBSCR,
byteplay.STORE_DEREF,
byteplay.STORE_GLOBAL,
byteplay.STORE_NAME,
]
LOAD_SLICE = 'LOAD_SLICE'
LOAD_OBJ_FN = 'LOAD_OBJ_FN'
LOAD_LIST = 'LOAD_LIST'
reserved_loads = [
byteplay.LOAD_ATTR,
byteplay.LOAD_CLOSURE,
byteplay.LOAD_CONST,
byteplay.LOAD_DEREF,
byteplay.LOAD_FAST,
byteplay.LOAD_GLOBAL,
byteplay.LOAD_LOCALS,
byteplay.LOAD_NAME,
LOAD_SLICE, # custom
LOAD_OBJ_FN,
LOAD_LIST,
]
reserved_binary = [
byteplay.BINARY_POWER,
byteplay.BINARY_MULTIPLY,
byteplay.BINARY_DIVIDE,
byteplay.BINARY_MODULO,
byteplay.BINARY_ADD,
byteplay.BINARY_SUBTRACT,
byteplay.BINARY_SUBSCR,
byteplay.BINARY_FLOOR_DIVIDE,
byteplay.BINARY_TRUE_DIVIDE,
byteplay.BINARY_LSHIFT,
byteplay.BINARY_RSHIFT,
byteplay.BINARY_AND,
byteplay.BINARY_XOR,
byteplay.BINARY_OR,
]
reserved_slices = {
byteplay.SLICE_0: 0,
byteplay.SLICE_1: 1,
byteplay.SLICE_2: 1,
byteplay.SLICE_3: 2,
byteplay.BUILD_SLICE: None,
}
###
reserved_rel = [
byteplay.FOR_ITER,
byteplay.JUMP_FORWARD,
byteplay.SETUP_LOOP,
byteplay.SETUP_EXCEPT,
byteplay.SETUP_FINALLY,
byteplay.SETUP_WITH,
]
reserved_abs = [
byteplay.JUMP_IF_FALSE_OR_POP,
byteplay.JUMP_IF_TRUE_OR_POP,
byteplay.JUMP_ABSOLUTE,
byteplay.POP_JUMP_IF_FALSE,
byteplay.POP_JUMP_IF_TRUE,
byteplay.CONTINUE_LOOP,
]
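# NOTE: this second assignment rebinds reserved_loads and overrides the list
# defined above (the custom LOAD_SLICE / LOAD_OBJ_FN / LOAD_LIST entries are
# dropped).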
reserved_loads = [
byteplay.LOAD_ATTR,
byteplay.LOAD_CLOSURE,
byteplay.LOAD_CONST,
byteplay.LOAD_DEREF,
byteplay.LOAD_FAST,
byteplay.LOAD_GLOBAL,
byteplay.LOAD_LOCALS,
byteplay.LOAD_NAME,
]
reserved_try = [
byteplay.POP_BLOCK,
]
###
graph_node_colors = {
'PINK': "#EE82EE",
'LIGHT_BLUE': "#87CEFA",
'GREEN': "#00FF7F",
'ORANGE': "#F4A460",
}
class ClassType: OLD, NEW = range(2)
MAX_ITERATIONS = 2**10
def f_noarg(): return # Mock parameters
def f_varg(*args, **kwargs): return
PARAM_VALUE_SEQ = [ None, 0, 0.0, '', (), [], {}, f_noarg, f_varg ]
PRIMITIVE_TYPES \
= (int, float, long, complex, basestring, bool, tuple, list, dict)
is_primitive = lambda var: isinstance(var, PRIMITIVE_TYPES)
op_arithmetic = ['+','-','*','/','%','**','//']
op_cmp = ['==','!=','<>','>','<','>=','<=']
op_assign = ['=','+=','-=','*=','/=','%=','**=','//=']
op_bitwise = ['&','|','^','~','<<','>>']
op_membership = ['in','not in']
op_id = ['is','is not'] | PypiClean |
/FSlash-0.17.0.tar.gz/FSlash-0.17.0/fslash/core/compose.py | from functools import reduce
from typing import Callable, Any, TypeVar, overload
A = TypeVar("A")
B = TypeVar("B")
C = TypeVar("C")
D = TypeVar("D")
E = TypeVar("E")
F = TypeVar("F")
G = TypeVar("G")
H = TypeVar("H")
@overload
def compose() -> Callable[[A], A]:
...
@overload
def compose(__fn1: Callable[[A], B]) -> Callable[[A], B]:
...
@overload
def compose(__fn1: Callable[[A], B], __fn2: Callable[[B], C]) -> Callable[[A], C]:
...
@overload
def compose(__fn1: Callable[[A], B], __fn2: Callable[[B], C], __fn3: Callable[[C], D]) -> Callable[[A], D]:
...
@overload
def compose(
__fn1: Callable[[A], B],
__fn2: Callable[[B], C],
__fn3: Callable[[C], D],
__fn4: Callable[[D], E],
) -> Callable[[A], E]:
...
@overload
def compose(
__fn1: Callable[[A], B],
__fn2: Callable[[B], C],
__fn3: Callable[[C], D],
__fn4: Callable[[D], E],
__fn5: Callable[[E], F],
) -> Callable[[A], F]:
...
@overload
def compose(
__fn1: Callable[[A], B],
__fn2: Callable[[B], C],
__fn3: Callable[[C], D],
__fn4: Callable[[D], E],
__fn5: Callable[[E], F],
__fn6: Callable[[F], G],
) -> Callable[[A], G]:
...
@overload
def compose(
__fn1: Callable[[A], B],
__fn2: Callable[[B], C],
__fn3: Callable[[C], D],
__fn4: Callable[[D], E],
__fn5: Callable[[E], F],
__fn6: Callable[[F], G],
__fn7: Callable[[G], H],
) -> Callable[[A], H]:
...
def compose(*fns: Callable[[Any], Any]) -> Callable[[Any], Any]:
"""Compose multiple functions left to right.
Composes zero or more functions into a functional composition. The
functions are composed left to right. A composition of zero
functions gives back the identity function.
>>> compose()(x) == x
>>> compose(f)(x) == f(x)
>>> compose(f, g)(x) == g(f(x))
>>> compose(f, g, h)(x) == h(g(f(x)))
...
Returns:
The composed function.
"""
def compose(source: Any) -> Any:
"""Return a pipeline of composed functions."""
return reduce(lambda acc, f: f(acc), fns, source)
return compose
__all__ = ["compose"] | PypiClean |
/Flask-Statics-Helper-1.0.0.tar.gz/Flask-Statics-Helper-1.0.0/flask_statics/static/angular/i18n/angular-locale_pl.js | 'use strict';
angular.module("ngLocale", [], ["$provide", function($provide) {
var PLURAL_CATEGORY = {ZERO: "zero", ONE: "one", TWO: "two", FEW: "few", MANY: "many", OTHER: "other"};
function getDecimals(n) {
n = n + '';
var i = n.indexOf('.');
return (i == -1) ? 0 : n.length - i - 1;
}
function getVF(n, opt_precision) {
var v = opt_precision;
if (undefined === v) {
v = Math.min(getDecimals(n), 3);
}
var base = Math.pow(10, v);
var f = ((n * base) | 0) % base;
return {v: v, f: f};
}
$provide.value("$locale", {
"DATETIME_FORMATS": {
"AMPMS": [
"AM",
"PM"
],
"DAY": [
"niedziela",
"poniedzia\u0142ek",
"wtorek",
"\u015broda",
"czwartek",
"pi\u0105tek",
"sobota"
],
"MONTH": [
"stycznia",
"lutego",
"marca",
"kwietnia",
"maja",
"czerwca",
"lipca",
"sierpnia",
"wrze\u015bnia",
"pa\u017adziernika",
"listopada",
"grudnia"
],
"SHORTDAY": [
"niedz.",
"pon.",
"wt.",
"\u015br.",
"czw.",
"pt.",
"sob."
],
"SHORTMONTH": [
"sty",
"lut",
"mar",
"kwi",
"maj",
"cze",
"lip",
"sie",
"wrz",
"pa\u017a",
"lis",
"gru"
],
"fullDate": "EEEE, d MMMM y",
"longDate": "d MMMM y",
"medium": "d MMM y HH:mm:ss",
"mediumDate": "d MMM y",
"mediumTime": "HH:mm:ss",
"short": "dd.MM.y HH:mm",
"shortDate": "dd.MM.y",
"shortTime": "HH:mm"
},
"NUMBER_FORMATS": {
"CURRENCY_SYM": "z\u0142",
"DECIMAL_SEP": ",",
"GROUP_SEP": "\u00a0",
"PATTERNS": [
{
"gSize": 3,
"lgSize": 3,
"maxFrac": 3,
"minFrac": 0,
"minInt": 1,
"negPre": "-",
"negSuf": "",
"posPre": "",
"posSuf": ""
},
{
"gSize": 3,
"lgSize": 3,
"maxFrac": 2,
"minFrac": 2,
"minInt": 1,
"negPre": "-",
"negSuf": "\u00a0\u00a4",
"posPre": "",
"posSuf": "\u00a0\u00a4"
}
]
},
"id": "pl",
"pluralCat": function(n, opt_precision) { var i = n | 0; var vf = getVF(n, opt_precision); if (i == 1 && vf.v == 0) { return PLURAL_CATEGORY.ONE; } if (vf.v == 0 && i % 10 >= 2 && i % 10 <= 4 && (i % 100 < 12 || i % 100 > 14)) { return PLURAL_CATEGORY.FEW; } if (vf.v == 0 && i != 1 && i % 10 >= 0 && i % 10 <= 1 || vf.v == 0 && i % 10 >= 5 && i % 10 <= 9 || vf.v == 0 && i % 100 >= 12 && i % 100 <= 14) { return PLURAL_CATEGORY.MANY; } return PLURAL_CATEGORY.OTHER;}
});
}]); | PypiClean |
/Flask-MDEditor-0.1.4.tar.gz/Flask-MDEditor-0.1.4/flask_mdeditor/static/mdeditor/js/lib/codemirror/mode/rpm/rpm.js |
(function(mod) {
if (typeof exports == "object" && typeof module == "object") // CommonJS
mod(require("../../lib/codemirror"));
else if (typeof define == "function" && define.amd) // AMD
define(["../../lib/codemirror"], mod);
else // Plain browser env
mod(CodeMirror);
})(function(CodeMirror) {
"use strict";
CodeMirror.defineMode("rpm-changes", function() {
var headerSeperator = /^-+$/;
var headerLine = /^(Mon|Tue|Wed|Thu|Fri|Sat|Sun) (Jan|Feb|Mar|Apr|May|Jun|Jul|Aug|Sep|Oct|Nov|Dec) ?\d{1,2} \d{2}:\d{2}(:\d{2})? [A-Z]{3,4} \d{4} - /;
var simpleEmail = /^[\w+.-]+@[\w.-]+/;
return {
token: function(stream) {
if (stream.sol()) {
if (stream.match(headerSeperator)) { return 'tag'; }
if (stream.match(headerLine)) { return 'tag'; }
}
if (stream.match(simpleEmail)) { return 'string'; }
stream.next();
return null;
}
};
});
CodeMirror.defineMIME("text/x-rpm-changes", "rpm-changes");
// Quick and dirty spec file highlighting
CodeMirror.defineMode("rpm-spec", function() {
var arch = /^(i386|i586|i686|x86_64|ppc64|ppc|ia64|s390x|s390|sparc64|sparcv9|sparc|noarch|alphaev6|alpha|hppa|mipsel)/;
var preamble = /^(Name|Version|Release|License|Summary|Url|Group|Source|BuildArch|BuildRequires|BuildRoot|AutoReqProv|Provides|Requires(\(\w+\))?|Obsoletes|Conflicts|Recommends|Source\d*|Patch\d*|ExclusiveArch|NoSource|Supplements):/;
var section = /^%(debug_package|package|description|prep|build|install|files|clean|changelog|preinstall|preun|postinstall|postun|pre|post|triggerin|triggerun|pretrans|posttrans|verifyscript|check|triggerpostun|triggerprein|trigger)/;
var control_flow_complex = /^%(ifnarch|ifarch|if)/; // rpm control flow macros
var control_flow_simple = /^%(else|endif)/; // rpm control flow macros
var operators = /^(\!|\?|\<\=|\<|\>\=|\>|\=\=|\&\&|\|\|)/; // operators in control flow macros
return {
startState: function () {
return {
controlFlow: false,
macroParameters: false,
section: false
};
},
token: function (stream, state) {
var ch = stream.peek();
if (ch == "#") { stream.skipToEnd(); return "comment"; }
if (stream.sol()) {
if (stream.match(preamble)) { return "preamble"; }
if (stream.match(section)) { return "section"; }
}
if (stream.match(/^\$\w+/)) { return "def"; } // Variables like '$RPM_BUILD_ROOT'
if (stream.match(/^\$\{\w+\}/)) { return "def"; } // Variables like '${RPM_BUILD_ROOT}'
if (stream.match(control_flow_simple)) { return "keyword"; }
if (stream.match(control_flow_complex)) {
state.controlFlow = true;
return "keyword";
}
if (state.controlFlow) {
if (stream.match(operators)) { return "operator"; }
if (stream.match(/^(\d+)/)) { return "number"; }
if (stream.eol()) { state.controlFlow = false; }
}
if (stream.match(arch)) { return "number"; }
// Macros like '%make_install' or '%attr(0775,root,root)'
if (stream.match(/^%[\w]+/)) {
if (stream.match(/^\(/)) { state.macroParameters = true; }
return "macro";
}
if (state.macroParameters) {
if (stream.match(/^\d+/)) { return "number";}
if (stream.match(/^\)/)) {
state.macroParameters = false;
return "macro";
}
}
if (stream.match(/^%\{\??[\w \-]+\}/)) { return "macro"; } // Macros like '%{defined fedora}'
//TODO: Include bash script sub-parser (CodeMirror supports that)
stream.next();
return null;
}
};
});
CodeMirror.defineMIME("text/x-rpm-spec", "rpm-spec");
}); | PypiClean |
/ImSwitchUC2-2.1.0.tar.gz/ImSwitchUC2-2.1.0/imswitch/imcontrol/model/interfaces/hamamatsu.py |
import ctypes
import ctypes.util
import numpy as np
from imswitch.imcommon.model import initLogger
# Hamamatsu constants.
DCAMCAP_EVENT_FRAMEREADY = int("0x0002", 0)
# DCAM3 API.
DCAMERR_ERROR = 0
DCAMERR_NOERROR = 1
DCAMPROP_ATTR_HASVALUETEXT = int("0x10000000", 0)
DCAMPROP_ATTR_READABLE = int("0x00010000", 0)
DCAMPROP_ATTR_WRITABLE = int("0x00020000", 0)
DCAMPROP_OPTION_NEAREST = int("0x80000000", 0)
DCAMPROP_OPTION_NEXT = int("0x01000000", 0)
DCAMPROP_OPTION_SUPPORT = int("0x00000000", 0)
DCAMPROP_TYPE_MODE = int("0x00000001", 0)
DCAMPROP_TYPE_LONG = int("0x00000002", 0)
DCAMPROP_TYPE_REAL = int("0x00000003", 0)
DCAMPROP_TYPE_MASK = int("0x0000000F", 0)
DCAMWAIT_TIMEOUT_INFINITE = int("0x80000000", 0)
DCAM_CAPTUREMODE_SNAP = 0
DCAM_CAPTUREMODE_SEQUENCE = 1
DCAM_DEFAULT_ARG = 0
DCAM_IDPROP_EXPOSURETIME = int("0x001F0110", 0)
DCAM_IDSTR_MODEL = int("0x04000104", 0)
# Hamamatsu structures.
# ## DCAM_PARAM_PROPERTYATTR
#
# The dcam property attribute structure.
#
class DCAM_PARAM_PROPERTYATTR(ctypes.Structure):
_fields_ = [("cbSize", ctypes.c_int32),
("iProp", ctypes.c_int32),
("option", ctypes.c_int32),
("iReserved1", ctypes.c_int32),
("attribute", ctypes.c_int32),
("iGroup", ctypes.c_int32),
("iUnit", ctypes.c_int32),
("attribute2", ctypes.c_int32),
("valuemin", ctypes.c_double),
("valuemax", ctypes.c_double),
("valuestep", ctypes.c_double),
("valuedefault", ctypes.c_double),
("nMaxChannel", ctypes.c_int32),
("iReserved3", ctypes.c_int32),
("nMaxView", ctypes.c_int32),
("iProp_NumberOfElement", ctypes.c_int32),
("iProp_ArrayBase", ctypes.c_int32),
("iPropStep_Element", ctypes.c_int32)]
# ## DCAM_PARAM_PROPERTYVALUETEXT
#
# The dcam text property structure.
#
class DCAM_PARAM_PROPERTYVALUETEXT(ctypes.Structure):
_fields_ = [("cbSize", ctypes.c_int32),
("iProp", ctypes.c_int32),
("value", ctypes.c_double),
("text", ctypes.c_char_p),
("textbytes", ctypes.c_int32)]
# ## convertPropertyName
#
# "Regularizes" a property name. We are using all lowercase names with
# the spaces replaced by underscores.
#
# @param p_name The property name string to regularize.
#
# @return The regularized property name.
#
def convertPropertyName(p_name):
return p_name.lower().decode("utf-8").replace(" ", "_")
# ## DCAMException
#
# Camera exceptions.
#
class DCAMException(Exception):
def __init__(self, message):
Exception.__init__(self, message)
#
# Initialization
#
dcam = None
n_cameras = -1
def initDcam():
global dcam
global n_cameras
if dcam is not None:
return
dcam = ctypes.windll.dcamapi
temp = ctypes.c_int32(0)
if (dcam.dcam_init(None, ctypes.byref(temp), None) != DCAMERR_NOERROR):
raise DCAMException("DCAM initialization failed.")
n_cameras = temp.value
# ## HCamData
#
# Hamamatsu camera data object.
#
# Initially I tried to use create_string_buffer() to allocate storage for the
# data from the camera but this turned out to be too slow. The software
# kept falling behind the camera and create_string_buffer() seemed to be the
# bottleneck.
#
class HCamData:
# ## __init__
#
# Create a data object of the appropriate size.
#
# @param size The size of the data object in bytes.
#
def __init__(self, size):
self.np_array = np.ascontiguousarray(np.empty(int(size / 2), dtype=np.uint16))
self.size = size
# ## __getitem__
#
# @param slice The slice of the item to get.
#
def __getitem__(self, slice):
return self.np_array[slice]
# ## copyData
#
# Uses the C memmove function to copy data from an address in memory
# into memory allocated for the numpy array of this object.
#
# @param address The memory address of the data to copy.
#
def copyData(self, address):
ctypes.memmove(self.np_array.ctypes.data, address, self.size)
# ## getData
#
# @return A numpy array that contains the camera data.
#
def getData(self):
return self.np_array
# ## getDataPtr
#
# @return The physical address in memory of the data.
#
def getDataPtr(self):
return self.np_array.ctypes.data
# ## HamamatsuCamera
#
# Basic camera interface class.
#
# This version uses the Hamamatsu library to allocate camera buffers.
# Storage for the data from the camera is allocated dynamically and
# copied out of the camera buffers.
#
class HamamatsuCamera:
# ## __init__
#
# Open the connection to the camera specified by camera_id.
#
# @param camera_id The id of the camera (an integer).
#
def __init__(self, camera_id):
self._logger = initLogger(self, tryInheritParent=True)
initDcam()
self.buffer_index = 0
self.camera_id = camera_id
self.camera_model = self.getModelInfo(camera_id)
self.debug = False
self.frame_bytes = 0
self.frame_x = 0
self.frame_y = 0
self.last_frame_number = 0
self.properties = {}
self.max_backlog = 0
self.number_image_buffers = 0
# Open the camera.
self.camera_handle = ctypes.c_void_p(0)
self.checkStatus(dcam.dcam_open(ctypes.byref(self.camera_handle),
ctypes.c_int32(self.camera_id),
None),
"dcam_open")
# Get camera properties.
self.properties = self.getCameraProperties()
# Get camera max width, height.
self.max_width = self.getPropertyValue("image_width")[0]
self.max_height = self.getPropertyValue("image_height")[0]
# ## captureSetup
#
# Capture setup (internal use only). This is called at the start
# of new acquisition sequence to determine the current ROI and
# get the camera configured properly.
#
def captureSetup(self):
self.buffer_index = -1
self.last_frame_number = 0
# Set sub array mode.
self.setSubArrayMode()
# Get frame properties.
self.frame_x = self.getPropertyValue("image_width")[0]
self.frame_y = self.getPropertyValue("image_height")[0]
self.frame_bytes = self.getPropertyValue("image_framebytes")[0]
# Set capture mode.
self.checkStatus(dcam.dcam_precapture(self.camera_handle,
ctypes.c_int(DCAM_CAPTUREMODE_SEQUENCE)),
"dcam_precapture")
# ## checkStatus
#
# Check return value of the dcam function call.
# Throw an error if not as expected?
#
# @return The return value of the function.
#
def checkStatus(self, fn_return, fn_name="unknown"):
# if (fn_return != DCAMERR_NOERROR) and (fn_return != DCAMERR_ERROR):
# raise DCAMException("dcam error: " + fn_name + " returned " + str(fn_return))
if (fn_return == DCAMERR_ERROR):
c_buf_len = 80
c_buf = ctypes.create_string_buffer(c_buf_len)
dcam.dcam_getlasterror(self.camera_handle,
c_buf,
ctypes.c_int32(c_buf_len))
raise DCAMException("dcam error " + str(fn_name) + " " + str(c_buf.value))
# print "dcam error", fn_name, c_buf.value
return fn_return
# ## getCameraProperties
#
# Return the ids & names of all the properties that the camera supports. This
# is used at initialization to populate the self.properties attribute.
#
# @return A python dictionary of camera properties.
#
def getCameraProperties(self):
c_buf_len = 64
c_buf = ctypes.create_string_buffer(c_buf_len)
properties = {}
prop_id = ctypes.c_int32(0)
# Reset to the start.
ret = dcam.dcam_getnextpropertyid(self.camera_handle,
ctypes.byref(prop_id),
ctypes.c_int32(DCAMPROP_OPTION_NEAREST))
if (ret != 0) and (ret != DCAMERR_NOERROR):
self.checkStatus(ret, "dcam_getnextpropertyid")
# Get the first property.
ret = dcam.dcam_getnextpropertyid(self.camera_handle,
ctypes.byref(prop_id),
ctypes.c_int32(DCAMPROP_OPTION_NEXT))
if (ret != 0) and (ret != DCAMERR_NOERROR):
self.checkStatus(ret, "dcam_getnextpropertyid")
self.checkStatus(dcam.dcam_getpropertyname(self.camera_handle,
prop_id,
c_buf,
ctypes.c_int32(c_buf_len)),
"dcam_getpropertyname")
# Get the rest of the properties.
last = -1
while (prop_id.value != last):
last = prop_id.value
properties[convertPropertyName(c_buf.value)] = prop_id.value
ret = dcam.dcam_getnextpropertyid(self.camera_handle,
ctypes.byref(prop_id),
ctypes.c_int32(DCAMPROP_OPTION_NEXT))
if (ret != 0) and (ret != DCAMERR_NOERROR):
self.checkStatus(ret, "dcam_getnextpropertyid")
self.checkStatus(dcam.dcam_getpropertyname(self.camera_handle,
prop_id,
c_buf,
ctypes.c_int32(c_buf_len)),
"dcam_getpropertyname")
return properties
# ## getFrames
#
# Gets all of the available frames.
#
# This will block waiting for new frames even if
# there new frames available when it is called.
#
# @return (frames, (frame x size, frame y size))
#
def getFrames(self):
frames = []
k = self.newFrames()
for n in k:
# Lock the frame in the camera buffer & get address.
data_address = ctypes.c_void_p(0)
row_bytes = ctypes.c_int32(0)
self.checkStatus(dcam.dcam_lockdata(self.camera_handle,
ctypes.byref(data_address),
ctypes.byref(row_bytes),
ctypes.c_int32(n)),
"dcam_lockdata")
# Create storage for the frame & copy into this storage.
hc_data = HCamData(self.frame_bytes)
hc_data.copyData(data_address)
# Unlock the frame.
#
# According to the documentation, this would be done automatically
# on the next call to lockdata, but we do this anyway.
self.checkStatus(dcam.dcam_unlockdata(self.camera_handle),
"dcam_unlockdata")
frames.append(hc_data)
return frames, (self.frame_x, self.frame_y)
# ## getModelInfo
#
# Returns the model of the camera
#
# @param camera_id The (integer) camera id number.
#
# @return A string containing the camera name.
#
def getModelInfo(self, camera_id):
c_buf_len = 20
c_buf = ctypes.create_string_buffer(c_buf_len)
self.checkStatus(dcam.dcam_getmodelinfo(ctypes.c_int32(camera_id),
ctypes.c_int32(DCAM_IDSTR_MODEL),
c_buf,
ctypes.c_int(c_buf_len)),
"dcam_getmodelinfo")
return c_buf.value
# ## getProperties
#
# Return the list of camera properties. This is the one to call if you
# want to know the camera properties.
#
# @return A dictionary of camera properties.
#
def getProperties(self):
return self.properties
# ## getPropertyAttribute
#
# Return the attribute structure of a particular property.
#
# FIXME (OPTIMIZATION): Keep track of known attributes?
#
# @param property_name The name of the property to get the attributes of.
#
# @return A DCAM_PARAM_PROPERTYATTR object.
#
def getPropertyAttribute(self, property_name):
p_attr = DCAM_PARAM_PROPERTYATTR()
p_attr.cbSize = ctypes.sizeof(p_attr)
p_attr.iProp = self.properties[property_name]
ret = self.checkStatus(dcam.dcam_getpropertyattr(self.camera_handle,
ctypes.byref(p_attr)),
"dcam_getpropertyattr")
if (ret == 0):
self._logger.warning(f"Property {property_name} is not supported")
return False
else:
return p_attr
# ## getPropertyText
#
# Return the text options of a property (if any).
#
# @param property_name The name of the property to get the text values of.
#
# @return A dictionary of text properties (which may be empty).
#
def getPropertyText(self, property_name):
prop_attr = self.getPropertyAttribute(property_name)
if not (prop_attr.attribute & DCAMPROP_ATTR_HASVALUETEXT):
return {}
else:
# Create property text structure.
prop_id = self.properties[property_name]
v = ctypes.c_double(prop_attr.valuemin)
prop_text = DCAM_PARAM_PROPERTYVALUETEXT()
c_buf_len = 64
c_buf = ctypes.create_string_buffer(c_buf_len)
# prop_text.text = ctypes.c_char_p(ctypes.addressof(c_buf))
prop_text.cbSize = ctypes.c_int32(ctypes.sizeof(prop_text))
prop_text.iProp = ctypes.c_int32(prop_id)
prop_text.value = v
prop_text.text = ctypes.addressof(c_buf)
prop_text.textbytes = c_buf_len
# Collect text options.
done = False
text_options = {}
while not done:
# Get text of current value.
self.checkStatus(dcam.dcam_getpropertyvaluetext(self.camera_handle,
ctypes.byref(prop_text)),
"dcam_getpropertyvaluetext")
text_options[prop_text.text] = int(v.value)
# Get next value.
ret = dcam.dcam_querypropertyvalue(self.camera_handle,
ctypes.c_int32(prop_id),
ctypes.byref(v),
ctypes.c_int32(DCAMPROP_OPTION_NEXT))
prop_text.value = v
if (ret == 0):
done = True
return text_options
# ## getPropertyRange
#
# Return the range for an attribute.
#
# @param property_name The name of the property (as a string).
#
# @return (minimum value, maximum value)
#
def getPropertyRange(self, property_name):
prop_attr = self.getPropertyAttribute(property_name)
temp = prop_attr.attribute & DCAMPROP_TYPE_MASK
if (temp == DCAMPROP_TYPE_REAL):
return float(prop_attr.valuemin), float(prop_attr.valuemax)
else:
return int(prop_attr.valuemin), int(prop_attr.valuemax)
# ## getPropertyRW
#
# Return if a property is readable / writeable.
#
# @return (True/False (readable), True/False (writeable))
#
def getPropertyRW(self, property_name):
prop_attr = self.getPropertyAttribute(property_name)
return (
bool(prop_attr.attribute & DCAMPROP_ATTR_READABLE), # Check if property is readable.
bool(prop_attr.attribute & DCAMPROP_ATTR_WRITABLE) # Check if property is writable.
)
    # ## getPropertyValue
#
# Return the current setting of a particular property.
#
# @param property_name The name of the property.
#
# @return (the property value, the property type)
#
def getPropertyValue(self, property_name):
# Check if the property exists.
if not (property_name in self.properties):
self._logger.warning(f"Unknown property name: {property_name}")
return False
prop_id = self.properties[property_name]
# Get the property attributes.
prop_attr = self.getPropertyAttribute(property_name)
# Get the property value.
c_value = ctypes.c_double(0)
self.checkStatus(dcam.dcam_getpropertyvalue(self.camera_handle,
ctypes.c_int32(prop_id),
ctypes.byref(c_value)),
"dcam_getpropertyvalue")
# Convert type based on attribute type.
temp = prop_attr.attribute & DCAMPROP_TYPE_MASK
if (temp == DCAMPROP_TYPE_MODE):
prop_type = "MODE"
prop_value = int(c_value.value)
elif (temp == DCAMPROP_TYPE_LONG):
prop_type = "LONG"
prop_value = int(c_value.value)
elif (temp == DCAMPROP_TYPE_REAL):
prop_type = "REAL"
prop_value = c_value.value
else:
prop_type = "NONE"
prop_value = False
return prop_value, prop_type
# ## isCameraProperty
#
# Check if a property name is supported by the camera.
#
# @param property_name The name of the property.
#
# @return True/False if property_name is a supported camera property.
#
def isCameraProperty(self, property_name):
if (property_name in self.properties):
return True
else:
return False
# ## newFrames
#
# Return a list of the ids of all the new frames since the last check.
#
# This will block waiting for at least one new frame.
#
# @return [id of the first frame, .. , id of the last frame]
#
def newFrames(self):
# Wait for a new frame.
try:
dwait = ctypes.c_int(DCAMCAP_EVENT_FRAMEREADY)
self.checkStatus(dcam.dcam_wait(self.camera_handle,
ctypes.byref(dwait),
ctypes.c_int(0),
None),
"dcam_wait")
# Check how many new frames there are.
b_index, f_count = self.getAq_Info()
# Check that we have not acquired more frames than we can store in our buffer.
# Keep track of the maximum backlog.
cur_frame_number = f_count
backlog = cur_frame_number - self.last_frame_number
if (backlog > self.number_image_buffers):
self._logger.warning("Hamamatsu camera frame buffer overrun detected!")
if (backlog > self.max_backlog):
self.max_backlog = backlog
self.last_frame_number = cur_frame_number
cur_buffer_index = b_index
# Create a list of the new frames.
new_frames = []
if (cur_buffer_index < self.buffer_index):
for i in range(self.buffer_index + 1, self.number_image_buffers):
new_frames.append(i)
for i in range(cur_buffer_index + 1):
new_frames.append(i)
else:
for i in range(self.buffer_index, cur_buffer_index):
new_frames.append(i + 1)
self.buffer_index = cur_buffer_index
if self.debug:
self._logger.debug(new_frames)
return new_frames
except Exception:
return []
def getAq_Info(self):
"""b_index indicates which index position in the buffer was last
written to, f_count indicates how many frames were aquired since start"""
b_index = ctypes.c_int32(0)
f_count = ctypes.c_int32(0)
self.checkStatus(dcam.dcam_gettransferinfo(self.camera_handle,
ctypes.byref(b_index),
ctypes.byref(f_count)),
"dcam_gettransferinfo")
return b_index.value, f_count.value
# ## setPropertyValue
#
# Set the value of a property.
#
# @param property_name The name of the property.
# @param property_value The value to set the property to.
#
    # Minor changes made in this function (in the part that handles text property
    # values): it did not behave properly when called from setSubArrayMode. Now
    # setSubArrayMode passes b'OFF' instead of 'OFF', which necessitated changes
    # in this function too. /Andreas
def setPropertyValue(self, property_name, property_value):
# Check if the property exists.
if not (property_name in self.properties):
self._logger.warning(f"Unknown property name: {property_name}")
return False
# If the value is text, figure out what the
# corresponding numerical property value is.
if (type(property_value) == bytes): # Used to test == type("")
text_values = self.getPropertyText(property_name)
if (property_value in text_values):
property_value = float(text_values[property_value])
else:
self._logger.warning(
f"Unknown property text value: {property_value} for {property_name}"
)
return False
# Check that the property is within range.
pv_min, pv_max = self.getPropertyRange(property_name)
if (property_value < pv_min):
self._logger.warning(
f"Set property value {property_value} is less than minimum of {pv_min}"
f" {property_name}, setting to minimum"
)
property_value = pv_min
if (property_value > pv_max):
self._logger.warning(
f"Set property value {property_value} is greater than maximum of {pv_max}"
f" {property_name}, setting to maximum")
property_value = pv_max
# Set the property value, return what it was set too.
prop_id = self.properties[property_name]
p_value = ctypes.c_double(property_value)
self.checkStatus(dcam.dcam_setgetpropertyvalue(self.camera_handle,
ctypes.c_int32(prop_id),
ctypes.byref(p_value),
ctypes.c_int32(DCAM_DEFAULT_ARG)),
"dcam_setgetpropertyvalue")
return p_value.value
# ## setSubArrayMode
#
# This sets the sub-array mode as appropriate based on the current ROI.
#
# Change made in here: call to function setPropertyValue with b'OFF' instead of 'OFF'
# due to (maybe new?) structure of self.getPropertyText('subarray_mode') using bytes keywords.
# /Andreas
def setSubArrayMode(self):
# Check ROI properties.
roi_w = self.getPropertyValue("subarray_hsize")[0]
roi_h = self.getPropertyValue("subarray_vsize")[0]
# If the ROI is smaller than the entire frame turn on subarray mode
if ((roi_w == self.max_width) and (roi_h == self.max_height)):
self.setPropertyValue("subarray_mode", b'OFF')
else:
self.setPropertyValue("subarray_mode", b'ON')
# ## startAcquisition
#
# Start data acquisition.
#
def startAcquisition(self):
self.captureSetup()
#
# Allocate Hamamatsu image buffers.
# We allocate enough to buffer 2 seconds of data.
#
n_buffers = int(2.0 * self.getPropertyValue("internal_frame_rate")[0])
self.number_image_buffers = n_buffers
self.checkStatus(dcam.dcam_allocframe(self.camera_handle,
ctypes.c_int32(self.number_image_buffers)),
"dcam_allocframe")
# Start acquisition.
self.checkStatus(dcam.dcam_capture(self.camera_handle),
"dcam_capture")
# ## stopAcquisition
#
# Stop data acquisition.
#
def stopAcquisition(self):
# Stop acquisition.
self.checkStatus(dcam.dcam_idle(self.camera_handle),
"dcam_idle")
self._logger.info(
f"Max camera backlog was {self.max_backlog} of {self.number_image_buffers}"
)
self.max_backlog = 0
# Free image buffers.
self.number_image_buffers = 0
self.checkStatus(dcam.dcam_freeframe(self.camera_handle),
"dcam_freeframe")
# ## shutdown
#
# Close down the connection to the camera.
#
def shutdown(self):
self.checkStatus(dcam.dcam_close(self.camera_handle),
"dcam_close")
# ## HamamatsuCameraMR
#
# Memory recycling camera class.
#
# This version allocates "user memory" for the Hamamatsu camera
# buffers. This memory is also the location of the storage for
# the np_array element of a HCamData() class. The memory is
# allocated once at the beginning, then recycled. This means
# that there is a lot less memory allocation & shuffling compared
# to the basic class, which performs one allocation and (I believe)
# two copies for each frame that is acquired.
#
# WARNING: There is the potential here for chaos. Since the memory
# is now shared there is the possibility that downstream code
# will try and access the same bit of memory at the same time
# as the camera and this could end badly.
#
# FIXME: Use lockbits (and unlockbits) to avoid memory clashes?
# This would probably also involve some kind of reference counting
# scheme.
#
class HamamatsuCameraMR(HamamatsuCamera):
# ## __init__
#
# @param camera_id The id of the camera.
#
def __init__(self, camera_id):
HamamatsuCamera.__init__(self, camera_id)
self.hcam_data = []
self.hcam_ptr = False
self.old_frame_bytes = -1
self.setPropertyValue("output_trigger_kind[0]", 2)
# ## getFrames
#
# Gets all of the available frames.
#
# This will block waiting for new frames even if there new frames
# available when it is called.
#
# FIXME: It does not always seem to block? The length of frames can
# be zero. Are frames getting dropped? Some sort of race condition?
#
# @return (frames, (frame x size, frame y size))
#
def getFrames(self):
frames = []
for n in self.newFrames():
im = self.hcam_data[n].getData()
frames.append(np.reshape(im, (self.frame_y, self.frame_x)))
return np.array(frames), (self.frame_y, self.frame_x)
def getLast(self):
b_index, f_count = self.getAq_Info()
im = self.hcam_data[b_index].getData()
return np.reshape(im, (self.frame_y, self.frame_x))
def updateIndices(self):
b_index, f_count = self.getAq_Info()
self.buffer_index = b_index
self.last_frame_number = f_count
def getSpecFrames(self, ids):
"""Get frames specified by their id's"""
frames = []
for n in ids:
frames.append(self.hcam_data[n])
return frames
def UpdateFrameNrBufferIdx(self):
b_index, f_count = self.getAq_Info()
self.last_frame_number = f_count
self.buffer_index = b_index
# ## startAcquisition
#
    # Allocate an even number of frames (roughly 4 GB of memory in total) and start data acquisition.
#
def startAcquisition(self):
self.captureSetup()
self._logger.debug(self.frame_bytes)
#
# Allocate new image buffers if necessary.
# Allocate as many frames as can fit in 2GB of memory.
# NOTE: The for loop in this function can be timeconsuming if the frame_bytes are small
# since this leads to a large amount of frames and thus a large amount of iterations of the
# loop.hca
#
if (self.old_frame_bytes != self.frame_bytes):
# Even number of frames
n_buffers = 2 * int((4 * 1024 * 1024 * 1024) / (2 * self.frame_bytes))
self._logger.debug(f'Number of frames to buffer: {n_buffers}')
self.number_image_buffers = n_buffers
# Allocate new image buffers.
ptr_array = ctypes.c_void_p * self.number_image_buffers
self.hcam_ptr = ptr_array()
self.hcam_data = []
# This loop can take time if number_image_frames is large
for i in range(self.number_image_buffers):
hc_data = HCamData(self.frame_bytes)
self.hcam_ptr[i] = hc_data.getDataPtr()
self.hcam_data.append(hc_data)
self.old_frame_bytes = self.frame_bytes
self._logger.debug('Finished buffering frames')
# Attach image buffers.
#
# We need to attach & release for each acquisition otherwise
# we'll get an error if we try to change the ROI in any way
# between acquisitions.
self.checkStatus(dcam.dcam_attachbuffer(self.camera_handle,
self.hcam_ptr,
ctypes.sizeof(self.hcam_ptr)),
"dcam_attachbuffer")
# Start acquisition.
self.checkStatus(dcam.dcam_capture(self.camera_handle),
"dcam_capture")
# ## stopAcquisition
#
# Stop data acquisition and release the memory associates with the frames.
#
def stopAcquisition(self):
# Stop acquisition.
self.checkStatus(dcam.dcam_idle(self.camera_handle),
"dcam_idle")
# Release image buffers.
if (self.hcam_ptr):
self.checkStatus(dcam.dcam_releasebuffer(self.camera_handle),
"dcam_releasebuffer")
self._logger.info(f"Max camera backlog was: {self.max_backlog}")
self.max_backlog = 0
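# Illustrative usage sketch (not part of the original module). It requires a
# Hamamatsu camera and the DCAM-API DLL, so it only runs when this file is
# executed directly on such a machine; the exposure value is arbitrary.
if __name__ == "__main__":
    hcam = HamamatsuCameraMR(camera_id=0)
    print(hcam.setPropertyValue("exposure_time", 0.01))
    hcam.startAcquisition()
    frames, (frame_y, frame_x) = hcam.getFrames()
    print(len(frames), frame_y, frame_x)
    hcam.stopAcquisition()
    hcam.shutdown()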
#
# The MIT License
#
# Copyright (c) 2013 Zhuang Lab, Harvard University
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
# | PypiClean |
/Flask-AWSCognito-1.3.tar.gz/Flask-AWSCognito-1.3/docs/source/configure_aws.rst | ===============
Prepare Cognito
===============
CloudFormation
--------------
The following CloudFormation template will create
- User Pool, a directory of users
- User Pool Client, an entity that holds the configuration of the authentication flow
.. code-block:: yaml
AWSTemplateFormatVersion: "2010-09-09"
Description: Cognito Stack
Parameters:
Domain:
Type: String
Description: Unique Name for Cognito Resources
SignInCallback:
Type: String
Description: Full URL to be called after used is signed in
Resources:
UserPool:
Type: "AWS::Cognito::UserPool"
Properties:
UserPoolName: !Join ['-', [!Ref Domain, 'user-pool']]
AutoVerifiedAttributes:
- email
Schema:
- Name: email
AttributeDataType: String
Mutable: false
Required: true
PoolClientUser:
Type: AWS::Cognito::UserPoolClient
Description: Pool client to be used by users
Properties:
ClientName: !Join ['-', [!Ref Domain, 'cognito-user-pool-client']]
UserPoolId: !Ref UserPool
AllowedOAuthFlows:
- code
CallbackURLs:
- !Ref SignInCallback
AllowedOAuthFlowsUserPoolClient: true
AllowedOAuthScopes:
- email
- openid
SupportedIdentityProviders:
- COGNITO
Domain
------
The template doesn't create a Domain (not supported by CloudFormation as of December 2019),
so it should be created manually from the console or through API calls.
.. image:: https://raw.githubusercontent.com/cgauge/Flask-AWSCognito/master/docs/img/cognito_domain.png
Both options - "Amazon Cognito domain" and "Your own domain" are supported. Don't forget to pass it
to Flask app config.
Redirect URL
------------
One of the stack parameters of the CloudFormation template is a redirect URL. It's a Flask endpoint users
will be redirected to after successful sign in (see **Usage**).
ID to pass to Flask
--------------------
After the resources are created, we need the User Pool ID, User Pool Client ID and User Pool Client Secret
(not shown on the screenshots) to configure Flask:
.. image:: https://raw.githubusercontent.com/cgauge/Flask-AWSCognito/master/docs/img/poolid.png
.. image:: https://raw.githubusercontent.com/cgauge/Flask-AWSCognito/master/docs/img/clientid.png | PypiClean |
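
For illustration only, the resulting values can then be passed to the Flask app
config roughly as follows (the key names are indicative of the extension's
configuration and the values are placeholders; see **Usage** for the exact
names and the sign-in endpoint):

.. code-block:: python

    app.config['AWS_DEFAULT_REGION'] = 'eu-west-1'
    app.config['AWS_COGNITO_DOMAIN'] = 'https://yourdomain.auth.eu-west-1.amazoncognito.com'
    app.config['AWS_COGNITO_USER_POOL_ID'] = 'eu-west-1_XXXXXXXXX'
    app.config['AWS_COGNITO_USER_POOL_CLIENT_ID'] = 'xxxxxxxxxxxxxxxxxxxxxxxxxx'
    app.config['AWS_COGNITO_USER_POOL_CLIENT_SECRET'] = 'xxxxxxxxxxxxxxxxxxxxxxxxxxxxx'
    app.config['AWS_COGNITO_REDIRECT_URL'] = 'https://yourapp/signin-callback'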
/EdaSpiffWorkflow-0.0.2.tar.gz/EdaSpiffWorkflow-0.0.2/EdaSpiffWorkflow_Aadesh_G/bpmn/parser/BpmnParser.py | from builtins import object
# Copyright (C) 2012 Matthew Hampton
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA
# 02110-1301 USA
import glob
from lxml import etree
from ..workflow import BpmnWorkflow
from .ValidationException import ValidationException
from ..specs.events import StartEvent, EndEvent, BoundaryEvent, IntermediateCatchEvent, IntermediateThrowEvent
from ..specs.SubWorkflowTask import CallActivity, TransactionSubprocess
from ..specs.ExclusiveGateway import ExclusiveGateway
from ..specs.InclusiveGateway import InclusiveGateway
from ..specs.ManualTask import ManualTask
from ..specs.NoneTask import NoneTask
from ..specs.ParallelGateway import ParallelGateway
from ..specs.ScriptTask import ScriptTask
from ..specs.UserTask import UserTask
from .ProcessParser import ProcessParser
from .util import full_tag, xpath_eval, first
from .task_parsers import (UserTaskParser, NoneTaskParser, ManualTaskParser,
ExclusiveGatewayParser, ParallelGatewayParser, InclusiveGatewayParser,
CallActivityParser, TransactionSubprocessParser,
ScriptTaskParser, SubWorkflowParser)
from .event_parsers import (StartEventParser, EndEventParser, BoundaryEventParser,
IntermediateCatchEventParser, IntermediateThrowEventParser)
CAMUNDA_MODEL_NS = 'http://camunda.org/schema/1.0/bpmn'
class BpmnParser(object):
"""
The BpmnParser class is a pluggable base class that manages the parsing of
a set of BPMN files. It is intended that this class will be overriden by an
application that implements a BPMN engine.
Extension points: OVERRIDE_PARSER_CLASSES provides a map from full BPMN tag
name to a TaskParser and Task class. PROCESS_PARSER_CLASS provides a
subclass of ProcessParser WORKFLOW_CLASS provides a subclass of
BpmnWorkflow
"""
PARSER_CLASSES = {
full_tag('startEvent'): (StartEventParser, StartEvent),
full_tag('endEvent'): (EndEventParser, EndEvent),
full_tag('userTask'): (UserTaskParser, UserTask),
full_tag('task'): (NoneTaskParser, NoneTask),
full_tag('subProcess'): (SubWorkflowParser, CallActivity),
full_tag('manualTask'): (ManualTaskParser, ManualTask),
full_tag('exclusiveGateway'): (ExclusiveGatewayParser,
ExclusiveGateway),
full_tag('parallelGateway'): (ParallelGatewayParser, ParallelGateway),
full_tag('inclusiveGateway'): (InclusiveGatewayParser,
InclusiveGateway),
full_tag('callActivity'): (CallActivityParser, CallActivity),
full_tag('transaction'): (TransactionSubprocessParser, TransactionSubprocess),
full_tag('scriptTask'): (ScriptTaskParser, ScriptTask),
full_tag('intermediateCatchEvent'): (IntermediateCatchEventParser,
IntermediateCatchEvent),
full_tag('intermediateThrowEvent'): (IntermediateThrowEventParser,
IntermediateThrowEvent),
full_tag('boundaryEvent'): (BoundaryEventParser, BoundaryEvent),
}
OVERRIDE_PARSER_CLASSES = {}
PROCESS_PARSER_CLASS = ProcessParser
WORKFLOW_CLASS = BpmnWorkflow
def __init__(self):
"""
Constructor.
"""
self.process_parsers = {}
self.process_parsers_by_name = {}
def _get_parser_class(self, tag):
if tag in self.OVERRIDE_PARSER_CLASSES:
return self.OVERRIDE_PARSER_CLASSES[tag]
elif tag in self.PARSER_CLASSES:
return self.PARSER_CLASSES[tag]
return None, None
def get_process_parser(self, process_id_or_name):
"""
Returns the ProcessParser for the given process ID or name. It matches
by name first.
"""
if process_id_or_name in self.process_parsers_by_name:
return self.process_parsers_by_name[process_id_or_name]
elif process_id_or_name in self.process_parsers:
return self.process_parsers[process_id_or_name]
def get_process_ids(self):
"""Returns a list of process IDs"""
return self.process_parsers.keys()
def add_bpmn_file(self, filename):
"""
Add the given BPMN filename to the parser's set.
"""
self.add_bpmn_files([filename])
def add_bpmn_files_by_glob(self, g):
"""
Add all filenames matching the provided pattern (e.g. *.bpmn) to the
parser's set.
"""
self.add_bpmn_files(glob.glob(g))
    def add_bpmn_files(self, filenames):
        """
        Add all filenames in the given list to the parser's set.
        """
        for filename in filenames:
            # Use a context manager so the file handle is closed even if parsing fails.
            with open(filename, 'r') as f:
                self.add_bpmn_xml(etree.parse(f), filename=filename)
def add_bpmn_xml(self, bpmn, svg=None, filename=None):
"""
Add the given lxml representation of the BPMN file to the parser's set.
:param svg: Optionally, provide the text data for the SVG of the BPMN
file
:param filename: Optionally, provide the source filename.
"""
xpath = xpath_eval(bpmn)
# do a check on our bpmn to ensure that no id appears twice
# this *should* be taken care of by our modeler - so this test
# should never fail.
        ids = xpath('.//bpmn:*[@id]')
        seen_ids = set()
        for id_node in ids:
            node_id = id_node.get('id')
            if node_id in seen_ids:
                raise ValidationException(
                    'The bpmn document should have no repeating ids but (%s) repeats' % node_id,
                    node=id_node,
                    filename=filename)
            seen_ids.add(node_id)
processes = xpath('.//bpmn:process')
try:
extensions = xpath('.//bpmn:process/bpmn:extensionElements/camunda:properties/') or xpath('.//bpmn:collaboration/bpmn:extensionElements/camunda:properties/')
processes[0].attrib['type'] = extensions[0].attrib['value']
        except Exception:
            # Optional camunda extension properties; ignore any failure to read them.
            pass
for process in processes:
self.create_parser(process, xpath, svg, filename)
def create_parser(self, node, doc_xpath, svg=None, filename=None, current_lane=None):
parser = self.PROCESS_PARSER_CLASS(self, node, svg, filename=filename, doc_xpath=doc_xpath,
current_lane=current_lane)
if parser.get_id() in self.process_parsers:
raise ValidationException('Duplicate process ID', node=node, filename=filename)
if parser.get_name() in self.process_parsers_by_name:
raise ValidationException('Duplicate process name', node=node, filename=filename)
self.process_parsers[parser.get_id()] = parser
self.process_parsers_by_name[parser.get_name()] = parser
def parse_condition(self, sequence_flow_node):
xpath = xpath_eval(sequence_flow_node)
expression = first(xpath('.//bpmn:conditionExpression'))
return expression.text if expression is not None else None
    def parse_extensions(self, node, xpath=None):
        extensions = {}
        xpath = xpath or xpath_eval(node)
        # './/' keeps the search relative to the element the evaluator is bound to.
        extension_nodes = xpath('.//bpmn:extensionElements/camunda:properties/camunda:property')
        for extension_node in extension_nodes:
            extensions[extension_node.get('name')] = extension_node.get('value')
        return extensions
def parse_documentation(self, node, xpath=None):
xpath = xpath or xpath_eval(node)
documentation_node = first(xpath('.//bpmn:documentation'))
return None if documentation_node is None else documentation_node.text
def get_spec(self, process_id_or_name):
"""
Parses the required subset of the BPMN files, in order to provide an
instance of BpmnProcessSpec (i.e. WorkflowSpec)
for the given process ID or name. The Name is matched first.
"""
parser = self.get_process_parser(process_id_or_name)
if parser is None:
raise Exception(
f"The process '{process_id_or_name}' was not found. "
f"Did you mean one of the following: "
f"{', '.join(self.get_process_ids())}?")
return parser.get_spec() | PypiClean |
/HyperKitty-1.3.7.tar.gz/HyperKitty-1.3.7/hyperkitty/static/hyperkitty/libs/bootstrap/javascripts/bootstrap.min.js | !function(t,e){"object"==typeof exports&&"undefined"!=typeof module?e(exports,require("jquery"),require("popper.js")):"function"==typeof define&&define.amd?define(["exports","jquery","popper.js"],e):e((t=t||self).bootstrap={},t.jQuery,t.Popper)}(this,function(t,g,u){"use strict";function i(t,e){for(var n=0;n<e.length;n++){var i=e[n];i.enumerable=i.enumerable||!1,i.configurable=!0,"value"in i&&(i.writable=!0),Object.defineProperty(t,i.key,i)}}function s(t,e,n){return e&&i(t.prototype,e),n&&i(t,n),t}function l(o){for(var t=1;t<arguments.length;t++){var r=null!=arguments[t]?arguments[t]:{},e=Object.keys(r);"function"==typeof Object.getOwnPropertySymbols&&(e=e.concat(Object.getOwnPropertySymbols(r).filter(function(t){return Object.getOwnPropertyDescriptor(r,t).enumerable}))),e.forEach(function(t){var e,n,i;e=o,i=r[n=t],n in e?Object.defineProperty(e,n,{value:i,enumerable:!0,configurable:!0,writable:!0}):e[n]=i})}return o}g=g&&g.hasOwnProperty("default")?g.default:g,u=u&&u.hasOwnProperty("default")?u.default:u;var e="transitionend";function n(t){var e=this,n=!1;return g(this).one(_.TRANSITION_END,function(){n=!0}),setTimeout(function(){n||_.triggerTransitionEnd(e)},t),this}var _={TRANSITION_END:"bsTransitionEnd",getUID:function(t){for(;t+=~~(1e6*Math.random()),document.getElementById(t););return t},getSelectorFromElement:function(t){var e=t.getAttribute("data-target");if(!e||"#"===e){var n=t.getAttribute("href");e=n&&"#"!==n?n.trim():""}try{return document.querySelector(e)?e:null}catch(t){return null}},getTransitionDurationFromElement:function(t){if(!t)return 0;var e=g(t).css("transition-duration"),n=g(t).css("transition-delay"),i=parseFloat(e),o=parseFloat(n);return i||o?(e=e.split(",")[0],n=n.split(",")[0],1e3*(parseFloat(e)+parseFloat(n))):0},reflow:function(t){return t.offsetHeight},triggerTransitionEnd:function(t){g(t).trigger(e)},supportsTransitionEnd:function(){return Boolean(e)},isElement:function(t){return(t[0]||t).nodeType},typeCheckConfig:function(t,e,n){for(var i in n)if(Object.prototype.hasOwnProperty.call(n,i)){var o=n[i],r=e[i],s=r&&_.isElement(r)?"element":(a=r,{}.toString.call(a).match(/\s([a-z]+)/i)[1].toLowerCase());if(!new RegExp(o).test(s))throw new Error(t.toUpperCase()+': Option "'+i+'" provided type "'+s+'" but expected type "'+o+'".')}var a},findShadowRoot:function(t){if(!document.documentElement.attachShadow)return null;if("function"!=typeof t.getRootNode)return t instanceof ShadowRoot?t:t.parentNode?_.findShadowRoot(t.parentNode):null;var e=t.getRootNode();return e instanceof ShadowRoot?e:null}};g.fn.emulateTransitionEnd=n,g.event.special[_.TRANSITION_END]={bindType:e,delegateType:e,handle:function(t){if(g(t.target).is(this))return t.handleObj.handler.apply(this,arguments)}};var o="alert",r="bs.alert",a="."+r,c=g.fn[o],h={CLOSE:"close"+a,CLOSED:"closed"+a,CLICK_DATA_API:"click"+a+".data-api"},f="alert",d="fade",m="show",p=function(){function i(t){this._element=t}var t=i.prototype;return t.close=function(t){var e=this._element;t&&(e=this._getRootElement(t)),this._triggerCloseEvent(e).isDefaultPrevented()||this._removeElement(e)},t.dispose=function(){g.removeData(this._element,r),this._element=null},t._getRootElement=function(t){var e=_.getSelectorFromElement(t),n=!1;return e&&(n=document.querySelector(e)),n||(n=g(t).closest("."+f)[0]),n},t._triggerCloseEvent=function(t){var e=g.Event(h.CLOSE);return g(t).trigger(e),e},t._removeElement=function(e){var 
n=this;if(g(e).removeClass(m),g(e).hasClass(d)){var t=_.getTransitionDurationFromElement(e);g(e).one(_.TRANSITION_END,function(t){return n._destroyElement(e,t)}).emulateTransitionEnd(t)}else this._destroyElement(e)},t._destroyElement=function(t){g(t).detach().trigger(h.CLOSED).remove()},i._jQueryInterface=function(n){return this.each(function(){var t=g(this),e=t.data(r);e||(e=new i(this),t.data(r,e)),"close"===n&&e[n](this)})},i._handleDismiss=function(e){return function(t){t&&t.preventDefault(),e.close(this)}},s(i,null,[{key:"VERSION",get:function(){return"4.3.1"}}]),i}();g(document).on(h.CLICK_DATA_API,'[data-dismiss="alert"]',p._handleDismiss(new p)),g.fn[o]=p._jQueryInterface,g.fn[o].Constructor=p,g.fn[o].noConflict=function(){return g.fn[o]=c,p._jQueryInterface};var v="button",y="bs.button",E="."+y,C=".data-api",T=g.fn[v],S="active",b="btn",I="focus",D='[data-toggle^="button"]',w='[data-toggle="buttons"]',A='input:not([type="hidden"])',N=".active",O=".btn",k={CLICK_DATA_API:"click"+E+C,FOCUS_BLUR_DATA_API:"focus"+E+C+" blur"+E+C},P=function(){function n(t){this._element=t}var t=n.prototype;return t.toggle=function(){var t=!0,e=!0,n=g(this._element).closest(w)[0];if(n){var i=this._element.querySelector(A);if(i){if("radio"===i.type)if(i.checked&&this._element.classList.contains(S))t=!1;else{var o=n.querySelector(N);o&&g(o).removeClass(S)}if(t){if(i.hasAttribute("disabled")||n.hasAttribute("disabled")||i.classList.contains("disabled")||n.classList.contains("disabled"))return;i.checked=!this._element.classList.contains(S),g(i).trigger("change")}i.focus(),e=!1}}e&&this._element.setAttribute("aria-pressed",!this._element.classList.contains(S)),t&&g(this._element).toggleClass(S)},t.dispose=function(){g.removeData(this._element,y),this._element=null},n._jQueryInterface=function(e){return this.each(function(){var t=g(this).data(y);t||(t=new n(this),g(this).data(y,t)),"toggle"===e&&t[e]()})},s(n,null,[{key:"VERSION",get:function(){return"4.3.1"}}]),n}();g(document).on(k.CLICK_DATA_API,D,function(t){t.preventDefault();var e=t.target;g(e).hasClass(b)||(e=g(e).closest(O)),P._jQueryInterface.call(g(e),"toggle")}).on(k.FOCUS_BLUR_DATA_API,D,function(t){var e=g(t.target).closest(O)[0];g(e).toggleClass(I,/^focus(in)?$/.test(t.type))}),g.fn[v]=P._jQueryInterface,g.fn[v].Constructor=P,g.fn[v].noConflict=function(){return g.fn[v]=T,P._jQueryInterface};var L="carousel",j="bs.carousel",H="."+j,R=".data-api",x=g.fn[L],F={interval:5e3,keyboard:!0,slide:!1,pause:"hover",wrap:!0,touch:!0},U={interval:"(number|boolean)",keyboard:"boolean",slide:"(boolean|string)",pause:"(string|boolean)",wrap:"boolean",touch:"boolean"},W="next",q="prev",M="left",K="right",Q={SLIDE:"slide"+H,SLID:"slid"+H,KEYDOWN:"keydown"+H,MOUSEENTER:"mouseenter"+H,MOUSELEAVE:"mouseleave"+H,TOUCHSTART:"touchstart"+H,TOUCHMOVE:"touchmove"+H,TOUCHEND:"touchend"+H,POINTERDOWN:"pointerdown"+H,POINTERUP:"pointerup"+H,DRAG_START:"dragstart"+H,LOAD_DATA_API:"load"+H+R,CLICK_DATA_API:"click"+H+R},B="carousel",V="active",Y="slide",z="carousel-item-right",X="carousel-item-left",$="carousel-item-next",G="carousel-item-prev",J="pointer-event",Z=".active",tt=".active.carousel-item",et=".carousel-item",nt=".carousel-item img",it=".carousel-item-next, .carousel-item-prev",ot=".carousel-indicators",rt="[data-slide], [data-slide-to]",st='[data-ride="carousel"]',at={TOUCH:"touch",PEN:"pen"},lt=function(){function 
r(t,e){this._items=null,this._interval=null,this._activeElement=null,this._isPaused=!1,this._isSliding=!1,this.touchTimeout=null,this.touchStartX=0,this.touchDeltaX=0,this._config=this._getConfig(e),this._element=t,this._indicatorsElement=this._element.querySelector(ot),this._touchSupported="ontouchstart"in document.documentElement||0<navigator.maxTouchPoints,this._pointerEvent=Boolean(window.PointerEvent||window.MSPointerEvent),this._addEventListeners()}var t=r.prototype;return t.next=function(){this._isSliding||this._slide(W)},t.nextWhenVisible=function(){!document.hidden&&g(this._element).is(":visible")&&"hidden"!==g(this._element).css("visibility")&&this.next()},t.prev=function(){this._isSliding||this._slide(q)},t.pause=function(t){t||(this._isPaused=!0),this._element.querySelector(it)&&(_.triggerTransitionEnd(this._element),this.cycle(!0)),clearInterval(this._interval),this._interval=null},t.cycle=function(t){t||(this._isPaused=!1),this._interval&&(clearInterval(this._interval),this._interval=null),this._config.interval&&!this._isPaused&&(this._interval=setInterval((document.visibilityState?this.nextWhenVisible:this.next).bind(this),this._config.interval))},t.to=function(t){var e=this;this._activeElement=this._element.querySelector(tt);var n=this._getItemIndex(this._activeElement);if(!(t>this._items.length-1||t<0))if(this._isSliding)g(this._element).one(Q.SLID,function(){return e.to(t)});else{if(n===t)return this.pause(),void this.cycle();var i=n<t?W:q;this._slide(i,this._items[t])}},t.dispose=function(){g(this._element).off(H),g.removeData(this._element,j),this._items=null,this._config=null,this._element=null,this._interval=null,this._isPaused=null,this._isSliding=null,this._activeElement=null,this._indicatorsElement=null},t._getConfig=function(t){return t=l({},F,t),_.typeCheckConfig(L,t,U),t},t._handleSwipe=function(){var t=Math.abs(this.touchDeltaX);if(!(t<=40)){var e=t/this.touchDeltaX;0<e&&this.prev(),e<0&&this.next()}},t._addEventListeners=function(){var e=this;this._config.keyboard&&g(this._element).on(Q.KEYDOWN,function(t){return e._keydown(t)}),"hover"===this._config.pause&&g(this._element).on(Q.MOUSEENTER,function(t){return e.pause(t)}).on(Q.MOUSELEAVE,function(t){return e.cycle(t)}),this._config.touch&&this._addTouchEventListeners()},t._addTouchEventListeners=function(){var n=this;if(this._touchSupported){var e=function(t){n._pointerEvent&&at[t.originalEvent.pointerType.toUpperCase()]?n.touchStartX=t.originalEvent.clientX:n._pointerEvent||(n.touchStartX=t.originalEvent.touches[0].clientX)},i=function(t){n._pointerEvent&&at[t.originalEvent.pointerType.toUpperCase()]&&(n.touchDeltaX=t.originalEvent.clientX-n.touchStartX),n._handleSwipe(),"hover"===n._config.pause&&(n.pause(),n.touchTimeout&&clearTimeout(n.touchTimeout),n.touchTimeout=setTimeout(function(t){return n.cycle(t)},500+n._config.interval))};g(this._element.querySelectorAll(nt)).on(Q.DRAG_START,function(t){return t.preventDefault()}),this._pointerEvent?(g(this._element).on(Q.POINTERDOWN,function(t){return e(t)}),g(this._element).on(Q.POINTERUP,function(t){return i(t)}),this._element.classList.add(J)):(g(this._element).on(Q.TOUCHSTART,function(t){return e(t)}),g(this._element).on(Q.TOUCHMOVE,function(t){var e;(e=t).originalEvent.touches&&1<e.originalEvent.touches.length?n.touchDeltaX=0:n.touchDeltaX=e.originalEvent.touches[0].clientX-n.touchStartX}),g(this._element).on(Q.TOUCHEND,function(t){return i(t)}))}},t._keydown=function(t){if(!/input|textarea/i.test(t.target.tagName))switch(t.which){case 
37:t.preventDefault(),this.prev();break;case 39:t.preventDefault(),this.next()}},t._getItemIndex=function(t){return this._items=t&&t.parentNode?[].slice.call(t.parentNode.querySelectorAll(et)):[],this._items.indexOf(t)},t._getItemByDirection=function(t,e){var n=t===W,i=t===q,o=this._getItemIndex(e),r=this._items.length-1;if((i&&0===o||n&&o===r)&&!this._config.wrap)return e;var s=(o+(t===q?-1:1))%this._items.length;return-1===s?this._items[this._items.length-1]:this._items[s]},t._triggerSlideEvent=function(t,e){var n=this._getItemIndex(t),i=this._getItemIndex(this._element.querySelector(tt)),o=g.Event(Q.SLIDE,{relatedTarget:t,direction:e,from:i,to:n});return g(this._element).trigger(o),o},t._setActiveIndicatorElement=function(t){if(this._indicatorsElement){var e=[].slice.call(this._indicatorsElement.querySelectorAll(Z));g(e).removeClass(V);var n=this._indicatorsElement.children[this._getItemIndex(t)];n&&g(n).addClass(V)}},t._slide=function(t,e){var n,i,o,r=this,s=this._element.querySelector(tt),a=this._getItemIndex(s),l=e||s&&this._getItemByDirection(t,s),c=this._getItemIndex(l),h=Boolean(this._interval);if(o=t===W?(n=X,i=$,M):(n=z,i=G,K),l&&g(l).hasClass(V))this._isSliding=!1;else if(!this._triggerSlideEvent(l,o).isDefaultPrevented()&&s&&l){this._isSliding=!0,h&&this.pause(),this._setActiveIndicatorElement(l);var u=g.Event(Q.SLID,{relatedTarget:l,direction:o,from:a,to:c});if(g(this._element).hasClass(Y)){g(l).addClass(i),_.reflow(l),g(s).addClass(n),g(l).addClass(n);var f=parseInt(l.getAttribute("data-interval"),10);this._config.interval=f?(this._config.defaultInterval=this._config.defaultInterval||this._config.interval,f):this._config.defaultInterval||this._config.interval;var d=_.getTransitionDurationFromElement(s);g(s).one(_.TRANSITION_END,function(){g(l).removeClass(n+" "+i).addClass(V),g(s).removeClass(V+" "+i+" "+n),r._isSliding=!1,setTimeout(function(){return g(r._element).trigger(u)},0)}).emulateTransitionEnd(d)}else g(s).removeClass(V),g(l).addClass(V),this._isSliding=!1,g(this._element).trigger(u);h&&this.cycle()}},r._jQueryInterface=function(i){return this.each(function(){var t=g(this).data(j),e=l({},F,g(this).data());"object"==typeof i&&(e=l({},e,i));var n="string"==typeof i?i:e.slide;if(t||(t=new r(this,e),g(this).data(j,t)),"number"==typeof i)t.to(i);else if("string"==typeof n){if("undefined"==typeof t[n])throw new TypeError('No method named "'+n+'"');t[n]()}else e.interval&&e.ride&&(t.pause(),t.cycle())})},r._dataApiClickHandler=function(t){var e=_.getSelectorFromElement(this);if(e){var n=g(e)[0];if(n&&g(n).hasClass(B)){var i=l({},g(n).data(),g(this).data()),o=this.getAttribute("data-slide-to");o&&(i.interval=!1),r._jQueryInterface.call(g(n),i),o&&g(n).data(j).to(o),t.preventDefault()}}},s(r,null,[{key:"VERSION",get:function(){return"4.3.1"}},{key:"Default",get:function(){return F}}]),r}();g(document).on(Q.CLICK_DATA_API,rt,lt._dataApiClickHandler),g(window).on(Q.LOAD_DATA_API,function(){for(var t=[].slice.call(document.querySelectorAll(st)),e=0,n=t.length;e<n;e++){var i=g(t[e]);lt._jQueryInterface.call(i,i.data())}}),g.fn[L]=lt._jQueryInterface,g.fn[L].Constructor=lt,g.fn[L].noConflict=function(){return g.fn[L]=x,lt._jQueryInterface};var ct="collapse",ht="bs.collapse",ut="."+ht,ft=g.fn[ct],dt={toggle:!0,parent:""},gt={toggle:"boolean",parent:"(string|element)"},_t={SHOW:"show"+ut,SHOWN:"shown"+ut,HIDE:"hide"+ut,HIDDEN:"hidden"+ut,CLICK_DATA_API:"click"+ut+".data-api"},mt="show",pt="collapse",vt="collapsing",yt="collapsed",Et="width",Ct="height",Tt=".show, 
.collapsing",St='[data-toggle="collapse"]',bt=function(){function a(e,t){this._isTransitioning=!1,this._element=e,this._config=this._getConfig(t),this._triggerArray=[].slice.call(document.querySelectorAll('[data-toggle="collapse"][href="#'+e.id+'"],[data-toggle="collapse"][data-target="#'+e.id+'"]'));for(var n=[].slice.call(document.querySelectorAll(St)),i=0,o=n.length;i<o;i++){var r=n[i],s=_.getSelectorFromElement(r),a=[].slice.call(document.querySelectorAll(s)).filter(function(t){return t===e});null!==s&&0<a.length&&(this._selector=s,this._triggerArray.push(r))}this._parent=this._config.parent?this._getParent():null,this._config.parent||this._addAriaAndCollapsedClass(this._element,this._triggerArray),this._config.toggle&&this.toggle()}var t=a.prototype;return t.toggle=function(){g(this._element).hasClass(mt)?this.hide():this.show()},t.show=function(){var t,e,n=this;if(!this._isTransitioning&&!g(this._element).hasClass(mt)&&(this._parent&&0===(t=[].slice.call(this._parent.querySelectorAll(Tt)).filter(function(t){return"string"==typeof n._config.parent?t.getAttribute("data-parent")===n._config.parent:t.classList.contains(pt)})).length&&(t=null),!(t&&(e=g(t).not(this._selector).data(ht))&&e._isTransitioning))){var i=g.Event(_t.SHOW);if(g(this._element).trigger(i),!i.isDefaultPrevented()){t&&(a._jQueryInterface.call(g(t).not(this._selector),"hide"),e||g(t).data(ht,null));var o=this._getDimension();g(this._element).removeClass(pt).addClass(vt),this._element.style[o]=0,this._triggerArray.length&&g(this._triggerArray).removeClass(yt).attr("aria-expanded",!0),this.setTransitioning(!0);var r="scroll"+(o[0].toUpperCase()+o.slice(1)),s=_.getTransitionDurationFromElement(this._element);g(this._element).one(_.TRANSITION_END,function(){g(n._element).removeClass(vt).addClass(pt).addClass(mt),n._element.style[o]="",n.setTransitioning(!1),g(n._element).trigger(_t.SHOWN)}).emulateTransitionEnd(s),this._element.style[o]=this._element[r]+"px"}}},t.hide=function(){var t=this;if(!this._isTransitioning&&g(this._element).hasClass(mt)){var e=g.Event(_t.HIDE);if(g(this._element).trigger(e),!e.isDefaultPrevented()){var n=this._getDimension();this._element.style[n]=this._element.getBoundingClientRect()[n]+"px",_.reflow(this._element),g(this._element).addClass(vt).removeClass(pt).removeClass(mt);var i=this._triggerArray.length;if(0<i)for(var o=0;o<i;o++){var r=this._triggerArray[o],s=_.getSelectorFromElement(r);if(null!==s)g([].slice.call(document.querySelectorAll(s))).hasClass(mt)||g(r).addClass(yt).attr("aria-expanded",!1)}this.setTransitioning(!0);this._element.style[n]="";var a=_.getTransitionDurationFromElement(this._element);g(this._element).one(_.TRANSITION_END,function(){t.setTransitioning(!1),g(t._element).removeClass(vt).addClass(pt).trigger(_t.HIDDEN)}).emulateTransitionEnd(a)}}},t.setTransitioning=function(t){this._isTransitioning=t},t.dispose=function(){g.removeData(this._element,ht),this._config=null,this._parent=null,this._element=null,this._triggerArray=null,this._isTransitioning=null},t._getConfig=function(t){return(t=l({},dt,t)).toggle=Boolean(t.toggle),_.typeCheckConfig(ct,t,gt),t},t._getDimension=function(){return g(this._element).hasClass(Et)?Et:Ct},t._getParent=function(){var t,n=this;_.isElement(this._config.parent)?(t=this._config.parent,"undefined"!=typeof this._config.parent.jquery&&(t=this._config.parent[0])):t=document.querySelector(this._config.parent);var e='[data-toggle="collapse"][data-parent="'+this._config.parent+'"]',i=[].slice.call(t.querySelectorAll(e));return 
g(i).each(function(t,e){n._addAriaAndCollapsedClass(a._getTargetFromElement(e),[e])}),t},t._addAriaAndCollapsedClass=function(t,e){var n=g(t).hasClass(mt);e.length&&g(e).toggleClass(yt,!n).attr("aria-expanded",n)},a._getTargetFromElement=function(t){var e=_.getSelectorFromElement(t);return e?document.querySelector(e):null},a._jQueryInterface=function(i){return this.each(function(){var t=g(this),e=t.data(ht),n=l({},dt,t.data(),"object"==typeof i&&i?i:{});if(!e&&n.toggle&&/show|hide/.test(i)&&(n.toggle=!1),e||(e=new a(this,n),t.data(ht,e)),"string"==typeof i){if("undefined"==typeof e[i])throw new TypeError('No method named "'+i+'"');e[i]()}})},s(a,null,[{key:"VERSION",get:function(){return"4.3.1"}},{key:"Default",get:function(){return dt}}]),a}();g(document).on(_t.CLICK_DATA_API,St,function(t){"A"===t.currentTarget.tagName&&t.preventDefault();var n=g(this),e=_.getSelectorFromElement(this),i=[].slice.call(document.querySelectorAll(e));g(i).each(function(){var t=g(this),e=t.data(ht)?"toggle":n.data();bt._jQueryInterface.call(t,e)})}),g.fn[ct]=bt._jQueryInterface,g.fn[ct].Constructor=bt,g.fn[ct].noConflict=function(){return g.fn[ct]=ft,bt._jQueryInterface};var It="dropdown",Dt="bs.dropdown",wt="."+Dt,At=".data-api",Nt=g.fn[It],Ot=new RegExp("38|40|27"),kt={HIDE:"hide"+wt,HIDDEN:"hidden"+wt,SHOW:"show"+wt,SHOWN:"shown"+wt,CLICK:"click"+wt,CLICK_DATA_API:"click"+wt+At,KEYDOWN_DATA_API:"keydown"+wt+At,KEYUP_DATA_API:"keyup"+wt+At},Pt="disabled",Lt="show",jt="dropup",Ht="dropright",Rt="dropleft",xt="dropdown-menu-right",Ft="position-static",Ut='[data-toggle="dropdown"]',Wt=".dropdown form",qt=".dropdown-menu",Mt=".navbar-nav",Kt=".dropdown-menu .dropdown-item:not(.disabled):not(:disabled)",Qt="top-start",Bt="top-end",Vt="bottom-start",Yt="bottom-end",zt="right-start",Xt="left-start",$t={offset:0,flip:!0,boundary:"scrollParent",reference:"toggle",display:"dynamic"},Gt={offset:"(number|string|function)",flip:"boolean",boundary:"(string|element)",reference:"(string|element)",display:"string"},Jt=function(){function c(t,e){this._element=t,this._popper=null,this._config=this._getConfig(e),this._menu=this._getMenuElement(),this._inNavbar=this._detectNavbar(),this._addEventListeners()}var t=c.prototype;return t.toggle=function(){if(!this._element.disabled&&!g(this._element).hasClass(Pt)){var t=c._getParentFromElement(this._element),e=g(this._menu).hasClass(Lt);if(c._clearMenus(),!e){var n={relatedTarget:this._element},i=g.Event(kt.SHOW,n);if(g(t).trigger(i),!i.isDefaultPrevented()){if(!this._inNavbar){if("undefined"==typeof u)throw new TypeError("Bootstrap's dropdowns require Popper.js (https://popper.js.org/)");var o=this._element;"parent"===this._config.reference?o=t:_.isElement(this._config.reference)&&(o=this._config.reference,"undefined"!=typeof this._config.reference.jquery&&(o=this._config.reference[0])),"scrollParent"!==this._config.boundary&&g(t).addClass(Ft),this._popper=new u(o,this._menu,this._getPopperConfig())}"ontouchstart"in document.documentElement&&0===g(t).closest(Mt).length&&g(document.body).children().on("mouseover",null,g.noop),this._element.focus(),this._element.setAttribute("aria-expanded",!0),g(this._menu).toggleClass(Lt),g(t).toggleClass(Lt).trigger(g.Event(kt.SHOWN,n))}}}},t.show=function(){if(!(this._element.disabled||g(this._element).hasClass(Pt)||g(this._menu).hasClass(Lt))){var 
t={relatedTarget:this._element},e=g.Event(kt.SHOW,t),n=c._getParentFromElement(this._element);g(n).trigger(e),e.isDefaultPrevented()||(g(this._menu).toggleClass(Lt),g(n).toggleClass(Lt).trigger(g.Event(kt.SHOWN,t)))}},t.hide=function(){if(!this._element.disabled&&!g(this._element).hasClass(Pt)&&g(this._menu).hasClass(Lt)){var t={relatedTarget:this._element},e=g.Event(kt.HIDE,t),n=c._getParentFromElement(this._element);g(n).trigger(e),e.isDefaultPrevented()||(g(this._menu).toggleClass(Lt),g(n).toggleClass(Lt).trigger(g.Event(kt.HIDDEN,t)))}},t.dispose=function(){g.removeData(this._element,Dt),g(this._element).off(wt),this._element=null,(this._menu=null)!==this._popper&&(this._popper.destroy(),this._popper=null)},t.update=function(){this._inNavbar=this._detectNavbar(),null!==this._popper&&this._popper.scheduleUpdate()},t._addEventListeners=function(){var e=this;g(this._element).on(kt.CLICK,function(t){t.preventDefault(),t.stopPropagation(),e.toggle()})},t._getConfig=function(t){return t=l({},this.constructor.Default,g(this._element).data(),t),_.typeCheckConfig(It,t,this.constructor.DefaultType),t},t._getMenuElement=function(){if(!this._menu){var t=c._getParentFromElement(this._element);t&&(this._menu=t.querySelector(qt))}return this._menu},t._getPlacement=function(){var t=g(this._element.parentNode),e=Vt;return t.hasClass(jt)?(e=Qt,g(this._menu).hasClass(xt)&&(e=Bt)):t.hasClass(Ht)?e=zt:t.hasClass(Rt)?e=Xt:g(this._menu).hasClass(xt)&&(e=Yt),e},t._detectNavbar=function(){return 0<g(this._element).closest(".navbar").length},t._getOffset=function(){var e=this,t={};return"function"==typeof this._config.offset?t.fn=function(t){return t.offsets=l({},t.offsets,e._config.offset(t.offsets,e._element)||{}),t}:t.offset=this._config.offset,t},t._getPopperConfig=function(){var t={placement:this._getPlacement(),modifiers:{offset:this._getOffset(),flip:{enabled:this._config.flip},preventOverflow:{boundariesElement:this._config.boundary}}};return"static"===this._config.display&&(t.modifiers.applyStyle={enabled:!1}),t},c._jQueryInterface=function(e){return this.each(function(){var t=g(this).data(Dt);if(t||(t=new c(this,"object"==typeof e?e:null),g(this).data(Dt,t)),"string"==typeof e){if("undefined"==typeof t[e])throw new TypeError('No method named "'+e+'"');t[e]()}})},c._clearMenus=function(t){if(!t||3!==t.which&&("keyup"!==t.type||9===t.which))for(var e=[].slice.call(document.querySelectorAll(Ut)),n=0,i=e.length;n<i;n++){var o=c._getParentFromElement(e[n]),r=g(e[n]).data(Dt),s={relatedTarget:e[n]};if(t&&"click"===t.type&&(s.clickEvent=t),r){var a=r._menu;if(g(o).hasClass(Lt)&&!(t&&("click"===t.type&&/input|textarea/i.test(t.target.tagName)||"keyup"===t.type&&9===t.which)&&g.contains(o,t.target))){var l=g.Event(kt.HIDE,s);g(o).trigger(l),l.isDefaultPrevented()||("ontouchstart"in document.documentElement&&g(document.body).children().off("mouseover",null,g.noop),e[n].setAttribute("aria-expanded","false"),g(a).removeClass(Lt),g(o).removeClass(Lt).trigger(g.Event(kt.HIDDEN,s)))}}}},c._getParentFromElement=function(t){var e,n=_.getSelectorFromElement(t);return n&&(e=document.querySelector(n)),e||t.parentNode},c._dataApiKeydownHandler=function(t){if((/input|textarea/i.test(t.target.tagName)?!(32===t.which||27!==t.which&&(40!==t.which&&38!==t.which||g(t.target).closest(qt).length)):Ot.test(t.which))&&(t.preventDefault(),t.stopPropagation(),!this.disabled&&!g(this).hasClass(Pt))){var e=c._getParentFromElement(this),n=g(e).hasClass(Lt);if(n&&(!n||27!==t.which&&32!==t.which)){var 
i=[].slice.call(e.querySelectorAll(Kt));if(0!==i.length){var o=i.indexOf(t.target);38===t.which&&0<o&&o--,40===t.which&&o<i.length-1&&o++,o<0&&(o=0),i[o].focus()}}else{if(27===t.which){var r=e.querySelector(Ut);g(r).trigger("focus")}g(this).trigger("click")}}},s(c,null,[{key:"VERSION",get:function(){return"4.3.1"}},{key:"Default",get:function(){return $t}},{key:"DefaultType",get:function(){return Gt}}]),c}();g(document).on(kt.KEYDOWN_DATA_API,Ut,Jt._dataApiKeydownHandler).on(kt.KEYDOWN_DATA_API,qt,Jt._dataApiKeydownHandler).on(kt.CLICK_DATA_API+" "+kt.KEYUP_DATA_API,Jt._clearMenus).on(kt.CLICK_DATA_API,Ut,function(t){t.preventDefault(),t.stopPropagation(),Jt._jQueryInterface.call(g(this),"toggle")}).on(kt.CLICK_DATA_API,Wt,function(t){t.stopPropagation()}),g.fn[It]=Jt._jQueryInterface,g.fn[It].Constructor=Jt,g.fn[It].noConflict=function(){return g.fn[It]=Nt,Jt._jQueryInterface};var Zt="modal",te="bs.modal",ee="."+te,ne=g.fn[Zt],ie={backdrop:!0,keyboard:!0,focus:!0,show:!0},oe={backdrop:"(boolean|string)",keyboard:"boolean",focus:"boolean",show:"boolean"},re={HIDE:"hide"+ee,HIDDEN:"hidden"+ee,SHOW:"show"+ee,SHOWN:"shown"+ee,FOCUSIN:"focusin"+ee,RESIZE:"resize"+ee,CLICK_DISMISS:"click.dismiss"+ee,KEYDOWN_DISMISS:"keydown.dismiss"+ee,MOUSEUP_DISMISS:"mouseup.dismiss"+ee,MOUSEDOWN_DISMISS:"mousedown.dismiss"+ee,CLICK_DATA_API:"click"+ee+".data-api"},se="modal-dialog-scrollable",ae="modal-scrollbar-measure",le="modal-backdrop",ce="modal-open",he="fade",ue="show",fe=".modal-dialog",de=".modal-body",ge='[data-toggle="modal"]',_e='[data-dismiss="modal"]',me=".fixed-top, .fixed-bottom, .is-fixed, .sticky-top",pe=".sticky-top",ve=function(){function o(t,e){this._config=this._getConfig(e),this._element=t,this._dialog=t.querySelector(fe),this._backdrop=null,this._isShown=!1,this._isBodyOverflowing=!1,this._ignoreBackdropClick=!1,this._isTransitioning=!1,this._scrollbarWidth=0}var t=o.prototype;return t.toggle=function(t){return this._isShown?this.hide():this.show(t)},t.show=function(t){var e=this;if(!this._isShown&&!this._isTransitioning){g(this._element).hasClass(he)&&(this._isTransitioning=!0);var n=g.Event(re.SHOW,{relatedTarget:t});g(this._element).trigger(n),this._isShown||n.isDefaultPrevented()||(this._isShown=!0,this._checkScrollbar(),this._setScrollbar(),this._adjustDialog(),this._setEscapeEvent(),this._setResizeEvent(),g(this._element).on(re.CLICK_DISMISS,_e,function(t){return e.hide(t)}),g(this._dialog).on(re.MOUSEDOWN_DISMISS,function(){g(e._element).one(re.MOUSEUP_DISMISS,function(t){g(t.target).is(e._element)&&(e._ignoreBackdropClick=!0)})}),this._showBackdrop(function(){return e._showElement(t)}))}},t.hide=function(t){var e=this;if(t&&t.preventDefault(),this._isShown&&!this._isTransitioning){var n=g.Event(re.HIDE);if(g(this._element).trigger(n),this._isShown&&!n.isDefaultPrevented()){this._isShown=!1;var i=g(this._element).hasClass(he);if(i&&(this._isTransitioning=!0),this._setEscapeEvent(),this._setResizeEvent(),g(document).off(re.FOCUSIN),g(this._element).removeClass(ue),g(this._element).off(re.CLICK_DISMISS),g(this._dialog).off(re.MOUSEDOWN_DISMISS),i){var o=_.getTransitionDurationFromElement(this._element);g(this._element).one(_.TRANSITION_END,function(t){return e._hideModal(t)}).emulateTransitionEnd(o)}else this._hideModal()}}},t.dispose=function(){[window,this._element,this._dialog].forEach(function(t){return 
g(t).off(ee)}),g(document).off(re.FOCUSIN),g.removeData(this._element,te),this._config=null,this._element=null,this._dialog=null,this._backdrop=null,this._isShown=null,this._isBodyOverflowing=null,this._ignoreBackdropClick=null,this._isTransitioning=null,this._scrollbarWidth=null},t.handleUpdate=function(){this._adjustDialog()},t._getConfig=function(t){return t=l({},ie,t),_.typeCheckConfig(Zt,t,oe),t},t._showElement=function(t){var e=this,n=g(this._element).hasClass(he);this._element.parentNode&&this._element.parentNode.nodeType===Node.ELEMENT_NODE||document.body.appendChild(this._element),this._element.style.display="block",this._element.removeAttribute("aria-hidden"),this._element.setAttribute("aria-modal",!0),g(this._dialog).hasClass(se)?this._dialog.querySelector(de).scrollTop=0:this._element.scrollTop=0,n&&_.reflow(this._element),g(this._element).addClass(ue),this._config.focus&&this._enforceFocus();var i=g.Event(re.SHOWN,{relatedTarget:t}),o=function(){e._config.focus&&e._element.focus(),e._isTransitioning=!1,g(e._element).trigger(i)};if(n){var r=_.getTransitionDurationFromElement(this._dialog);g(this._dialog).one(_.TRANSITION_END,o).emulateTransitionEnd(r)}else o()},t._enforceFocus=function(){var e=this;g(document).off(re.FOCUSIN).on(re.FOCUSIN,function(t){document!==t.target&&e._element!==t.target&&0===g(e._element).has(t.target).length&&e._element.focus()})},t._setEscapeEvent=function(){var e=this;this._isShown&&this._config.keyboard?g(this._element).on(re.KEYDOWN_DISMISS,function(t){27===t.which&&(t.preventDefault(),e.hide())}):this._isShown||g(this._element).off(re.KEYDOWN_DISMISS)},t._setResizeEvent=function(){var e=this;this._isShown?g(window).on(re.RESIZE,function(t){return e.handleUpdate(t)}):g(window).off(re.RESIZE)},t._hideModal=function(){var t=this;this._element.style.display="none",this._element.setAttribute("aria-hidden",!0),this._element.removeAttribute("aria-modal"),this._isTransitioning=!1,this._showBackdrop(function(){g(document.body).removeClass(ce),t._resetAdjustments(),t._resetScrollbar(),g(t._element).trigger(re.HIDDEN)})},t._removeBackdrop=function(){this._backdrop&&(g(this._backdrop).remove(),this._backdrop=null)},t._showBackdrop=function(t){var e=this,n=g(this._element).hasClass(he)?he:"";if(this._isShown&&this._config.backdrop){if(this._backdrop=document.createElement("div"),this._backdrop.className=le,n&&this._backdrop.classList.add(n),g(this._backdrop).appendTo(document.body),g(this._element).on(re.CLICK_DISMISS,function(t){e._ignoreBackdropClick?e._ignoreBackdropClick=!1:t.target===t.currentTarget&&("static"===e._config.backdrop?e._element.focus():e.hide())}),n&&_.reflow(this._backdrop),g(this._backdrop).addClass(ue),!t)return;if(!n)return void t();var i=_.getTransitionDurationFromElement(this._backdrop);g(this._backdrop).one(_.TRANSITION_END,t).emulateTransitionEnd(i)}else if(!this._isShown&&this._backdrop){g(this._backdrop).removeClass(ue);var o=function(){e._removeBackdrop(),t&&t()};if(g(this._element).hasClass(he)){var r=_.getTransitionDurationFromElement(this._backdrop);g(this._backdrop).one(_.TRANSITION_END,o).emulateTransitionEnd(r)}else o()}else t&&t()},t._adjustDialog=function(){var 
t=this._element.scrollHeight>document.documentElement.clientHeight;!this._isBodyOverflowing&&t&&(this._element.style.paddingLeft=this._scrollbarWidth+"px"),this._isBodyOverflowing&&!t&&(this._element.style.paddingRight=this._scrollbarWidth+"px")},t._resetAdjustments=function(){this._element.style.paddingLeft="",this._element.style.paddingRight=""},t._checkScrollbar=function(){var t=document.body.getBoundingClientRect();this._isBodyOverflowing=t.left+t.right<window.innerWidth,this._scrollbarWidth=this._getScrollbarWidth()},t._setScrollbar=function(){var o=this;if(this._isBodyOverflowing){var t=[].slice.call(document.querySelectorAll(me)),e=[].slice.call(document.querySelectorAll(pe));g(t).each(function(t,e){var n=e.style.paddingRight,i=g(e).css("padding-right");g(e).data("padding-right",n).css("padding-right",parseFloat(i)+o._scrollbarWidth+"px")}),g(e).each(function(t,e){var n=e.style.marginRight,i=g(e).css("margin-right");g(e).data("margin-right",n).css("margin-right",parseFloat(i)-o._scrollbarWidth+"px")});var n=document.body.style.paddingRight,i=g(document.body).css("padding-right");g(document.body).data("padding-right",n).css("padding-right",parseFloat(i)+this._scrollbarWidth+"px")}g(document.body).addClass(ce)},t._resetScrollbar=function(){var t=[].slice.call(document.querySelectorAll(me));g(t).each(function(t,e){var n=g(e).data("padding-right");g(e).removeData("padding-right"),e.style.paddingRight=n||""});var e=[].slice.call(document.querySelectorAll(""+pe));g(e).each(function(t,e){var n=g(e).data("margin-right");"undefined"!=typeof n&&g(e).css("margin-right",n).removeData("margin-right")});var n=g(document.body).data("padding-right");g(document.body).removeData("padding-right"),document.body.style.paddingRight=n||""},t._getScrollbarWidth=function(){var t=document.createElement("div");t.className=ae,document.body.appendChild(t);var e=t.getBoundingClientRect().width-t.clientWidth;return document.body.removeChild(t),e},o._jQueryInterface=function(n,i){return this.each(function(){var t=g(this).data(te),e=l({},ie,g(this).data(),"object"==typeof n&&n?n:{});if(t||(t=new o(this,e),g(this).data(te,t)),"string"==typeof n){if("undefined"==typeof t[n])throw new TypeError('No method named "'+n+'"');t[n](i)}else e.show&&t.show(i)})},s(o,null,[{key:"VERSION",get:function(){return"4.3.1"}},{key:"Default",get:function(){return ie}}]),o}();g(document).on(re.CLICK_DATA_API,ge,function(t){var e,n=this,i=_.getSelectorFromElement(this);i&&(e=document.querySelector(i));var o=g(e).data(te)?"toggle":l({},g(e).data(),g(this).data());"A"!==this.tagName&&"AREA"!==this.tagName||t.preventDefault();var r=g(e).one(re.SHOW,function(t){t.isDefaultPrevented()||r.one(re.HIDDEN,function(){g(n).is(":visible")&&n.focus()})});ve._jQueryInterface.call(g(e),o,this)}),g.fn[Zt]=ve._jQueryInterface,g.fn[Zt].Constructor=ve,g.fn[Zt].noConflict=function(){return g.fn[Zt]=ne,ve._jQueryInterface};var ye=["background","cite","href","itemtype","longdesc","poster","src","xlink:href"],Ee={"*":["class","dir","id","lang","role",/^aria-[\w-]*$/i],a:["target","href","title","rel"],area:[],b:[],br:[],col:[],code:[],div:[],em:[],hr:[],h1:[],h2:[],h3:[],h4:[],h5:[],h6:[],i:[],img:["src","alt","title","width","height"],li:[],ol:[],p:[],pre:[],s:[],small:[],span:[],sub:[],sup:[],strong:[],u:[],ul:[]},Ce=/^(?:(?:https?|mailto|ftp|tel|file):|[^&:/?#]*(?:[/?#]|$))/gi,Te=/^data:(?:image\/(?:bmp|gif|jpeg|jpg|png|tiff|webp)|video\/(?:mpeg|mp4|ogg|webm)|audio\/(?:mp3|oga|ogg|opus));base64,[a-z0-9+/]+=*$/i;function Se(t,s,e){if(0===t.length)return 
t;if(e&&"function"==typeof e)return e(t);for(var n=(new window.DOMParser).parseFromString(t,"text/html"),a=Object.keys(s),l=[].slice.call(n.body.querySelectorAll("*")),i=function(t,e){var n=l[t],i=n.nodeName.toLowerCase();if(-1===a.indexOf(n.nodeName.toLowerCase()))return n.parentNode.removeChild(n),"continue";var o=[].slice.call(n.attributes),r=[].concat(s["*"]||[],s[i]||[]);o.forEach(function(t){(function(t,e){var n=t.nodeName.toLowerCase();if(-1!==e.indexOf(n))return-1===ye.indexOf(n)||Boolean(t.nodeValue.match(Ce)||t.nodeValue.match(Te));for(var i=e.filter(function(t){return t instanceof RegExp}),o=0,r=i.length;o<r;o++)if(n.match(i[o]))return!0;return!1})(t,r)||n.removeAttribute(t.nodeName)})},o=0,r=l.length;o<r;o++)i(o);return n.body.innerHTML}var be="tooltip",Ie="bs.tooltip",De="."+Ie,we=g.fn[be],Ae="bs-tooltip",Ne=new RegExp("(^|\\s)"+Ae+"\\S+","g"),Oe=["sanitize","whiteList","sanitizeFn"],ke={animation:"boolean",template:"string",title:"(string|element|function)",trigger:"string",delay:"(number|object)",html:"boolean",selector:"(string|boolean)",placement:"(string|function)",offset:"(number|string|function)",container:"(string|element|boolean)",fallbackPlacement:"(string|array)",boundary:"(string|element)",sanitize:"boolean",sanitizeFn:"(null|function)",whiteList:"object"},Pe={AUTO:"auto",TOP:"top",RIGHT:"right",BOTTOM:"bottom",LEFT:"left"},Le={animation:!0,template:'<div class="tooltip" role="tooltip"><div class="arrow"></div><div class="tooltip-inner"></div></div>',trigger:"hover focus",title:"",delay:0,html:!1,selector:!1,placement:"top",offset:0,container:!1,fallbackPlacement:"flip",boundary:"scrollParent",sanitize:!0,sanitizeFn:null,whiteList:Ee},je="show",He="out",Re={HIDE:"hide"+De,HIDDEN:"hidden"+De,SHOW:"show"+De,SHOWN:"shown"+De,INSERTED:"inserted"+De,CLICK:"click"+De,FOCUSIN:"focusin"+De,FOCUSOUT:"focusout"+De,MOUSEENTER:"mouseenter"+De,MOUSELEAVE:"mouseleave"+De},xe="fade",Fe="show",Ue=".tooltip-inner",We=".arrow",qe="hover",Me="focus",Ke="click",Qe="manual",Be=function(){function i(t,e){if("undefined"==typeof u)throw new TypeError("Bootstrap's tooltips require Popper.js (https://popper.js.org/)");this._isEnabled=!0,this._timeout=0,this._hoverState="",this._activeTrigger={},this._popper=null,this.element=t,this.config=this._getConfig(e),this.tip=null,this._setListeners()}var t=i.prototype;return t.enable=function(){this._isEnabled=!0},t.disable=function(){this._isEnabled=!1},t.toggleEnabled=function(){this._isEnabled=!this._isEnabled},t.toggle=function(t){if(this._isEnabled)if(t){var e=this.constructor.DATA_KEY,n=g(t.currentTarget).data(e);n||(n=new this.constructor(t.currentTarget,this._getDelegateConfig()),g(t.currentTarget).data(e,n)),n._activeTrigger.click=!n._activeTrigger.click,n._isWithActiveTrigger()?n._enter(null,n):n._leave(null,n)}else{if(g(this.getTipElement()).hasClass(Fe))return void this._leave(null,this);this._enter(null,this)}},t.dispose=function(){clearTimeout(this._timeout),g.removeData(this.element,this.constructor.DATA_KEY),g(this.element).off(this.constructor.EVENT_KEY),g(this.element).closest(".modal").off("hide.bs.modal"),this.tip&&g(this.tip).remove(),this._isEnabled=null,this._timeout=null,this._hoverState=null,(this._activeTrigger=null)!==this._popper&&this._popper.destroy(),this._popper=null,this.element=null,this.config=null,this.tip=null},t.show=function(){var e=this;if("none"===g(this.element).css("display"))throw new Error("Please use show on visible elements");var 
t=g.Event(this.constructor.Event.SHOW);if(this.isWithContent()&&this._isEnabled){g(this.element).trigger(t);var n=_.findShadowRoot(this.element),i=g.contains(null!==n?n:this.element.ownerDocument.documentElement,this.element);if(t.isDefaultPrevented()||!i)return;var o=this.getTipElement(),r=_.getUID(this.constructor.NAME);o.setAttribute("id",r),this.element.setAttribute("aria-describedby",r),this.setContent(),this.config.animation&&g(o).addClass(xe);var s="function"==typeof this.config.placement?this.config.placement.call(this,o,this.element):this.config.placement,a=this._getAttachment(s);this.addAttachmentClass(a);var l=this._getContainer();g(o).data(this.constructor.DATA_KEY,this),g.contains(this.element.ownerDocument.documentElement,this.tip)||g(o).appendTo(l),g(this.element).trigger(this.constructor.Event.INSERTED),this._popper=new u(this.element,o,{placement:a,modifiers:{offset:this._getOffset(),flip:{behavior:this.config.fallbackPlacement},arrow:{element:We},preventOverflow:{boundariesElement:this.config.boundary}},onCreate:function(t){t.originalPlacement!==t.placement&&e._handlePopperPlacementChange(t)},onUpdate:function(t){return e._handlePopperPlacementChange(t)}}),g(o).addClass(Fe),"ontouchstart"in document.documentElement&&g(document.body).children().on("mouseover",null,g.noop);var c=function(){e.config.animation&&e._fixTransition();var t=e._hoverState;e._hoverState=null,g(e.element).trigger(e.constructor.Event.SHOWN),t===He&&e._leave(null,e)};if(g(this.tip).hasClass(xe)){var h=_.getTransitionDurationFromElement(this.tip);g(this.tip).one(_.TRANSITION_END,c).emulateTransitionEnd(h)}else c()}},t.hide=function(t){var e=this,n=this.getTipElement(),i=g.Event(this.constructor.Event.HIDE),o=function(){e._hoverState!==je&&n.parentNode&&n.parentNode.removeChild(n),e._cleanTipClass(),e.element.removeAttribute("aria-describedby"),g(e.element).trigger(e.constructor.Event.HIDDEN),null!==e._popper&&e._popper.destroy(),t&&t()};if(g(this.element).trigger(i),!i.isDefaultPrevented()){if(g(n).removeClass(Fe),"ontouchstart"in document.documentElement&&g(document.body).children().off("mouseover",null,g.noop),this._activeTrigger[Ke]=!1,this._activeTrigger[Me]=!1,this._activeTrigger[qe]=!1,g(this.tip).hasClass(xe)){var r=_.getTransitionDurationFromElement(n);g(n).one(_.TRANSITION_END,o).emulateTransitionEnd(r)}else o();this._hoverState=""}},t.update=function(){null!==this._popper&&this._popper.scheduleUpdate()},t.isWithContent=function(){return Boolean(this.getTitle())},t.addAttachmentClass=function(t){g(this.getTipElement()).addClass(Ae+"-"+t)},t.getTipElement=function(){return this.tip=this.tip||g(this.config.template)[0],this.tip},t.setContent=function(){var t=this.getTipElement();this.setElementContent(g(t.querySelectorAll(Ue)),this.getTitle()),g(t).removeClass(xe+" "+Fe)},t.setElementContent=function(t,e){"object"!=typeof e||!e.nodeType&&!e.jquery?this.config.html?(this.config.sanitize&&(e=Se(e,this.config.whiteList,this.config.sanitizeFn)),t.html(e)):t.text(e):this.config.html?g(e).parent().is(t)||t.empty().append(e):t.text(g(e).text())},t.getTitle=function(){var t=this.element.getAttribute("data-original-title");return t||(t="function"==typeof this.config.title?this.config.title.call(this.element):this.config.title),t},t._getOffset=function(){var e=this,t={};return"function"==typeof this.config.offset?t.fn=function(t){return 
t.offsets=l({},t.offsets,e.config.offset(t.offsets,e.element)||{}),t}:t.offset=this.config.offset,t},t._getContainer=function(){return!1===this.config.container?document.body:_.isElement(this.config.container)?g(this.config.container):g(document).find(this.config.container)},t._getAttachment=function(t){return Pe[t.toUpperCase()]},t._setListeners=function(){var i=this;this.config.trigger.split(" ").forEach(function(t){if("click"===t)g(i.element).on(i.constructor.Event.CLICK,i.config.selector,function(t){return i.toggle(t)});else if(t!==Qe){var e=t===qe?i.constructor.Event.MOUSEENTER:i.constructor.Event.FOCUSIN,n=t===qe?i.constructor.Event.MOUSELEAVE:i.constructor.Event.FOCUSOUT;g(i.element).on(e,i.config.selector,function(t){return i._enter(t)}).on(n,i.config.selector,function(t){return i._leave(t)})}}),g(this.element).closest(".modal").on("hide.bs.modal",function(){i.element&&i.hide()}),this.config.selector?this.config=l({},this.config,{trigger:"manual",selector:""}):this._fixTitle()},t._fixTitle=function(){var t=typeof this.element.getAttribute("data-original-title");(this.element.getAttribute("title")||"string"!==t)&&(this.element.setAttribute("data-original-title",this.element.getAttribute("title")||""),this.element.setAttribute("title",""))},t._enter=function(t,e){var n=this.constructor.DATA_KEY;(e=e||g(t.currentTarget).data(n))||(e=new this.constructor(t.currentTarget,this._getDelegateConfig()),g(t.currentTarget).data(n,e)),t&&(e._activeTrigger["focusin"===t.type?Me:qe]=!0),g(e.getTipElement()).hasClass(Fe)||e._hoverState===je?e._hoverState=je:(clearTimeout(e._timeout),e._hoverState=je,e.config.delay&&e.config.delay.show?e._timeout=setTimeout(function(){e._hoverState===je&&e.show()},e.config.delay.show):e.show())},t._leave=function(t,e){var n=this.constructor.DATA_KEY;(e=e||g(t.currentTarget).data(n))||(e=new this.constructor(t.currentTarget,this._getDelegateConfig()),g(t.currentTarget).data(n,e)),t&&(e._activeTrigger["focusout"===t.type?Me:qe]=!1),e._isWithActiveTrigger()||(clearTimeout(e._timeout),e._hoverState=He,e.config.delay&&e.config.delay.hide?e._timeout=setTimeout(function(){e._hoverState===He&&e.hide()},e.config.delay.hide):e.hide())},t._isWithActiveTrigger=function(){for(var t in this._activeTrigger)if(this._activeTrigger[t])return!0;return!1},t._getConfig=function(t){var e=g(this.element).data();return Object.keys(e).forEach(function(t){-1!==Oe.indexOf(t)&&delete e[t]}),"number"==typeof(t=l({},this.constructor.Default,e,"object"==typeof t&&t?t:{})).delay&&(t.delay={show:t.delay,hide:t.delay}),"number"==typeof t.title&&(t.title=t.title.toString()),"number"==typeof t.content&&(t.content=t.content.toString()),_.typeCheckConfig(be,t,this.constructor.DefaultType),t.sanitize&&(t.template=Se(t.template,t.whiteList,t.sanitizeFn)),t},t._getDelegateConfig=function(){var t={};if(this.config)for(var e in this.config)this.constructor.Default[e]!==this.config[e]&&(t[e]=this.config[e]);return t},t._cleanTipClass=function(){var t=g(this.getTipElement()),e=t.attr("class").match(Ne);null!==e&&e.length&&t.removeClass(e.join(""))},t._handlePopperPlacementChange=function(t){var e=t.instance;this.tip=e.popper,this._cleanTipClass(),this.addAttachmentClass(this._getAttachment(t.placement))},t._fixTransition=function(){var t=this.getTipElement(),e=this.config.animation;null===t.getAttribute("x-placement")&&(g(t).removeClass(xe),this.config.animation=!1,this.hide(),this.show(),this.config.animation=e)},i._jQueryInterface=function(n){return this.each(function(){var 
t=g(this).data(Ie),e="object"==typeof n&&n;if((t||!/dispose|hide/.test(n))&&(t||(t=new i(this,e),g(this).data(Ie,t)),"string"==typeof n)){if("undefined"==typeof t[n])throw new TypeError('No method named "'+n+'"');t[n]()}})},s(i,null,[{key:"VERSION",get:function(){return"4.3.1"}},{key:"Default",get:function(){return Le}},{key:"NAME",get:function(){return be}},{key:"DATA_KEY",get:function(){return Ie}},{key:"Event",get:function(){return Re}},{key:"EVENT_KEY",get:function(){return De}},{key:"DefaultType",get:function(){return ke}}]),i}();g.fn[be]=Be._jQueryInterface,g.fn[be].Constructor=Be,g.fn[be].noConflict=function(){return g.fn[be]=we,Be._jQueryInterface};var Ve="popover",Ye="bs.popover",ze="."+Ye,Xe=g.fn[Ve],$e="bs-popover",Ge=new RegExp("(^|\\s)"+$e+"\\S+","g"),Je=l({},Be.Default,{placement:"right",trigger:"click",content:"",template:'<div class="popover" role="tooltip"><div class="arrow"></div><h3 class="popover-header"></h3><div class="popover-body"></div></div>'}),Ze=l({},Be.DefaultType,{content:"(string|element|function)"}),tn="fade",en="show",nn=".popover-header",on=".popover-body",rn={HIDE:"hide"+ze,HIDDEN:"hidden"+ze,SHOW:"show"+ze,SHOWN:"shown"+ze,INSERTED:"inserted"+ze,CLICK:"click"+ze,FOCUSIN:"focusin"+ze,FOCUSOUT:"focusout"+ze,MOUSEENTER:"mouseenter"+ze,MOUSELEAVE:"mouseleave"+ze},sn=function(t){var e,n;function i(){return t.apply(this,arguments)||this}n=t,(e=i).prototype=Object.create(n.prototype),(e.prototype.constructor=e).__proto__=n;var o=i.prototype;return o.isWithContent=function(){return this.getTitle()||this._getContent()},o.addAttachmentClass=function(t){g(this.getTipElement()).addClass($e+"-"+t)},o.getTipElement=function(){return this.tip=this.tip||g(this.config.template)[0],this.tip},o.setContent=function(){var t=g(this.getTipElement());this.setElementContent(t.find(nn),this.getTitle());var e=this._getContent();"function"==typeof e&&(e=e.call(this.element)),this.setElementContent(t.find(on),e),t.removeClass(tn+" "+en)},o._getContent=function(){return this.element.getAttribute("data-content")||this.config.content},o._cleanTipClass=function(){var t=g(this.getTipElement()),e=t.attr("class").match(Ge);null!==e&&0<e.length&&t.removeClass(e.join(""))},i._jQueryInterface=function(n){return this.each(function(){var t=g(this).data(Ye),e="object"==typeof n?n:null;if((t||!/dispose|hide/.test(n))&&(t||(t=new i(this,e),g(this).data(Ye,t)),"string"==typeof n)){if("undefined"==typeof t[n])throw new TypeError('No method named "'+n+'"');t[n]()}})},s(i,null,[{key:"VERSION",get:function(){return"4.3.1"}},{key:"Default",get:function(){return Je}},{key:"NAME",get:function(){return Ve}},{key:"DATA_KEY",get:function(){return Ye}},{key:"Event",get:function(){return rn}},{key:"EVENT_KEY",get:function(){return ze}},{key:"DefaultType",get:function(){return Ze}}]),i}(Be);g.fn[Ve]=sn._jQueryInterface,g.fn[Ve].Constructor=sn,g.fn[Ve].noConflict=function(){return g.fn[Ve]=Xe,sn._jQueryInterface};var an="scrollspy",ln="bs.scrollspy",cn="."+ln,hn=g.fn[an],un={offset:10,method:"auto",target:""},fn={offset:"number",method:"string",target:"(string|element)"},dn={ACTIVATE:"activate"+cn,SCROLL:"scroll"+cn,LOAD_DATA_API:"load"+cn+".data-api"},gn="dropdown-item",_n="active",mn='[data-spy="scroll"]',pn=".nav, .list-group",vn=".nav-link",yn=".nav-item",En=".list-group-item",Cn=".dropdown",Tn=".dropdown-item",Sn=".dropdown-toggle",bn="offset",In="position",Dn=function(){function n(t,e){var 
n=this;this._element=t,this._scrollElement="BODY"===t.tagName?window:t,this._config=this._getConfig(e),this._selector=this._config.target+" "+vn+","+this._config.target+" "+En+","+this._config.target+" "+Tn,this._offsets=[],this._targets=[],this._activeTarget=null,this._scrollHeight=0,g(this._scrollElement).on(dn.SCROLL,function(t){return n._process(t)}),this.refresh(),this._process()}var t=n.prototype;return t.refresh=function(){var e=this,t=this._scrollElement===this._scrollElement.window?bn:In,o="auto"===this._config.method?t:this._config.method,r=o===In?this._getScrollTop():0;this._offsets=[],this._targets=[],this._scrollHeight=this._getScrollHeight(),[].slice.call(document.querySelectorAll(this._selector)).map(function(t){var e,n=_.getSelectorFromElement(t);if(n&&(e=document.querySelector(n)),e){var i=e.getBoundingClientRect();if(i.width||i.height)return[g(e)[o]().top+r,n]}return null}).filter(function(t){return t}).sort(function(t,e){return t[0]-e[0]}).forEach(function(t){e._offsets.push(t[0]),e._targets.push(t[1])})},t.dispose=function(){g.removeData(this._element,ln),g(this._scrollElement).off(cn),this._element=null,this._scrollElement=null,this._config=null,this._selector=null,this._offsets=null,this._targets=null,this._activeTarget=null,this._scrollHeight=null},t._getConfig=function(t){if("string"!=typeof(t=l({},un,"object"==typeof t&&t?t:{})).target){var e=g(t.target).attr("id");e||(e=_.getUID(an),g(t.target).attr("id",e)),t.target="#"+e}return _.typeCheckConfig(an,t,fn),t},t._getScrollTop=function(){return this._scrollElement===window?this._scrollElement.pageYOffset:this._scrollElement.scrollTop},t._getScrollHeight=function(){return this._scrollElement.scrollHeight||Math.max(document.body.scrollHeight,document.documentElement.scrollHeight)},t._getOffsetHeight=function(){return this._scrollElement===window?window.innerHeight:this._scrollElement.getBoundingClientRect().height},t._process=function(){var t=this._getScrollTop()+this._config.offset,e=this._getScrollHeight(),n=this._config.offset+e-this._getOffsetHeight();if(this._scrollHeight!==e&&this.refresh(),n<=t){var i=this._targets[this._targets.length-1];this._activeTarget!==i&&this._activate(i)}else{if(this._activeTarget&&t<this._offsets[0]&&0<this._offsets[0])return this._activeTarget=null,void this._clear();for(var o=this._offsets.length;o--;){this._activeTarget!==this._targets[o]&&t>=this._offsets[o]&&("undefined"==typeof this._offsets[o+1]||t<this._offsets[o+1])&&this._activate(this._targets[o])}}},t._activate=function(e){this._activeTarget=e,this._clear();var t=this._selector.split(",").map(function(t){return t+'[data-target="'+e+'"],'+t+'[href="'+e+'"]'}),n=g([].slice.call(document.querySelectorAll(t.join(","))));n.hasClass(gn)?(n.closest(Cn).find(Sn).addClass(_n),n.addClass(_n)):(n.addClass(_n),n.parents(pn).prev(vn+", "+En).addClass(_n),n.parents(pn).prev(yn).children(vn).addClass(_n)),g(this._scrollElement).trigger(dn.ACTIVATE,{relatedTarget:e})},t._clear=function(){[].slice.call(document.querySelectorAll(this._selector)).filter(function(t){return t.classList.contains(_n)}).forEach(function(t){return t.classList.remove(_n)})},n._jQueryInterface=function(e){return this.each(function(){var t=g(this).data(ln);if(t||(t=new n(this,"object"==typeof e&&e),g(this).data(ln,t)),"string"==typeof e){if("undefined"==typeof t[e])throw new TypeError('No method named "'+e+'"');t[e]()}})},s(n,null,[{key:"VERSION",get:function(){return"4.3.1"}},{key:"Default",get:function(){return 
un}}]),n}();g(window).on(dn.LOAD_DATA_API,function(){for(var t=[].slice.call(document.querySelectorAll(mn)),e=t.length;e--;){var n=g(t[e]);Dn._jQueryInterface.call(n,n.data())}}),g.fn[an]=Dn._jQueryInterface,g.fn[an].Constructor=Dn,g.fn[an].noConflict=function(){return g.fn[an]=hn,Dn._jQueryInterface};var wn="bs.tab",An="."+wn,Nn=g.fn.tab,On={HIDE:"hide"+An,HIDDEN:"hidden"+An,SHOW:"show"+An,SHOWN:"shown"+An,CLICK_DATA_API:"click"+An+".data-api"},kn="dropdown-menu",Pn="active",Ln="disabled",jn="fade",Hn="show",Rn=".dropdown",xn=".nav, .list-group",Fn=".active",Un="> li > .active",Wn='[data-toggle="tab"], [data-toggle="pill"], [data-toggle="list"]',qn=".dropdown-toggle",Mn="> .dropdown-menu .active",Kn=function(){function i(t){this._element=t}var t=i.prototype;return t.show=function(){var n=this;if(!(this._element.parentNode&&this._element.parentNode.nodeType===Node.ELEMENT_NODE&&g(this._element).hasClass(Pn)||g(this._element).hasClass(Ln))){var t,i,e=g(this._element).closest(xn)[0],o=_.getSelectorFromElement(this._element);if(e){var r="UL"===e.nodeName||"OL"===e.nodeName?Un:Fn;i=(i=g.makeArray(g(e).find(r)))[i.length-1]}var s=g.Event(On.HIDE,{relatedTarget:this._element}),a=g.Event(On.SHOW,{relatedTarget:i});if(i&&g(i).trigger(s),g(this._element).trigger(a),!a.isDefaultPrevented()&&!s.isDefaultPrevented()){o&&(t=document.querySelector(o)),this._activate(this._element,e);var l=function(){var t=g.Event(On.HIDDEN,{relatedTarget:n._element}),e=g.Event(On.SHOWN,{relatedTarget:i});g(i).trigger(t),g(n._element).trigger(e)};t?this._activate(t,t.parentNode,l):l()}}},t.dispose=function(){g.removeData(this._element,wn),this._element=null},t._activate=function(t,e,n){var i=this,o=(!e||"UL"!==e.nodeName&&"OL"!==e.nodeName?g(e).children(Fn):g(e).find(Un))[0],r=n&&o&&g(o).hasClass(jn),s=function(){return i._transitionComplete(t,o,n)};if(o&&r){var a=_.getTransitionDurationFromElement(o);g(o).removeClass(Hn).one(_.TRANSITION_END,s).emulateTransitionEnd(a)}else s()},t._transitionComplete=function(t,e,n){if(e){g(e).removeClass(Pn);var i=g(e.parentNode).find(Mn)[0];i&&g(i).removeClass(Pn),"tab"===e.getAttribute("role")&&e.setAttribute("aria-selected",!1)}if(g(t).addClass(Pn),"tab"===t.getAttribute("role")&&t.setAttribute("aria-selected",!0),_.reflow(t),t.classList.contains(jn)&&t.classList.add(Hn),t.parentNode&&g(t.parentNode).hasClass(kn)){var o=g(t).closest(Rn)[0];if(o){var r=[].slice.call(o.querySelectorAll(qn));g(r).addClass(Pn)}t.setAttribute("aria-expanded",!0)}n&&n()},i._jQueryInterface=function(n){return this.each(function(){var t=g(this),e=t.data(wn);if(e||(e=new i(this),t.data(wn,e)),"string"==typeof n){if("undefined"==typeof e[n])throw new TypeError('No method named "'+n+'"');e[n]()}})},s(i,null,[{key:"VERSION",get:function(){return"4.3.1"}}]),i}();g(document).on(On.CLICK_DATA_API,Wn,function(t){t.preventDefault(),Kn._jQueryInterface.call(g(this),"show")}),g.fn.tab=Kn._jQueryInterface,g.fn.tab.Constructor=Kn,g.fn.tab.noConflict=function(){return g.fn.tab=Nn,Kn._jQueryInterface};var Qn="toast",Bn="bs.toast",Vn="."+Bn,Yn=g.fn[Qn],zn={CLICK_DISMISS:"click.dismiss"+Vn,HIDE:"hide"+Vn,HIDDEN:"hidden"+Vn,SHOW:"show"+Vn,SHOWN:"shown"+Vn},Xn="fade",$n="hide",Gn="show",Jn="showing",Zn={animation:"boolean",autohide:"boolean",delay:"number"},ti={animation:!0,autohide:!0,delay:500},ei='[data-dismiss="toast"]',ni=function(){function i(t,e){this._element=t,this._config=this._getConfig(e),this._timeout=null,this._setListeners()}var t=i.prototype;return t.show=function(){var 
t=this;g(this._element).trigger(zn.SHOW),this._config.animation&&this._element.classList.add(Xn);var e=function(){t._element.classList.remove(Jn),t._element.classList.add(Gn),g(t._element).trigger(zn.SHOWN),t._config.autohide&&t.hide()};if(this._element.classList.remove($n),this._element.classList.add(Jn),this._config.animation){var n=_.getTransitionDurationFromElement(this._element);g(this._element).one(_.TRANSITION_END,e).emulateTransitionEnd(n)}else e()},t.hide=function(t){var e=this;this._element.classList.contains(Gn)&&(g(this._element).trigger(zn.HIDE),t?this._close():this._timeout=setTimeout(function(){e._close()},this._config.delay))},t.dispose=function(){clearTimeout(this._timeout),this._timeout=null,this._element.classList.contains(Gn)&&this._element.classList.remove(Gn),g(this._element).off(zn.CLICK_DISMISS),g.removeData(this._element,Bn),this._element=null,this._config=null},t._getConfig=function(t){return t=l({},ti,g(this._element).data(),"object"==typeof t&&t?t:{}),_.typeCheckConfig(Qn,t,this.constructor.DefaultType),t},t._setListeners=function(){var t=this;g(this._element).on(zn.CLICK_DISMISS,ei,function(){return t.hide(!0)})},t._close=function(){var t=this,e=function(){t._element.classList.add($n),g(t._element).trigger(zn.HIDDEN)};if(this._element.classList.remove(Gn),this._config.animation){var n=_.getTransitionDurationFromElement(this._element);g(this._element).one(_.TRANSITION_END,e).emulateTransitionEnd(n)}else e()},i._jQueryInterface=function(n){return this.each(function(){var t=g(this),e=t.data(Bn);if(e||(e=new i(this,"object"==typeof n&&n),t.data(Bn,e)),"string"==typeof n){if("undefined"==typeof e[n])throw new TypeError('No method named "'+n+'"');e[n](this)}})},s(i,null,[{key:"VERSION",get:function(){return"4.3.1"}},{key:"DefaultType",get:function(){return Zn}},{key:"Default",get:function(){return ti}}]),i}();g.fn[Qn]=ni._jQueryInterface,g.fn[Qn].Constructor=ni,g.fn[Qn].noConflict=function(){return g.fn[Qn]=Yn,ni._jQueryInterface},function(){if("undefined"==typeof g)throw new TypeError("Bootstrap's JavaScript requires jQuery. jQuery must be included before Bootstrap's JavaScript.");var t=g.fn.jquery.split(" ")[0].split(".");if(t[0]<2&&t[1]<9||1===t[0]&&9===t[1]&&t[2]<1||4<=t[0])throw new Error("Bootstrap's JavaScript requires at least jQuery v1.9.1 but less than v4.0.0")}(),t.Util=_,t.Alert=p,t.Button=P,t.Carousel=lt,t.Collapse=bt,t.Dropdown=Jt,t.Modal=ve,t.Popover=sn,t.Scrollspy=Dn,t.Tab=Kn,t.Toast=ni,t.Tooltip=Be,Object.defineProperty(t,"__esModule",{value:!0})});
//# sourceMappingURL=bootstrap.min.js.map | PypiClean |